Commit 8ac44c4
Plim committed
1 Parent(s): 1d17287

Model save
.ipynb_checkpoints/README-checkpoint.md ADDED
@@ -0,0 +1,70 @@
+ ---
+ language:
+ - fr
+ license: apache-2.0
+ tags:
+ - automatic-speech-recognition
+ - mozilla-foundation/common_voice_7_0
+ - generated_from_trainer
+ datasets:
+ - common_voice
+ model-index:
+ - name: ''
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ #
+
+ This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the MOZILLA-FOUNDATION/COMMON_VOICE_7_0 - FR dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.5417
+ - Wer: 0.4479
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 7.5e-05
+ - train_batch_size: 8
+ - eval_batch_size: 8
+ - seed: 42
+ - gradient_accumulation_steps: 4
+ - total_train_batch_size: 32
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_steps: 2000
+ - num_epochs: 0.2
+ - mixed_precision_training: Native AMP
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Wer    |
+ |:-------------:|:-----:|:----:|:---------------:|:------:|
+ | 6.9106        | 0.04  | 500  | 6.7171          | 1.0    |
+ | 3.0034        | 0.08  | 1000 | 3.0126          | 1.0    |
+ | 2.8699        | 0.12  | 1500 | 2.8509          | 0.9817 |
+ | 1.629         | 0.16  | 2000 | 0.7787          | 0.5861 |
+
+
+ ### Framework versions
+
+ - Transformers 4.17.0.dev0
+ - Pytorch 1.10.2+cu102
+ - Datasets 1.18.2.dev0
+ - Tokenizers 0.11.0
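
For reference, a minimal inference sketch for this checkpoint (the repo id `Plim/xls-r-300m-fr` comes from the run metadata further down; the snippet itself is not part of the committed files):

```python
from transformers import pipeline

# Hypothetical usage sketch: load the fine-tuned checkpoint for French ASR.
asr = pipeline("automatic-speech-recognition", model="Plim/xls-r-300m-fr")

# Transcribe a local audio file; the feature extractor expects 16 kHz audio.
print(asr("example.wav")["text"])
```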
.ipynb_checkpoints/config-checkpoint.json DELETED
@@ -1,107 +0,0 @@
- {
-   "_name_or_path": "facebook/wav2vec2-xls-r-300m",
-   "activation_dropout": 0.1,
-   "adapter_kernel_size": 3,
-   "adapter_stride": 2,
-   "add_adapter": false,
-   "apply_spec_augment": true,
-   "architectures": ["Wav2Vec2ForPreTraining"],
-   "attention_dropout": 0.0,
-   "bos_token_id": 1,
-   "classifier_proj_size": 256,
-   "codevector_dim": 768,
-   "contrastive_logits_temperature": 0.1,
-   "conv_bias": true,
-   "conv_dim": [512, 512, 512, 512, 512, 512, 512],
-   "conv_kernel": [10, 3, 3, 3, 3, 2, 2],
-   "conv_stride": [5, 2, 2, 2, 2, 2, 2],
-   "ctc_loss_reduction": "mean",
-   "ctc_zero_infinity": false,
-   "diversity_loss_weight": 0.1,
-   "do_stable_layer_norm": true,
-   "eos_token_id": 2,
-   "feat_extract_activation": "gelu",
-   "feat_extract_dropout": 0.0,
-   "feat_extract_norm": "layer",
-   "feat_proj_dropout": 0.0,
-   "feat_quantizer_dropout": 0.0,
-   "final_dropout": 0.0,
-   "hidden_act": "gelu",
-   "hidden_dropout": 0.0,
-   "hidden_size": 1024,
-   "initializer_range": 0.02,
-   "intermediate_size": 4096,
-   "layer_norm_eps": 1e-05,
-   "layerdrop": 0.0,
-   "mask_feature_length": 64,
-   "mask_feature_min_masks": 0,
-   "mask_feature_prob": 0.25,
-   "mask_time_length": 10,
-   "mask_time_min_masks": 2,
-   "mask_time_prob": 0.75,
-   "model_type": "wav2vec2",
-   "num_adapter_layers": 3,
-   "num_attention_heads": 16,
-   "num_codevector_groups": 2,
-   "num_codevectors_per_group": 320,
-   "num_conv_pos_embedding_groups": 16,
-   "num_conv_pos_embeddings": 128,
-   "num_feat_extract_layers": 7,
-   "num_hidden_layers": 24,
-   "num_negatives": 100,
-   "output_hidden_size": 1024,
-   "pad_token_id": 283,
-   "proj_codevector_dim": 768,
-   "tdnn_dilation": [1, 2, 3, 1, 1],
-   "tdnn_dim": [512, 512, 512, 512, 1500],
-   "tdnn_kernel": [5, 3, 3, 1, 1],
-   "torch_dtype": "float32",
-   "transformers_version": "4.17.0.dev0",
-   "use_weighted_layer_sum": false,
-   "vocab_size": 284,
-   "xvector_output_dim": 512
- }
.ipynb_checkpoints/eval-checkpoint.py CHANGED
@@ -49,9 +49,9 @@ def log_results(result: Dataset, args: Dict[str, str]):
  def normalize_text(text: str) -> str:
      """DO ADAPT FOR YOUR USE CASE. this function normalizes the target text."""

-     chars_to_ignore_regex = '[,?.!-;:"“%‘”�—…–=^_`{|}~£§«®°±´µ·º»½×ßáãäìíðñòóõöøýþÿāăąćċčďđēėęěğġħĩī생집]' # noqa: W605 IMPORTANT: this should correspond to the chars that were ignored during training
+     chars_to_ignore_regex = '[^a-zàâäçéèêëîïôöùûüÿ\'’ ]' # noqa: W605 IMPORTANT: this should correspond to the chars that were ignored during training

-     text = re.sub(chars_to_ignore_regex, "", text.lower())
+     text = re.sub(chars_to_ignore_regex, "", text.lower()).replace('’', "'")

      # In addition, we can normalize the target text, e.g. removing new lines characters etc...
      # note that order is important here!
@@ -68,7 +68,7 @@ def main(args):
      dataset = load_dataset(args.dataset, args.config, split=args.split, use_auth_token=True)

      # for testing: only process the first two examples as a test
-     dataset = dataset.select(range(10))
+     # dataset = dataset.select(range(10))

      # load processor
      feature_extractor = AutoFeatureExtractor.from_pretrained(args.model_id)
@@ -86,7 +86,7 @@ def main(args):
          batch["audio"]["array"], chunk_length_s=args.chunk_length_s, stride_length_s=args.stride_length_s
      )

-     batch["prediction"] = prediction["text"].replace("<s>", "")
+     batch["prediction"] = prediction["text"]
      batch["target"] = normalize_text(batch["sentence"])
      return batch
.ipynb_checkpoints/preprocessor_config-checkpoint.json DELETED
@@ -1,9 +0,0 @@
- {
-   "do_normalize": true,
-   "feature_extractor_type": "Wav2Vec2FeatureExtractor",
-   "feature_size": 1,
-   "padding_side": "right",
-   "padding_value": 0,
-   "return_attention_mask": true,
-   "sampling_rate": 16000
- }
.ipynb_checkpoints/run-checkpoint.sh CHANGED
@@ -1,33 +1,39 @@
+ WANDB_PROJECT=xls-r-300-fr
  python run_speech_recognition_ctc.py \
- --dataset_name="mozilla-foundation/common_voice_7_0" \
- --model_name_or_path="facebook/wav2vec2-xls-r-300m" \
- --dataset_config_name="fr" \
- --output_dir="./" \
- --overwrite_output_dir \
- --num_train_epochs="0.2" \
- --per_device_train_batch_size="8" \
- --per_device_eval_batch_size="8" \
- --gradient_accumulation_steps="4" \
- --learning_rate="7.5e-5" \
- --warmup_steps="2000" \
- --length_column_name="input_length" \
- --evaluation_strategy="steps" \
- --text_column_name="sentence" \
- --save_steps="500" \
- --eval_steps="500" \
- --logging_steps="100" \
- --layerdrop="0.0" \
- --activation_dropout="0.1" \
- --save_total_limit="3" \
- --freeze_feature_encoder \
- --feat_proj_dropout="0.0" \
- --mask_time_prob="0.75" \
- --mask_time_length="10" \
- --mask_feature_prob="0.25" \
- --mask_feature_length="64" \
- --gradient_checkpointing \
- --use_auth_token \
- --fp16 \
- --group_by_length \
- --do_train --do_eval \
- --push_to_hub
+ --activation_dropout="0.1" \
+ --dataset_name="mozilla-foundation/common_voice_7_0" \
+ --dataset_config_name="fr" \
+ --eval_steps="500" \
+ --evaluation_strategy="steps" \
+ --feat_proj_dropout="0.0" \
+ --freeze_feature_encoder \
+ --fp16 \
+ --gradient_accumulation_steps="8" \
+ --gradient_checkpointing \
+ --group_by_length \
+ --layerdrop="0.0" \
+ --learning_rate="7.5e-5" \
+ --length_column_name="input_length" \
+ --load_best_model_at_end \
+ --logging_steps="100" \
+ --mask_feature_length="64" \
+ --mask_feature_prob="0.25" \
+ --mask_time_length="10" \
+ --mask_time_prob="0.75" \
+ --max_train_samples="1000" \
+ --max_eval_samples="200" \
+ --model_name_or_path="facebook/wav2vec2-xls-r-300m" \
+ --num_train_epochs="0.2" \
+ --output_dir="./" \
+ --overwrite_output_dir \
+ --per_device_train_batch_size="8" \
+ --per_device_eval_batch_size="8" \
+ --preprocessing_num_workers="4" \
+ --push_to_hub \
+ --report_to="wandb" \
+ --save_steps="500" \
+ --save_total_limit="3" \
+ --text_column_name="sentence" \
+ --use_auth_token \
+ --warmup_steps="2000" \
+ --do_train --do_eval
.ipynb_checkpoints/special_tokens_map-checkpoint.json DELETED
@@ -1 +0,0 @@
- {"unk_token": "[UNK]", "pad_token": "[PAD]"}
.ipynb_checkpoints/tokenizer_config-checkpoint.json DELETED
@@ -1 +0,0 @@
- {"unk_token": "[UNK]", "bos_token": null, "eos_token": null, "pad_token": "[PAD]", "do_lower_case": false, "word_delimiter_token": "|", "special_tokens_map_file": null, "tokenizer_file": null, "name_or_path": "./", "tokenizer_class": "Wav2Vec2CTCTokenizer"}
.ipynb_checkpoints/vocab-checkpoint.json DELETED
@@ -1 +0,0 @@
- {"'": 1, "a": 2, "b": 3, "c": 4, "d": 5, "e": 6, "f": 7, "g": 8, "h": 9, "i": 10, "j": 11, "k": 12, "l": 13, "m": 14, "n": 15, "o": 16, "p": 17, "q": 18, "r": 19, "s": 20, "t": 21, "u": 22, "v": 23, "w": 24, "x": 25, "y": 26, "z": 27, "\u00e0": 28, "\u00e1": 29, "\u00e2": 30, "\u00e4": 31, "\u00e5": 32, "\u00e6": 33, "\u00e7": 34, "\u00e8": 35, "\u00e9": 36, "\u00ea": 37, "\u00eb": 38, "\u00ec": 39, "\u00ed": 40, "\u00ee": 41, "\u00ef": 42, "\u00f1": 43, "\u00f2": 44, "\u00f3": 45, "\u00f4": 46, "\u00f5": 47, "\u00f6": 48, "\u00f8": 49, "\u00f9": 50, "\u00fa": 51, "\u00fb": 52, "\u00fc": 53, "\u00fe": 54, "\u00ff": 55, "\u0101": 56, "\u0107": 57, "\u010b": 58, "\u010d": 59, "\u0111": 60, "\u0113": 61, "\u0121": 62, "\u012b": 63, "\u0131": 64, "\u0137": 65, "\u013a": 66, "\u013c": 67, "\u013e": 68, "\u0142": 69, "\u0144": 70, "\u0146": 71, "\u0148": 72, "\u014d": 73, "\u014f": 74, "\u0151": 75, "\u0153": 76, "\u0159": 77, "\u015b": 78, "\u015f": 79, "\u0161": 80, "\u0163": 81, "\u0165": 82, "\u0169": 83, "\u016b": 84, "\u016f": 85, "\u0171": 86, "\u0173": 87, "\u017a": 88, "\u017c": 89, "\u017e": 90, "\u01a1": 91, "\u01b0": 92, "\u01c0": 93, "\u01c3": 94, "\u01ce": 95, "\u01d4": 96, "\u01eb": 97, "\u01f9": 98, "\u0219": 99, "\u021b": 100, "\u0259": 101, "\u0268": 102, "\u0289": 103, "\u0294": 104, "\u02bb": 105, "\u02bc": 106, "\u02bd": 107, "\u02be": 108, "\u02bf": 109, "\u02d0": 110, "\u02e2": 111, "\u0300": 112, "\u0301": 113, "\u0302": 114, "\u0303": 115, "\u0307": 116, "\u0308": 117, "\u0310": 118, "\u0320": 119, "\u0327": 120, "\u0331": 121, "\u0332": 122, "\u03b1": 123, "\u03b2": 124, "\u03b3": 125, "\u03b4": 126, "\u03b5": 127, "\u03b6": 128, "\u03b7": 129, "\u03b8": 130, "\u03b9": 131, "\u03ba": 132, "\u03bb": 133, "\u03bc": 134, "\u03bd": 135, "\u03bf": 136, "\u03c0": 137, "\u03c1": 138, "\u03c2": 139, "\u03c3": 140, "\u03c4": 141, "\u03c5": 142, "\u03c6": 143, "\u03c7": 144, "\u03c8": 145, "\u03c9": 146, "\u03cc": 147, "\u0430": 148, "\u0433": 149, "\u0435": 150, "\u0437": 151, "\u0438": 152, "\u043a": 153, "\u043c": 154, "\u043d": 155, "\u043f": 156, "\u0440": 157, "\u0446": 158, "\u0447": 159, "\u044d": 160, "\u044f": 161, "\u0454": 162, "\u0456": 163, "\u0458": 164, "\u04ab": 165, "\u04cc": 166, "\u0563": 167, "\u0566": 168, "\u0627": 169, "\u0628": 170, "\u0629": 171, "\u062f": 172, "\u0631": 173, "\u0644": 174, "\u0645": 175, "\u0646": 176, "\u0648": 177, "\u064a": 178, "\u1240": 179, "\u12a8": 180, "\u12c8": 181, "\u12f0": 182, "\u1300": 183, "\u1320": 184, "\u1e0d": 185, "\u1e25": 186, "\u1e45": 187, "\u1e47": 188, "\u1e63": 189, "\u1e6d": 190, "\u1e6f": 191, "\u1e93": 192, "\u1ea1": 193, "\u1ea3": 194, "\u1ea7": 195, "\u1ead": 196, "\u1eaf": 197, "\u1eb5": 198, "\u1ebf": 199, "\u1ec1": 200, "\u1ec5": 201, "\u1ec7": 202, "\u1ecb": 203, "\u1ed1": 204, "\u1ed3": 205, "\u1ed5": 206, "\u1ed9": 207, "\u1edb": 208, "\u1edd": 209, "\u1ee3": 210, "\u1ee5": 211, "\u1ee7": 212, "\u1ee9": 213, "\u1eed": 214, "\u1ef3": 215, "\u2010": 216, "\u2015": 217, "\u201e": 218, "\u2020": 219, "\u2032": 220, "\u2039": 221, "\u203a": 222, "\u20ac": 223, "\u20bd": 224, "\u2102": 225, "\u2115": 226, "\u211a": 227, "\u211d": 228, "\u2124": 229, "\u2130": 230, "\u2135": 231, "\u2192": 232, "\u2194": 233, "\u2205": 234, "\u2206": 235, "\u2208": 236, "\u2212": 237, "\u221e": 238, "\u2228": 239, "\u223c": 240, "\u2265": 241, "\u22c5": 242, "\u2500": 243, "\u2609": 244, "\u2c45": 245, "\u2c4e": 246, "\u3044": 247, "\u3046": 248, "\u305f": 249, "\u3064": 250, "\u306c": 251, "\u306e": 252, "\u3072": 253, 
"\u3078": 254, "\u307e": 255, "\u3080": 256, "\u3081": 257, "\u3082": 258, "\u3084": 259, "\u3094": 260, "\u4e39": 261, "\u4e43": 262, "\u4eac": 263, "\u5317": 264, "\u626c": 265, "\u6587": 266, "\u661f": 267, "\u672f": 268, "\u675c": 269, "\u7261": 270, "\u750c": 271, "\u7f8e": 272, "\u897f": 273, "\u8cb4": 274, "\u9752": 275, "\u9986": 276, "\ua751": 277, "\uace0": 278, "\uae30": 279, "\uba39": 280, "\uc0bc": 281, "|": 0, "[UNK]": 282, "[PAD]": 283}
all_results.json DELETED
@@ -1,14 +0,0 @@
- {
-   "epoch": 0.2,
-   "eval_loss": 0.5416641235351562,
-   "eval_runtime": 816.3514,
-   "eval_samples": 15941,
-   "eval_samples_per_second": 19.527,
-   "eval_steps_per_second": 2.441,
-   "eval_wer": 0.4478597919027506,
-   "train_loss": 4.42930416077822,
-   "train_runtime": 8599.8864,
-   "train_samples": 395042,
-   "train_samples_per_second": 9.187,
-   "train_steps_per_second": 0.287
- }
config.json CHANGED
@@ -76,7 +76,7 @@
    "num_hidden_layers": 24,
    "num_negatives": 100,
    "output_hidden_size": 1024,
-   "pad_token_id": 283,
+   "pad_token_id": 40,
    "proj_codevector_dim": 768,
    "tdnn_dilation": [
      1,
@@ -102,6 +102,6 @@
    "torch_dtype": "float32",
    "transformers_version": "4.17.0.dev0",
    "use_weighted_layer_sum": false,
-   "vocab_size": 284,
+   "vocab_size": 41,
    "xvector_output_dim": 512
  }
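
The two edits above keep `config.json` consistent with the rebuilt tokenizer below: the vocabulary shrinks from 284 entries to 41, so `[PAD]` moves from id 283 to id 40. A quick consistency check, sketched against a local clone of the repo:

```python
import json

from transformers import Wav2Vec2Config

# Sketch: the CTC head size and pad id must match the new vocabulary.
config = Wav2Vec2Config.from_pretrained("./")
with open("vocab.json") as f:
    vocab = json.load(f)

assert config.vocab_size == len(vocab)        # 41
assert config.pad_token_id == vocab["[PAD]"]  # 40
```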
eval.py CHANGED
@@ -49,9 +49,9 @@ def log_results(result: Dataset, args: Dict[str, str]):
  def normalize_text(text: str) -> str:
      """DO ADAPT FOR YOUR USE CASE. this function normalizes the target text."""

-     chars_to_ignore_regex = '[,?.!-;:"“%‘”�—…–=^_`{|}~£§«®°±´µ·º»½×ßáãäìíðñòóõöøýþÿāăąćċčďđēėęěğġħĩī생집]' # noqa: W605 IMPORTANT: this should correspond to the chars that were ignored during training
+     chars_to_ignore_regex = '[^a-zàâäçéèêëîïôöùûüÿ\'’ ]' # noqa: W605 IMPORTANT: this should correspond to the chars that were ignored during training

-     text = re.sub(chars_to_ignore_regex, "", text.lower())
+     text = re.sub(chars_to_ignore_regex, "", text.lower()).replace('’', "'")

      # In addition, we can normalize the target text, e.g. removing new lines characters etc...
      # note that order is important here!
@@ -68,7 +68,7 @@ def main(args):
      dataset = load_dataset(args.dataset, args.config, split=args.split, use_auth_token=True)

      # for testing: only process the first two examples as a test
-     dataset = dataset.select(range(10))
+     # dataset = dataset.select(range(10))

      # load processor
      feature_extractor = AutoFeatureExtractor.from_pretrained(args.model_id)
@@ -86,7 +86,7 @@ def main(args):
          batch["audio"]["array"], chunk_length_s=args.chunk_length_s, stride_length_s=args.stride_length_s
      )

-     batch["prediction"] = prediction["text"].replace("<s>", "")
+     batch["prediction"] = prediction["text"]
      batch["target"] = normalize_text(batch["sentence"])
      return batch
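
The net effect of the two `normalize_text` changes is easier to see in isolation: instead of stripping a hand-collected blocklist of characters, the new regex keeps only lowercase French letters, apostrophes, and spaces. A standalone sketch (the sample sentence is illustrative):

```python
import re

def normalize_text(text: str) -> str:
    # Keep only lowercase French letters, apostrophes, and spaces, then
    # fold the typographic apostrophe into the ASCII one, as in eval.py.
    chars_to_ignore_regex = "[^a-zàâäçéèêëîïôöùûüÿ'’ ]"
    return re.sub(chars_to_ignore_regex, "", text.lower()).replace("’", "'")

print(normalize_text("L’école, c’est FINI !"))
# -> "l'école c'est fini "  (punctuation stripped, apostrophes unified)
```

The `<s>`-stripping workaround in `main` is also dropped, presumably because the rebuilt tokenizer no longer emits that token.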
eval_results.json DELETED
@@ -1,9 +0,0 @@
- {
-   "epoch": 0.2,
-   "eval_loss": 0.5416641235351562,
-   "eval_runtime": 816.3514,
-   "eval_samples": 15941,
-   "eval_samples_per_second": 19.527,
-   "eval_steps_per_second": 2.441,
-   "eval_wer": 0.4478597919027506
- }
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:3ec1c7675b56877de46f623ac51149e73d4f88cac691dc72595621d35344ce9b
- size 1263088113
+ oid sha256:b042f9e3e1a3adceae4bf7dbadeee3279b0dc94f7582075e5635e30c922536ed
+ size 1262091761
run.sh CHANGED
@@ -1,33 +1,39 @@
+ WANDB_PROJECT=xls-r-300-fr
  python run_speech_recognition_ctc.py \
- --dataset_name="mozilla-foundation/common_voice_7_0" \
- --model_name_or_path="facebook/wav2vec2-xls-r-300m" \
- --dataset_config_name="fr" \
- --output_dir="./" \
- --overwrite_output_dir \
- --num_train_epochs="0.2" \
- --per_device_train_batch_size="8" \
- --per_device_eval_batch_size="8" \
- --gradient_accumulation_steps="4" \
- --learning_rate="7.5e-5" \
- --warmup_steps="2000" \
- --length_column_name="input_length" \
- --evaluation_strategy="steps" \
- --text_column_name="sentence" \
- --save_steps="500" \
- --eval_steps="500" \
- --logging_steps="100" \
- --layerdrop="0.0" \
- --activation_dropout="0.1" \
- --save_total_limit="3" \
- --freeze_feature_encoder \
- --feat_proj_dropout="0.0" \
- --mask_time_prob="0.75" \
- --mask_time_length="10" \
- --mask_feature_prob="0.25" \
- --mask_feature_length="64" \
- --gradient_checkpointing \
- --use_auth_token \
- --fp16 \
- --group_by_length \
- --do_train --do_eval \
- --push_to_hub
+ --activation_dropout="0.1" \
+ --dataset_name="mozilla-foundation/common_voice_7_0" \
+ --dataset_config_name="fr" \
+ --eval_steps="500" \
+ --evaluation_strategy="steps" \
+ --feat_proj_dropout="0.0" \
+ --freeze_feature_encoder \
+ --fp16 \
+ --gradient_accumulation_steps="8" \
+ --gradient_checkpointing \
+ --group_by_length \
+ --layerdrop="0.0" \
+ --learning_rate="7.5e-5" \
+ --length_column_name="input_length" \
+ --load_best_model_at_end \
+ --logging_steps="100" \
+ --mask_feature_length="64" \
+ --mask_feature_prob="0.25" \
+ --mask_time_length="10" \
+ --mask_time_prob="0.75" \
+ --max_train_samples="1000" \
+ --max_eval_samples="200" \
+ --model_name_or_path="facebook/wav2vec2-xls-r-300m" \
+ --num_train_epochs="0.2" \
+ --output_dir="./" \
+ --overwrite_output_dir \
+ --per_device_train_batch_size="8" \
+ --per_device_eval_batch_size="8" \
+ --preprocessing_num_workers="4" \
+ --push_to_hub \
+ --report_to="wandb" \
+ --save_steps="500" \
+ --save_total_limit="3" \
+ --text_column_name="sentence" \
+ --use_auth_token \
+ --warmup_steps="2000" \
+ --do_train --do_eval
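
One caveat with the new first line: a bare `WANDB_PROJECT=xls-r-300-fr` on its own line sets an unexported shell variable, so the `python` process started on the following line will not see it. For the project name to actually reach wandb, the assignment would need to sit on the same line as the command (`WANDB_PROJECT=xls-r-300-fr python run_speech_recognition_ctc.py ...`) or be exported first (`export WANDB_PROJECT=xls-r-300-fr`). The `--max_train_samples="1000"` and `--max_eval_samples="200"` flags also look like smoke-test settings, matching the 3-step run recorded in the wandb summary below.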
train_results.json DELETED
@@ -1,8 +0,0 @@
- {
-   "epoch": 0.2,
-   "train_loss": 4.42930416077822,
-   "train_runtime": 8599.8864,
-   "train_samples": 395042,
-   "train_samples_per_second": 9.187,
-   "train_steps_per_second": 0.287
- }
trainer_state.json DELETED
@@ -1,205 +0,0 @@
- {
-   "best_metric": null,
-   "best_model_checkpoint": null,
-   "epoch": 0.19999594985925762,
-   "global_step": 2469,
-   "is_hyper_param_search": false,
-   "is_local_process_zero": true,
-   "is_world_process_zero": true,
-   "log_history": [
-     {"epoch": 0.01, "learning_rate": 3.7125e-06, "loss": 19.7984, "step": 100},
-     {"epoch": 0.02, "learning_rate": 7.425e-06, "loss": 13.3485, "step": 200},
-     {"epoch": 0.02, "learning_rate": 1.1174999999999999e-05, "loss": 10.0074, "step": 300},
-     {"epoch": 0.03, "learning_rate": 1.4925e-05, "loss": 8.4765, "step": 400},
-     {"epoch": 0.04, "learning_rate": 1.8675e-05, "loss": 6.9106, "step": 500},
-     {"epoch": 0.04, "eval_loss": 6.717121124267578, "eval_runtime": 908.8081, "eval_samples_per_second": 17.541, "eval_steps_per_second": 2.193, "eval_wer": 1.0, "step": 500},
-     {"epoch": 0.05, "learning_rate": 2.2424999999999996e-05, "loss": 5.4175, "step": 600},
-     {"epoch": 0.06, "learning_rate": 2.6174999999999996e-05, "loss": 4.1471, "step": 700},
-     {"epoch": 0.06, "learning_rate": 2.9925e-05, "loss": 3.3536, "step": 800},
-     {"epoch": 0.07, "learning_rate": 3.3675e-05, "loss": 3.0742, "step": 900},
-     {"epoch": 0.08, "learning_rate": 3.7424999999999995e-05, "loss": 3.0034, "step": 1000},
-     {"epoch": 0.08, "eval_loss": 3.01257061958313, "eval_runtime": 815.2192, "eval_samples_per_second": 19.554, "eval_steps_per_second": 2.445, "eval_wer": 1.0, "step": 1000},
-     {"epoch": 0.09, "learning_rate": 4.1175e-05, "loss": 2.9695, "step": 1100},
-     {"epoch": 0.1, "learning_rate": 4.4924999999999994e-05, "loss": 2.9321, "step": 1200},
-     {"epoch": 0.11, "learning_rate": 4.8675e-05, "loss": 2.916, "step": 1300},
-     {"epoch": 0.11, "learning_rate": 5.2424999999999994e-05, "loss": 2.8952, "step": 1400},
-     {"epoch": 0.12, "learning_rate": 5.6175e-05, "loss": 2.8699, "step": 1500},
-     {"epoch": 0.12, "eval_loss": 2.850850820541382, "eval_runtime": 814.3164, "eval_samples_per_second": 19.576, "eval_steps_per_second": 2.447, "eval_wer": 0.9817464201091995, "step": 1500},
-     {"epoch": 0.13, "learning_rate": 5.9925e-05, "loss": 2.7752, "step": 1600},
-     {"epoch": 0.14, "learning_rate": 6.367499999999999e-05, "loss": 2.4403, "step": 1700},
-     {"epoch": 0.15, "learning_rate": 6.7425e-05, "loss": 1.9873, "step": 1800},
-     {"epoch": 0.15, "learning_rate": 7.1175e-05, "loss": 1.7549, "step": 1900},
-     {"epoch": 0.16, "learning_rate": 7.48875e-05, "loss": 1.629, "step": 2000},
-     {"epoch": 0.16, "eval_loss": 0.7786943316459656, "eval_runtime": 820.0379, "eval_samples_per_second": 19.439, "eval_steps_per_second": 2.43, "eval_wer": 0.586136293396518, "step": 2000},
-     {"epoch": 0.17, "learning_rate": 5.948827292110874e-05, "loss": 1.5154, "step": 2100},
-     {"epoch": 0.18, "learning_rate": 4.349680170575692e-05, "loss": 1.4719, "step": 2200},
-     {"epoch": 0.19, "learning_rate": 2.7505330490405118e-05, "loss": 1.4058, "step": 2300},
-     {"epoch": 0.19, "learning_rate": 1.1513859275053305e-05, "loss": 1.3579, "step": 2400},
-     {"epoch": 0.2, "step": 2469, "total_flos": 1.1608261301380424e+19, "train_loss": 4.42930416077822, "train_runtime": 8599.8864, "train_samples_per_second": 9.187, "train_steps_per_second": 0.287}
-   ],
-   "max_steps": 2469,
-   "num_train_epochs": 1,
-   "total_flos": 1.1608261301380424e+19,
-   "trial_name": null,
-   "trial_params": null
- }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:839f65dcbffe149e64e74ecde13830fc1e30c874f884e617aadad73ff74a039e
+ oid sha256:6e91b39976977c0a605521323547644a3699b13dbfd044f77607812c3c5cc2b4
  size 2991
vocab.json CHANGED
@@ -1 +1 @@
- {"'": 1, "a": 2, "b": 3, "c": 4, "d": 5, "e": 6, "f": 7, "g": 8, "h": 9, "i": 10, "j": 11, "k": 12, "l": 13, "m": 14, "n": 15, "o": 16, "p": 17, "q": 18, "r": 19, "s": 20, "t": 21, "u": 22, "v": 23, "w": 24, "x": 25, "y": 26, "z": 27, "à": 28, "á": 29, "â": 30, "ä": 31, "å": 32, "æ": 33, "ç": 34, "è": 35, "é": 36, "ê": 37, "ë": 38, "ì": 39, "í": 40, "î": 41, "ï": 42, "ñ": 43, "ò": 44, "ó": 45, "ô": 46, "õ": 47, "ö": 48, "ø": 49, "ù": 50, "ú": 51, "û": 52, "ü": 53, "þ": 54, "ÿ": 55, "ā": 56, "ć": 57, "ċ": 58, "č": 59, "đ": 60, "ē": 61, "ġ": 62, "ī": 63, "ı": 64, "ķ": 65, "ĺ": 66, "ļ": 67, "ľ": 68, "ł": 69, "ń": 70, "ņ": 71, "ň": 72, "ō": 73, "ŏ": 74, "ő": 75, "œ": 76, "ř": 77, "ś": 78, "ş": 79, "š": 80, "ţ": 81, "ť": 82, "ũ": 83, "ū": 84, "ů": 85, "ű": 86, "ų": 87, "ź": 88, "ż": 89, "ž": 90, "ơ": 91, "ư": 92, "ǀ": 93, "ǃ": 94, "ǎ": 95, "ǔ": 96, "ǫ": 97, "ǹ": 98, "ș": 99, "ț": 100, "ə": 101, "ɨ": 102, "ʉ": 103, "ʔ": 104, "ʻ": 105, "ʼ": 106, "ʽ": 107, "ʾ": 108, "ʿ": 109, "ː": 110, "ˢ": 111, "̀": 112, "́": 113, "̂": 114, "̃": 115, "̇": 116, "̈": 117, "̐": 118, "̠": 119, "̧": 120, "̱": 121, "̲": 122, "α": 123, "β": 124, "γ": 125, "δ": 126, "ε": 127, "ζ": 128, "η": 129, "θ": 130, "ι": 131, "κ": 132, "λ": 133, "μ": 134, "ν": 135, "ο": 136, "π": 137, "ρ": 138, "ς": 139, "σ": 140, "τ": 141, "υ": 142, "φ": 143, "χ": 144, "ψ": 145, "ω": 146, "ό": 147, "а": 148, "г": 149, "е": 150, "з": 151, "и": 152, "к": 153, "м": 154, "н": 155, "п": 156, "р": 157, "ц": 158, "ч": 159, "э": 160, "я": 161, "є": 162, "і": 163, "ј": 164, "ҫ": 165, "ӌ": 166, "գ": 167, "զ": 168, "ا": 169, "ب": 170, "ة": 171, "د": 172, "ر": 173, "ل": 174, "م": 175, "ن": 176, "و": 177, "ي": 178, "ቀ": 179, "ከ": 180, "ወ": 181, "ደ": 182, "ጀ": 183, "ጠ": 184, "ḍ": 185, "ḥ": 186, "ṅ": 187, "ṇ": 188, "ṣ": 189, "ṭ": 190, "ṯ": 191, "ẓ": 192, "ạ": 193, "ả": 194, "ầ": 195, "ậ": 196, "ắ": 197, "ẵ": 198, "ế": 199, "ề": 200, "ễ": 201, "ệ": 202, "ị": 203, "ố": 204, "ồ": 205, "ổ": 206, "ộ": 207, "ớ": 208, "ờ": 209, "ợ": 210, "ụ": 211, "ủ": 212, "ứ": 213, "ử": 214, "ỳ": 215, "‐": 216, "―": 217, "„": 218, "†": 219, "′": 220, "‹": 221, "›": 222, "€": 223, "₽": 224, "ℂ": 225, "ℕ": 226, "ℚ": 227, "ℝ": 228, "ℤ": 229, "ℰ": 230, "ℵ": 231, "→": 232, "↔": 233, "∅": 234, "∆": 235, "∈": 236, "−": 237, "∞": 238, "∨": 239, "∼": 240, "≥": 241, "⋅": 242, "─": 243, "☉": 244, "ⱅ": 245, "ⱎ": 246, "い": 247, "う": 248, "た": 249, "つ": 250, "ぬ": 251, "の": 252, "ひ": 253, "へ": 254, "ま": 255, "む": 256, "め": 257, "も": 258, "や": 259, "ゔ": 260, "丹": 261, "乃": 262, "京": 263, "北": 264, "扬": 265, "文": 266, "星": 267, "术": 268, "杜": 269, "牡": 270, "甌": 271, "美": 272, "西": 273, "貴": 274, "青": 275, "馆": 276, "ꝑ": 277, "고": 278, "기": 279, "먹": 280, "삼": 281, "|": 0, "[UNK]": 282, "[PAD]": 283}
+ {"'": 1, "a": 2, "b": 3, "c": 4, "d": 5, "e": 6, "f": 7, "g": 8, "h": 9, "i": 10, "j": 11, "k": 12, "l": 13, "m": 14, "n": 15, "o": 16, "p": 17, "q": 18, "r": 19, "s": 20, "t": 21, "u": 22, "v": 23, "w": 24, "x": 25, "y": 26, "z": 27, "à": 28, "â": 29, "ç": 30, "è": 31, "é": 32, "ê": 33, "î": 34, "ï": 35, "ô": 36, "ù": 37, "û": 38, "|": 0, "[UNK]": 39, "[PAD]": 40}
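
The new vocabulary is exactly the word delimiter `|`, the apostrophe, `a`-`z`, eleven accented French letters, `[UNK]`, and `[PAD]`. Note that it covers a slightly smaller accent set than the `eval.py` normalizer above, which can be checked with a short sketch against a local clone:

```python
import json

with open("vocab.json") as f:
    vocab = json.load(f)

# Characters kept by eval.py's normalize_text but absent from the vocab;
# the model has no token ids for these, so it can never output them.
normalizer_keeps = set("abcdefghijklmnopqrstuvwxyzàâäçéèêëîïôöùûüÿ'")
print(sorted(normalizer_keeps - set(vocab)))  # ['ä', 'ë', 'ö', 'ü', 'ÿ']

assert len(vocab) == 41 and vocab["[PAD]"] == 40
```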
wandb/debug-internal.log ADDED
@@ -0,0 +1 @@
+ run-20220130_224738-2uzt3kt1/logs/debug-internal.log
wandb/debug.log ADDED
@@ -0,0 +1 @@
+ run-20220130_224738-2uzt3kt1/logs/debug.log
wandb/latest-run ADDED
@@ -0,0 +1 @@
+ run-20220130_224738-2uzt3kt1
wandb/run-20220130_224738-2uzt3kt1/files/conda-environment.yaml ADDED
File without changes
wandb/run-20220130_224738-2uzt3kt1/files/config.yaml ADDED
@@ -0,0 +1,672 @@
+ wandb_version: 1
+
+ _n_gpu: 1
+ _name_or_path: facebook/wav2vec2-xls-r-300m
+ _wandb:
+   cli_version: 0.12.9
+   framework: huggingface
+   huggingface_version: 4.17.0.dev0
+   is_jupyter_run: false
+   is_kaggle_kernel: false
+   m:
+   - {1: train/global_step, 6: [3]}
+   - {1: train/train_runtime, 5: 1, 6: [1]}
+   - {1: train/train_samples_per_second, 5: 1, 6: [1]}
+   - {1: train/train_steps_per_second, 5: 1, 6: [1]}
+   - {1: train/total_flos, 5: 1, 6: [1]}
+   - {1: train/train_loss, 5: 1, 6: [1]}
+   - {1: train/epoch, 5: 1, 6: [1]}
+   python_version: 3.8.8
+   start_time: 1643582858
+   t: {1: [1, 5, 11], 3: [13], 4: 3.8.8, 5: 0.12.9, 6: 4.17.0.dev0, 8: [5]}
+ activation_dropout: 0.1
+ adafactor: false
+ adam_beta1: 0.9
+ adam_beta2: 0.999
+ adam_epsilon: 1.0e-08
+ adapter_kernel_size: 3
+ adapter_stride: 2
+ add_adapter: false
+ add_cross_attention: false
+ apply_spec_augment: true
+ architectures: [Wav2Vec2ForPreTraining]
+ attention_dropout: 0.0
+ bad_words_ids: null
+ bf16: false
+ bf16_full_eval: false
+ bos_token_id: 1
+ chunk_size_feed_forward: 0
+ classifier_proj_size: 256
+ codevector_dim: 768
+ contrastive_logits_temperature: 0.1
+ conv_bias: true
+ conv_dim: [512, 512, 512, 512, 512, 512, 512]
+ conv_kernel: [10, 3, 3, 3, 3, 2, 2]
+ conv_stride: [5, 2, 2, 2, 2, 2, 2]
+ cross_attention_hidden_size: null
+ ctc_loss_reduction: mean
+ ctc_zero_infinity: false
+ dataloader_drop_last: false
+ dataloader_num_workers: 0
+ dataloader_pin_memory: true
+ ddp_bucket_cap_mb: None
+ ddp_find_unused_parameters: None
+ debug: '[]'
+ decoder_start_token_id: null
+ deepspeed: None
+ disable_tqdm: false
+ diversity_loss_weight: 0.1
+ diversity_penalty: 0.0
+ do_eval: true
+ do_predict: false
+ do_sample: false
+ do_stable_layer_norm: true
+ do_train: true
+ early_stopping: false
+ encoder_no_repeat_ngram_size: 0
+ eos_token_id: 2
+ eval_accumulation_steps: None
+ eval_batch_size: 8
+ eval_steps: 500
+ evaluation_strategy: steps
+ feat_extract_activation: gelu
+ feat_extract_dropout: 0.0
+ feat_extract_norm: layer
+ feat_proj_dropout: 0.0
+ feat_quantizer_dropout: 0.0
+ final_dropout: 0.0
+ finetuning_task: null
+ forced_bos_token_id: null
+ forced_eos_token_id: null
+ fp16: true
+ fp16_backend: auto
+ fp16_full_eval: false
+ fp16_opt_level: O1
+ gradient_accumulation_steps: 8
+ gradient_checkpointing: true
+ greater_is_better: false
+ group_by_length: true
+ half_precision_backend: amp
+ hidden_act: gelu
+ hidden_dropout: 0.0
+ hidden_size: 1024
+ hub_model_id: None
+ hub_strategy: every_save
+ hub_token: <HUB_TOKEN>
+ id2label: {'0': LABEL_0, '1': LABEL_1}
+ ignore_data_skip: false
+ initializer_range: 0.02
+ intermediate_size: 4096
+ is_decoder: false
+ is_encoder_decoder: false
+ label2id: {LABEL_0: 0, LABEL_1: 1}
+ label_names: None
+ label_smoothing_factor: 0.0
+ layer_norm_eps: 1.0e-05
+ layerdrop: 0.0
+ learning_rate: 7.5e-05
+ length_column_name: input_length
+ length_penalty: 1.0
+ load_best_model_at_end: true
+ local_rank: -1
+ log_level: -1
+ log_level_replica: -1
+ log_on_each_node: true
+ logging_dir: ./runs/Jan30_22-46-41_job-3261699b-76eb-4c28-8419-66a66c5c9199
+ logging_first_step: false
+ logging_nan_inf_filter: true
+ logging_steps: 100
+ logging_strategy: steps
+ lr_scheduler_type: linear
+ mask_feature_length: 64
+ mask_feature_min_masks: 0
+ mask_feature_prob: 0.25
+ mask_time_length: 10
+ mask_time_min_masks: 2
+ mask_time_prob: 0.75
+ max_grad_norm: 1.0
+ max_length: 20
+ max_steps: -1
+ metric_for_best_model: loss
+ min_length: 0
+ model_type: wav2vec2
+ mp_parameters: ''
+ no_cuda: false
+ no_repeat_ngram_size: 0
+ num_adapter_layers: 3
+ num_attention_heads: 16
+ num_beam_groups: 1
+ num_beams: 1
+ num_codevector_groups: 2
+ num_codevectors_per_group: 320
+ num_conv_pos_embedding_groups: 16
+ num_conv_pos_embeddings: 128
+ num_feat_extract_layers: 7
+ num_hidden_layers: 24
+ num_negatives: 100
+ num_return_sequences: 1
+ num_train_epochs: 0.2
+ optim: adamw_hf
+ output_attentions: false
+ output_dir: ./
+ output_hidden_size: 1024
+ output_hidden_states: false
+ output_scores: false
+ overwrite_output_dir: true
+ pad_token_id: 40
+ past_index: -1
+ per_device_eval_batch_size: 8
+ per_device_train_batch_size: 8
+ per_gpu_eval_batch_size: None
+ per_gpu_train_batch_size: None
+ prediction_loss_only: false
+ prefix: null
+ problem_type: null
+ proj_codevector_dim: 768
+ pruned_heads: {}
+ push_to_hub: true
+ push_to_hub_model_id: None
+ push_to_hub_organization: None
+ push_to_hub_token: <PUSH_TO_HUB_TOKEN>
+ remove_invalid_values: false
+ remove_unused_columns: true
+ repetition_penalty: 1.0
+ report_to: '[''wandb'']'
+ resume_from_checkpoint: None
+ return_dict: true
+ return_dict_in_generate: false
+ run_name: ./
+ save_on_each_node: false
+ save_steps: 500
+ save_strategy: steps
+ save_total_limit: 3
+ seed: 42
+ sep_token_id: null
+ sharded_ddp: '[]'
+ skip_memory_metrics: true
+ task_specific_params: null
+ tdnn_dilation: [1, 2, 3, 1, 1]
+ tdnn_dim: [512, 512, 512, 512, 1500]
+ tdnn_kernel: [5, 3, 3, 1, 1]
+ temperature: 1.0
+ tf32: None
+ tie_encoder_decoder: false
+ tie_word_embeddings: true
+ tokenizer_class: null
+ top_k: 50
+ top_p: 1.0
+ torch_dtype: float32
+ torchscript: false
+ tpu_metrics_debug: false
+ tpu_num_cores: None
+ train_batch_size: 8
+ transformers_version: 4.17.0.dev0
+ use_bfloat16: false
+ use_legacy_prediction_loop: false
+ use_weighted_layer_sum: false
+ vocab_size: 41
+ warmup_ratio: 0.0
+ warmup_steps: 2000
+ weight_decay: 0.0
+ xpu_backend: None
+ xvector_output_dim: 512
wandb/run-20220130_224738-2uzt3kt1/files/output.log ADDED
@@ -0,0 +1,14 @@
+
+
+
+  67%|██████████████████████████████████████████████████████████████████████████████████████████ | 2/3 [00:07<00:03, 3.88s/it]
+ 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 3/3 [00:10<00:00, 3.23s/it]
+ Training completed. Do not forget to share your model on huggingface.co/models =)
+ 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| 3/3 [00:10<00:00, 3.48s/it]
+ Saving model checkpoint to ./
+ Configuration saved in ./config.json
+ Model weights saved in ./pytorch_model.bin
+ Configuration saved in ./preprocessor_config.json
+ Saving model checkpoint to ./
+ Configuration saved in ./config.json
+ Model weights saved in ./pytorch_model.bin
wandb/run-20220130_224738-2uzt3kt1/files/requirements.txt ADDED
@@ -0,0 +1,180 @@
+ aiohttp==3.8.1
+ aiosignal==1.2.0
+ analytics-python==1.4.0
+ anyio==3.5.0
+ appdirs==1.4.4
+ argon2-cffi-bindings==21.2.0
+ argon2-cffi==21.3.0
+ asgiref==3.5.0
+ asttokens==2.0.5
+ async-timeout==4.0.2
+ attrs==21.4.0
+ audioread==2.1.9
+ backcall==0.2.0
+ backoff==1.10.0
+ bcrypt==3.2.0
+ beautifulsoup4==4.9.3
+ black==21.12b0
+ bleach==4.1.0
+ brotlipy==0.7.0
+ certifi==2020.12.5
+ cffi==1.14.3
+ chardet==3.0.4
+ charset-normalizer==2.0.10
+ click==8.0.3
+ conda-build==3.21.4
+ conda-package-handling==1.7.2
+ conda==4.9.2
+ configparser==5.2.0
+ cryptography==3.2.1
+ cycler==0.11.0
+ datasets==1.18.2.dev0
+ debugpy==1.5.1
+ decorator==4.4.2
+ defusedxml==0.7.1
+ dill==0.3.4
+ dnspython==2.1.0
+ docker-pycreds==0.4.0
+ entrypoints==0.3
+ executing==0.8.2
+ fastapi==0.73.0
+ ffmpy==0.3.0
+ filelock==3.0.12
+ fonttools==4.29.0
+ frozenlist==1.3.0
+ fsspec==2022.1.0
+ gitdb==4.0.9
+ gitpython==3.1.26
+ glob2==0.7
+ gradio==2.7.5.2
+ h11==0.13.0
+ huggingface-hub==0.4.0
+ idna==2.10
+ importlib-resources==5.4.0
+ ipykernel==6.7.0
+ ipython-genutils==0.2.0
+ ipython==8.0.1
+ ipywidgets==7.6.3
+ jedi==0.17.0
+ jinja2==2.11.3
+ jiwer==2.3.0
+ joblib==1.1.0
+ json5==0.9.6
+ jsonschema==4.4.0
+ jupyter-client==7.1.2
+ jupyter-core==4.9.1
+ jupyterlab-pygments==0.1.2
+ jupyterlab-server==1.2.0
+ jupyterlab-widgets==1.0.2
+ jupyterlab==2.2.9
+ kiwisolver==1.3.2
+ libarchive-c==2.9
+ librosa==0.8.1
+ llvmlite==0.38.0
+ markdown2==2.4.2
+ markupsafe==1.1.1
+ matplotlib-inline==0.1.3
+ matplotlib==3.5.1
+ mistune==0.8.4
+ mkl-fft==1.3.0
+ mkl-random==1.1.1
+ mkl-service==2.3.0
+ monotonic==1.6
+ multidict==6.0.2
+ multiprocess==0.70.12.2
+ mypy-extensions==0.4.3
+ nano==0.10.0
+ nbclient==0.5.10
+ nbconvert==6.4.1
+ nbformat==5.1.3
+ nest-asyncio==1.5.4
+ notebook==6.4.8
+ numba==0.55.1
+ numpy==1.19.2
+ olefile==0.46
+ packaging==21.3
+ pandas==1.4.0
+ pandocfilters==1.5.0
+ paramiko==2.9.2
+ parso==0.8.1
+ pathspec==0.9.0
+ pathtools==0.1.2
+ pexpect==4.8.0
+ pickleshare==0.7.5
+ pillow==8.1.2
+ pip==21.3.1
+ pkginfo==1.7.0
+ platformdirs==2.4.1
+ pooch==1.6.0
+ prometheus-client==0.13.0
+ promise==2.3
+ prompt-toolkit==3.0.8
+ protobuf==3.19.4
+ psutil==5.8.0
+ ptyprocess==0.7.0
+ pure-eval==0.2.2
+ pyarrow==6.0.1
+ pycosat==0.6.3
+ pycparser==2.20
+ pycryptodome==3.13.0
+ pydantic==1.9.0
+ pydub==0.25.1
+ pygments==2.8.0
+ pynacl==1.5.0
+ pyopenssl==19.1.0
+ pyparsing==3.0.7
+ pyrsistent==0.18.1
+ pysocks==1.7.1
+ python-dateutil==2.8.2
+ python-etcd==0.4.5
+ python-levenshtein==0.12.2
+ python-multipart==0.0.5
+ pytz==2021.1
+ pyyaml==5.4.1
+ pyzmq==22.3.0
+ regex==2022.1.18
+ requests==2.24.0
+ resampy==0.2.2
+ ruamel-yaml==0.15.87
+ sacremoses==0.0.47
+ scikit-learn==1.0.2
+ scipy==1.7.3
+ send2trash==1.8.0
+ sentry-sdk==1.5.4
+ setuptools==50.3.1.post20201107
+ shortuuid==1.0.8
+ six==1.15.0
+ smmap==5.0.0
+ sniffio==1.2.0
+ soundfile==0.10.3.post1
+ soupsieve==2.2
+ stack-data==0.1.4
+ starlette==0.17.1
+ subprocess32==3.5.4
+ termcolor==1.1.0
+ terminado==0.13.1
+ testpath==0.5.0
+ threadpoolctl==3.0.0
+ tokenizers==0.11.4
+ tomli==1.2.3
+ torch==1.10.2
+ torchaudio==0.10.2
+ torchelastic==0.2.2
+ torchtext==0.9.1
+ torchvision==0.9.1
+ tornado==6.1
+ tqdm==4.62.3
+ traitlets==5.1.1
+ transformers==4.17.0.dev0
+ typing-extensions==4.0.1
+ urllib3==1.25.11
+ uvicorn==0.17.1
+ wandb==0.12.9
+ wcwidth==0.2.5
+ webencodings==0.5.1
+ wheel==0.35.1
+ widgetsnbextension==3.5.2
+ xxhash==2.0.2
+ yarl==1.7.2
+ yaspin==2.1.0
+ zipp==3.7.0
wandb/run-20220130_224738-2uzt3kt1/files/wandb-metadata.json ADDED
@@ -0,0 +1,63 @@
+ {
+   "os": "Linux-4.15.0-151-generic-x86_64-with-glibc2.10",
+   "python": "3.8.8",
+   "heartbeatAt": "2022-01-30T22:47:39.607019",
+   "startedAt": "2022-01-30T22:47:38.310593",
+   "docker": null,
+   "gpu": "Tesla V100S-PCIE-32GB",
+   "gpu_count": 1,
+   "cpu_count": 60,
+   "cuda": null,
+   "args": [
+     "--activation_dropout=0.1",
+     "--dataset_name=mozilla-foundation/common_voice_7_0",
+     "--dataset_config_name=fr",
+     "--eval_steps=500",
+     "--evaluation_strategy=steps",
+     "--feat_proj_dropout=0.0",
+     "--freeze_feature_encoder",
+     "--fp16",
+     "--gradient_accumulation_steps=8",
+     "--gradient_checkpointing",
+     "--group_by_length",
+     "--layerdrop=0.0",
+     "--learning_rate=7.5e-5",
+     "--length_column_name=input_length",
+     "--load_best_model_at_end",
+     "--logging_steps=100",
+     "--mask_feature_length=64",
+     "--mask_feature_prob=0.25",
+     "--mask_time_length=10",
+     "--mask_time_prob=0.75",
+     "--max_train_samples=1000",
+     "--max_eval_samples=200",
+     "--model_name_or_path=facebook/wav2vec2-xls-r-300m",
+     "--num_train_epochs=0.2",
+     "--output_dir=./",
+     "--overwrite_output_dir",
+     "--per_device_train_batch_size=8",
+     "--per_device_eval_batch_size=8",
+     "--preprocessing_num_workers=4",
+     "--push_to_hub",
+     "--report_to=wandb",
+     "--save_steps=500",
+     "--save_total_limit=3",
+     "--text_column_name=sentence",
+     "--use_auth_token",
+     "--warmup_steps=2000",
+     "--do_train",
+     "--do_eval"
+   ],
+   "state": "running",
+   "program": "run_speech_recognition_ctc.py",
+   "codePath": "run_speech_recognition_ctc.py",
+   "git": {
+     "remote": "https://huggingface.co/Plim/xls-r-300m-fr",
+     "commit": "1d172876193bf100999c8d09d283f8d0894252f2"
+   },
+   "email": "[email protected]",
+   "root": "/workspace/xls-r-300m-fr",
+   "host": "job-3261699b-76eb-4c28-8419-66a66c5c9199",
+   "username": "ovh",
+   "executable": "/opt/conda/bin/python"
+ }
wandb/run-20220130_224738-2uzt3kt1/files/wandb-summary.json ADDED
@@ -0,0 +1 @@
+ {"train/train_runtime": 12.893, "train/train_samples_per_second": 15.512, "train/train_steps_per_second": 0.233, "train/total_flos": 2.67196543170048e+16, "train/train_loss": 12.496875762939453, "train/epoch": 0.19, "train/global_step": 3, "_runtime": 12, "_timestamp": 1643582870, "_step": 0}
wandb/run-20220130_224738-2uzt3kt1/logs/debug-internal.log ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2022-01-30 22:47:39,297 INFO MainThread:23196 [internal.py:wandb_internal():87] W&B internal server running at pid: 23196, started at: 2022-01-30 22:47:39.296970
2
+ 2022-01-30 22:47:39,300 INFO WriterThread:23196 [datastore.py:open_for_write():77] open: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/run-2uzt3kt1.wandb
3
+ 2022-01-30 22:47:39,301 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: check_version
4
+ 2022-01-30 22:47:39,304 DEBUG SenderThread:23196 [sender.py:send():234] send: header
5
+ 2022-01-30 22:47:39,304 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: check_version
6
+ 2022-01-30 22:47:39,377 DEBUG SenderThread:23196 [sender.py:send():234] send: run
7
+ 2022-01-30 22:47:39,597 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: run_start
8
+ 2022-01-30 22:47:39,599 INFO SenderThread:23196 [dir_watcher.py:__init__():169] watching files in: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files
9
+ 2022-01-30 22:47:39,600 INFO SenderThread:23196 [sender.py:_start_run_threads():804] run started: 2uzt3kt1 with start time 1643582858
10
+ 2022-01-30 22:47:39,600 DEBUG SenderThread:23196 [sender.py:send():234] send: summary
11
+ 2022-01-30 22:47:39,600 INFO SenderThread:23196 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
12
+ 2022-01-30 22:47:39,606 DEBUG HandlerThread:23196 [meta.py:__init__():40] meta init
13
+ 2022-01-30 22:47:39,606 DEBUG HandlerThread:23196 [meta.py:__init__():54] meta init done
14
+ 2022-01-30 22:47:39,606 DEBUG HandlerThread:23196 [meta.py:probe():214] probe
15
+ 2022-01-30 22:47:39,615 DEBUG HandlerThread:23196 [meta.py:_setup_git():204] setup git
16
+ 2022-01-30 22:47:39,653 DEBUG HandlerThread:23196 [meta.py:_setup_git():211] setup git done
17
+ 2022-01-30 22:47:39,653 DEBUG HandlerThread:23196 [meta.py:_save_pip():58] save pip
18
+ 2022-01-30 22:47:39,654 DEBUG HandlerThread:23196 [meta.py:_save_pip():72] save pip done
19
+ 2022-01-30 22:47:39,655 DEBUG HandlerThread:23196 [meta.py:_save_conda():79] save conda
20
+ 2022-01-30 22:47:40,176 DEBUG HandlerThread:23196 [meta.py:_save_conda():89] save conda done
21
+ 2022-01-30 22:47:40,176 DEBUG HandlerThread:23196 [meta.py:probe():252] probe done
22
+ 2022-01-30 22:47:40,185 DEBUG SenderThread:23196 [sender.py:send():234] send: files
23
+ 2022-01-30 22:47:40,186 INFO SenderThread:23196 [sender.py:_save_file():939] saving file wandb-metadata.json with policy now
24
+ 2022-01-30 22:47:40,197 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: stop_status
25
+ 2022-01-30 22:47:40,198 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: stop_status
26
+ 2022-01-30 22:47:40,354 DEBUG SenderThread:23196 [sender.py:send():234] send: config
27
+ 2022-01-30 22:47:40,357 DEBUG SenderThread:23196 [sender.py:send():234] send: metric
28
+ 2022-01-30 22:47:40,357 DEBUG SenderThread:23196 [sender.py:send():234] send: metric
29
+ 2022-01-30 22:47:40,357 WARNING SenderThread:23196 [sender.py:send_metric():897] Seen metric with glob (shouldnt happen)
30
+ 2022-01-30 22:47:40,601 INFO Thread-8 :23196 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/requirements.txt
31
+ 2022-01-30 22:47:40,601 INFO Thread-8 :23196 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/wandb-summary.json
32
+ 2022-01-30 22:47:40,601 INFO Thread-8 :23196 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
33
+ 2022-01-30 22:47:40,601 INFO Thread-8 :23196 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/wandb-metadata.json
34
+ 2022-01-30 22:47:40,601 INFO Thread-8 :23196 [dir_watcher.py:_on_file_created():217] file/dir created: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/conda-environment.yaml
35
+ 2022-01-30 22:47:40,709 INFO Thread-11 :23196 [upload_job.py:push():137] Uploaded file /tmp/tmp51rrl_mrwandb/2urk4c1m-wandb-metadata.json
36
+ 2022-01-30 22:47:42,601 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
37
+ 2022-01-30 22:47:46,604 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
+ 2022-01-30 22:47:50,606 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
+ 2022-01-30 22:47:50,676 DEBUG SenderThread:23196 [sender.py:send():234] send: metric
+ 2022-01-30 22:47:50,677 DEBUG SenderThread:23196 [sender.py:send():234] send: metric
+ 2022-01-30 22:47:50,677 DEBUG SenderThread:23196 [sender.py:send():234] send: metric
+ 2022-01-30 22:47:50,677 DEBUG SenderThread:23196 [sender.py:send():234] send: metric
+ 2022-01-30 22:47:50,677 DEBUG SenderThread:23196 [sender.py:send():234] send: metric
+ 2022-01-30 22:47:50,677 DEBUG SenderThread:23196 [sender.py:send():234] send: metric
+ 2022-01-30 22:47:50,678 DEBUG SenderThread:23196 [sender.py:send():234] send: history
+ 2022-01-30 22:47:50,678 DEBUG SenderThread:23196 [sender.py:send():234] send: summary
+ 2022-01-30 22:47:50,679 INFO SenderThread:23196 [sender.py:_save_file():939] saving file wandb-summary.json with policy end
+ 2022-01-30 22:47:51,607 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/wandb-summary.json
+ 2022-01-30 22:47:52,608 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
+ 2022-01-30 22:47:54,610 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
+ 2022-01-30 22:47:55,488 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-01-30 22:47:55,488 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: stop_status
+ 2022-01-30 22:47:56,611 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/output.log
+ 2022-01-30 22:48:07,838 DEBUG SenderThread:23196 [sender.py:send():234] send: stats
+ 2022-01-30 22:48:10,621 INFO Thread-8 :23196 [dir_watcher.py:_on_file_modified():230] file/dir modified: /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/files/config.yaml
+ 2022-01-30 22:48:10,741 DEBUG HandlerThread:23196 [handler.py:handle_request():130] handle_request: stop_status
+ 2022-01-30 22:48:10,742 DEBUG SenderThread:23196 [sender.py:send_request():248] send_request: stop_status
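The `debug-internal.log` lines above trace wandb's background process (pid 23196): the `HandlerThread` probes the host (git, pip, conda metadata) while the `SenderThread` ships config, metrics, and files, saving `wandb-summary.json` with policy `end` and `wandb-metadata.json` with policy `now`. The `Seen metric with glob` warning fires when a metric is defined with a wildcard pattern. As a minimal sketch (assumed for illustration, not code from this commit), the kind of calls that produce those `send: metric` events and the glob warning look like:

```python
# Minimal sketch of wandb metric definitions (assumed for illustration;
# the HF Trainer's wandb integration registers metrics in a similar way).
import wandb

run = wandb.init(project="xls-r-300m-fr")  # project name assumed from the workspace dir

# Plot every logged series against the trainer's global step; the "*" glob
# is what the SenderThread warns about in the log above.
wandb.define_metric("train/global_step")
wandb.define_metric("*", step_metric="train/global_step", step_sync=True)

wandb.log({"train/loss": 6.9106, "train/global_step": 500})
run.finish()
```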
wandb/run-20220130_224738-2uzt3kt1/logs/debug.log ADDED
@@ -0,0 +1,24 @@
+ 2022-01-30 22:47:38,315 INFO MainThread:22602 [wandb_setup.py:_flush():71] setting env: {}
+ 2022-01-30 22:47:38,315 INFO MainThread:22602 [wandb_setup.py:_flush():71] setting login settings: {}
+ 2022-01-30 22:47:38,315 INFO MainThread:22602 [wandb_init.py:_log_setup():371] Logging user logs to /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/logs/debug.log
+ 2022-01-30 22:47:38,315 INFO MainThread:22602 [wandb_init.py:_log_setup():372] Logging internal logs to /workspace/xls-r-300m-fr/wandb/run-20220130_224738-2uzt3kt1/logs/debug-internal.log
+ 2022-01-30 22:47:38,316 INFO MainThread:22602 [wandb_init.py:init():404] calling init triggers
+ 2022-01-30 22:47:38,316 INFO MainThread:22602 [wandb_init.py:init():409] wandb.init called with sweep_config: {}
+ config: {}
+ 2022-01-30 22:47:38,316 INFO MainThread:22602 [wandb_init.py:init():460] starting backend
+ 2022-01-30 22:47:38,316 INFO MainThread:22602 [backend.py:_multiprocessing_setup():99] multiprocessing start_methods=fork,spawn,forkserver, using: spawn
+ 2022-01-30 22:47:38,420 INFO MainThread:22602 [backend.py:ensure_launched():216] starting backend process...
+ 2022-01-30 22:47:38,497 INFO MainThread:22602 [backend.py:ensure_launched():221] started backend process with pid: 23196
+ 2022-01-30 22:47:38,499 INFO MainThread:22602 [wandb_init.py:init():469] backend started and connected
+ 2022-01-30 22:47:38,508 INFO MainThread:22602 [wandb_init.py:init():533] updated telemetry
+ 2022-01-30 22:47:38,663 INFO MainThread:22602 [wandb_init.py:init():563] communicating current version
+ 2022-01-30 22:47:39,375 INFO MainThread:22602 [wandb_init.py:init():568] got version response
+ 2022-01-30 22:47:39,375 INFO MainThread:22602 [wandb_init.py:init():578] communicating run to backend with 30 second timeout
+ 2022-01-30 22:47:39,596 INFO MainThread:22602 [wandb_init.py:init():606] starting run threads in backend
+ 2022-01-30 22:47:40,195 INFO MainThread:22602 [wandb_run.py:_console_start():1810] atexit reg
+ 2022-01-30 22:47:40,196 INFO MainThread:22602 [wandb_run.py:_redirect():1684] redirect: SettingsConsole.REDIRECT
+ 2022-01-30 22:47:40,197 INFO MainThread:22602 [wandb_run.py:_redirect():1689] Redirecting console.
+ 2022-01-30 22:47:40,203 INFO MainThread:22602 [wandb_run.py:_redirect():1745] Redirects installed.
+ 2022-01-30 22:47:40,203 INFO MainThread:22602 [wandb_init.py:init():633] run started, returning control to user process
+ 2022-01-30 22:47:40,206 INFO MainThread:22602 [wandb_run.py:_config_callback():956] config_cb None None {'return_dict': True, 'output_hidden_states': False, 'output_attentions': False, 'torchscript': False, 'torch_dtype': 'float32', 'use_bfloat16': False, 'pruned_heads': {}, 'tie_word_embeddings': True, 'is_encoder_decoder': False, 'is_decoder': False, 'cross_attention_hidden_size': None, 'add_cross_attention': False, 'tie_encoder_decoder': False, 'max_length': 20, 'min_length': 0, 'do_sample': False, 'early_stopping': False, 'num_beams': 1, 'num_beam_groups': 1, 'diversity_penalty': 0.0, 'temperature': 1.0, 'top_k': 50, 'top_p': 1.0, 'repetition_penalty': 1.0, 'length_penalty': 1.0, 'no_repeat_ngram_size': 0, 'encoder_no_repeat_ngram_size': 0, 'bad_words_ids': None, 'num_return_sequences': 1, 'chunk_size_feed_forward': 0, 'output_scores': False, 'return_dict_in_generate': False, 'forced_bos_token_id': None, 'forced_eos_token_id': None, 'remove_invalid_values': False, 'architectures': ['Wav2Vec2ForPreTraining'], 'finetuning_task': None, 'id2label': {0: 'LABEL_0', 1: 'LABEL_1'}, 'label2id': {'LABEL_0': 0, 'LABEL_1': 1}, 'tokenizer_class': None, 'prefix': None, 'bos_token_id': 1, 'pad_token_id': 40, 'eos_token_id': 2, 'sep_token_id': None, 'decoder_start_token_id': None, 'task_specific_params': None, 'problem_type': None, '_name_or_path': 'facebook/wav2vec2-xls-r-300m', 'transformers_version': '4.17.0.dev0', 'feat_extract_dropout': 0.0, 'model_type': 'wav2vec2', 'num_feat_extract_layers': 7, 'hidden_size': 1024, 'feat_extract_norm': 'layer', 'feat_extract_activation': 'gelu', 'conv_dim': [512, 512, 512, 512, 512, 512, 512], 'conv_stride': [5, 2, 2, 2, 2, 2, 2], 'conv_kernel': [10, 3, 3, 3, 3, 2, 2], 'conv_bias': True, 'num_conv_pos_embeddings': 128, 'num_conv_pos_embedding_groups': 16, 'num_hidden_layers': 24, 'intermediate_size': 4096, 'hidden_act': 'gelu', 'num_attention_heads': 16, 'hidden_dropout': 0.0, 'attention_dropout': 0.0, 'activation_dropout': 0.1, 'feat_proj_dropout': 0.0, 'final_dropout': 0.0, 'layerdrop': 0.0, 'layer_norm_eps': 1e-05, 'initializer_range': 0.02, 'vocab_size': 41, 'do_stable_layer_norm': True, 'use_weighted_layer_sum': False, 'apply_spec_augment': True, 'mask_time_prob': 0.75, 'mask_time_length': 10, 'mask_time_min_masks': 2, 'mask_feature_prob': 0.25, 'mask_feature_length': 64, 'mask_feature_min_masks': 0, 'num_codevectors_per_group': 320, 'num_codevector_groups': 2, 'contrastive_logits_temperature': 0.1, 'feat_quantizer_dropout': 0.0, 'num_negatives': 100, 'codevector_dim': 768, 'proj_codevector_dim': 768, 'diversity_loss_weight': 0.1, 'ctc_loss_reduction': 'mean', 'ctc_zero_infinity': False, 'add_adapter': False, 'adapter_kernel_size': 3, 'adapter_stride': 2, 'num_adapter_layers': 3, 'output_hidden_size': 1024, 'classifier_proj_size': 256, 'tdnn_dim': [512, 512, 512, 512, 1500], 'tdnn_kernel': [5, 3, 3, 1, 1], 'tdnn_dilation': [1, 2, 3, 1, 1], 'xvector_output_dim': 512, 'output_dir': './', 'overwrite_output_dir': True, 'do_train': True, 'do_eval': True, 'do_predict': False, 'evaluation_strategy': 'steps', 'prediction_loss_only': False, 'per_device_train_batch_size': 8, 'per_device_eval_batch_size': 8, 'per_gpu_train_batch_size': 'None', 'per_gpu_eval_batch_size': 'None', 'gradient_accumulation_steps': 8, 'eval_accumulation_steps': 'None', 'learning_rate': 7.5e-05, 'weight_decay': 0.0, 'adam_beta1': 0.9, 'adam_beta2': 0.999, 'adam_epsilon': 1e-08, 'max_grad_norm': 1.0, 'num_train_epochs': 0.2, 'max_steps': -1, 'lr_scheduler_type': 'linear', 'warmup_ratio': 0.0, 'warmup_steps': 2000, 'log_level': -1, 'log_level_replica': -1, 'log_on_each_node': True, 'logging_dir': './runs/Jan30_22-46-41_job-3261699b-76eb-4c28-8419-66a66c5c9199', 'logging_strategy': 'steps', 'logging_first_step': False, 'logging_steps': 100, 'logging_nan_inf_filter': True, 'save_strategy': 'steps', 'save_steps': 500, 'save_total_limit': 3, 'save_on_each_node': False, 'no_cuda': False, 'seed': 42, 'bf16': False, 'fp16': True, 'fp16_opt_level': 'O1', 'half_precision_backend': 'amp', 'bf16_full_eval': False, 'fp16_full_eval': False, 'tf32': 'None', 'local_rank': -1, 'xpu_backend': 'None', 'tpu_num_cores': 'None', 'tpu_metrics_debug': False, 'debug': '[]', 'dataloader_drop_last': False, 'eval_steps': 500, 'dataloader_num_workers': 0, 'past_index': -1, 'run_name': './', 'disable_tqdm': False, 'remove_unused_columns': True, 'label_names': 'None', 'load_best_model_at_end': True, 'metric_for_best_model': 'loss', 'greater_is_better': False, 'ignore_data_skip': False, 'sharded_ddp': '[]', 'deepspeed': 'None', 'label_smoothing_factor': 0.0, 'optim': 'adamw_hf', 'adafactor': False, 'group_by_length': True, 'length_column_name': 'input_length', 'report_to': "['wandb']", 'ddp_find_unused_parameters': 'None', 'ddp_bucket_cap_mb': 'None', 'dataloader_pin_memory': True, 'skip_memory_metrics': True, 'use_legacy_prediction_loop': False, 'push_to_hub': True, 'resume_from_checkpoint': 'None', 'hub_model_id': 'None', 'hub_strategy': 'every_save', 'hub_token': '<HUB_TOKEN>', 'gradient_checkpointing': True, 'fp16_backend': 'auto', 'push_to_hub_model_id': 'None', 'push_to_hub_organization': 'None', 'push_to_hub_token': '<PUSH_TO_HUB_TOKEN>', '_n_gpu': 1, 'mp_parameters': '', 'train_batch_size': 8, 'eval_batch_size': 8}
+ 2022-01-30 22:47:40,212 INFO MainThread:22602 [wandb_watch.py:watch():43] Watching
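`debug.log` records the user-process side of the same run: `wandb.init()` spawns the backend (pid 23196), the `_config_callback` snapshots the merged model config and `TrainingArguments`, and the final `Watching` line marks gradients/parameters being watched on the model. For readability, here is a sketch of `TrainingArguments` rebuilt from the values logged above (a reconstruction for reference, not the original training script):

```python
# TrainingArguments rebuilt from the config_cb dump above; every value is
# taken from the log, but this is a reconstruction, not the original script.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="./",
    overwrite_output_dir=True,
    do_train=True,
    do_eval=True,
    evaluation_strategy="steps",
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=8,   # effective train batch size 64 on 1 GPU
    learning_rate=7.5e-05,
    warmup_steps=2000,
    num_train_epochs=0.2,
    lr_scheduler_type="linear",
    logging_steps=100,
    eval_steps=500,
    save_steps=500,
    save_total_limit=3,
    seed=42,
    fp16=True,
    gradient_checkpointing=True,
    group_by_length=True,
    length_column_name="input_length",
    load_best_model_at_end=True,
    metric_for_best_model="loss",
    greater_is_better=False,
    push_to_hub=True,
    hub_strategy="every_save",
    report_to=["wandb"],
)
```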
wandb/run-20220130_224738-2uzt3kt1/run-2uzt3kt1.wandb ADDED
Binary file (5.72 kB).