Commit 53298d9
dmusingu committed
1 parent: be07d8d

Pushing language model

README.md CHANGED
@@ -1,3 +1,107 @@
- ---
- license: apache-2.0
- ---
+ ---
+ base_model: facebook/wav2vec2-xls-r-300m
+ license: apache-2.0
+ metrics:
+ - wer
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: bambara-5-hours-bambara-asr-hf
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # bambara-5-hours-bambara-asr-hf
+
+ This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on an unknown dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 1.8042
+ - Wer: 0.5257
+ - Cer: 0.2399
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 0.0003
+ - train_batch_size: 16
+ - eval_batch_size: 16
+ - seed: 42
+ - gradient_accumulation_steps: 2
+ - total_train_batch_size: 32
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_steps: 500
+ - num_epochs: 50
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Wer | Cer |
+ |:-------------:|:-----:|:----:|:---------------:|:------:|:------:|
+ | 5.6981 | 1.13 | 200 | 2.9919 | 1.0 | 1.0 |
+ | 2.9496 | 2.27 | 400 | 2.9333 | 1.0 | 1.0 |
+ | 2.4081 | 3.4 | 600 | 1.4375 | 0.8869 | 0.4174 |
+ | 1.5268 | 4.53 | 800 | 1.1942 | 0.7734 | 0.3602 |
+ | 1.3291 | 5.67 | 1000 | 1.1261 | 0.6712 | 0.2994 |
+ | 1.2301 | 6.8 | 1200 | 1.0859 | 0.6470 | 0.2892 |
+ | 1.1379 | 7.93 | 1400 | 1.0552 | 0.6314 | 0.2813 |
+ | 1.0664 | 9.07 | 1600 | 1.0684 | 0.6079 | 0.2731 |
+ | 0.9866 | 10.2 | 1800 | 1.0584 | 0.6110 | 0.2756 |
+ | 0.9625 | 11.33 | 2000 | 1.0776 | 0.5757 | 0.2590 |
+ | 0.8828 | 12.46 | 2200 | 1.1372 | 0.5865 | 0.2596 |
+ | 0.8451 | 13.6 | 2400 | 1.0821 | 0.5645 | 0.2574 |
+ | 0.8016 | 14.73 | 2600 | 1.1293 | 0.5754 | 0.2608 |
+ | 0.7615 | 15.86 | 2800 | 1.1312 | 0.5586 | 0.2519 |
+ | 0.715 | 17.0 | 3000 | 1.1657 | 0.5635 | 0.2558 |
+ | 0.6792 | 18.13 | 3200 | 1.2197 | 0.5521 | 0.2517 |
+ | 0.6498 | 19.26 | 3400 | 1.1157 | 0.5606 | 0.2533 |
+ | 0.6221 | 20.4 | 3600 | 1.2632 | 0.5501 | 0.2507 |
+ | 0.5912 | 21.53 | 3800 | 1.1686 | 0.5520 | 0.2505 |
+ | 0.5614 | 22.66 | 4000 | 1.3080 | 0.5547 | 0.2542 |
+ | 0.5485 | 23.8 | 4200 | 1.2349 | 0.5601 | 0.2583 |
+ | 0.5235 | 24.93 | 4400 | 1.2541 | 0.5458 | 0.2489 |
+ | 0.5084 | 26.06 | 4600 | 1.2414 | 0.5500 | 0.2514 |
+ | 0.4741 | 27.2 | 4800 | 1.5119 | 0.5444 | 0.2470 |
+ | 0.4699 | 28.33 | 5000 | 1.2778 | 0.5525 | 0.2524 |
+ | 0.4511 | 29.46 | 5200 | 1.5488 | 0.5502 | 0.2494 |
+ | 0.4181 | 30.59 | 5400 | 1.3489 | 0.5522 | 0.2536 |
+ | 0.4149 | 31.73 | 5600 | 1.5685 | 0.5460 | 0.2454 |
+ | 0.3998 | 32.86 | 5800 | 1.4369 | 0.5434 | 0.2494 |
+ | 0.401 | 33.99 | 6000 | 1.5961 | 0.5376 | 0.2433 |
+ | 0.3744 | 35.13 | 6200 | 1.5695 | 0.5361 | 0.2452 |
+ | 0.36 | 36.26 | 6400 | 1.5968 | 0.5400 | 0.2445 |
+ | 0.3435 | 37.39 | 6600 | 1.6238 | 0.5334 | 0.2424 |
+ | 0.3337 | 38.53 | 6800 | 1.6619 | 0.5340 | 0.2440 |
+ | 0.3232 | 39.66 | 7000 | 1.6444 | 0.5345 | 0.2446 |
+ | 0.3224 | 40.79 | 7200 | 1.7175 | 0.5417 | 0.2451 |
+ | 0.303 | 41.93 | 7400 | 1.6966 | 0.5273 | 0.2417 |
+ | 0.3028 | 43.06 | 7600 | 1.7403 | 0.5325 | 0.2431 |
+ | 0.2899 | 44.19 | 7800 | 1.7688 | 0.5255 | 0.2398 |
+ | 0.2861 | 45.33 | 8000 | 1.7705 | 0.5269 | 0.2405 |
+ | 0.2744 | 46.46 | 8200 | 1.7792 | 0.5244 | 0.2396 |
+ | 0.2731 | 47.59 | 8400 | 1.7876 | 0.5286 | 0.2412 |
+ | 0.27 | 48.73 | 8600 | 1.8060 | 0.5264 | 0.2403 |
+ | 0.2643 | 49.86 | 8800 | 1.8042 | 0.5257 | 0.2399 |
+
+
+ ### Framework versions
+
+ - Transformers 4.38.1
+ - Pytorch 2.1.0+cu118
+ - Datasets 2.17.0
+ - Tokenizers 0.15.2
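
The card above does not include a usage snippet, so here is a minimal inference sketch. It assumes the checkpoint is published under the repo id `dmusingu/bambara-5-hours-bambara-asr-hf` (inferred from the commit author and model name, not stated in the card) and that `example.wav` is a mono recording; adjust both for your setup.

```python
# Hedged sketch: transcribe one audio clip with the fine-tuned checkpoint.
import torch
import librosa
from transformers import Wav2Vec2ForCTC, Wav2Vec2ProcessorWithLM

MODEL_ID = "dmusingu/bambara-5-hours-bambara-asr-hf"  # assumed repo id

processor = Wav2Vec2ProcessorWithLM.from_pretrained(MODEL_ID)
model = Wav2Vec2ForCTC.from_pretrained(MODEL_ID)
model.eval()

# The feature extractor expects 16 kHz mono audio (see preprocessor_config.json below).
speech, _ = librosa.load("example.wav", sr=16_000)
inputs = processor(speech, sampling_rate=16_000, return_tensors="pt", padding=True)

with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits

# Beam-search decoding with the bundled 5-gram KenLM model.
print(processor.batch_decode(logits.numpy()).text[0])
```

The hyperparameters listed above map onto a `TrainingArguments` configuration roughly as follows; this is a sketch only, since the card does not specify the output path, dataset, or `Trainer` wiring.

```python
# Hedged sketch: TrainingArguments matching the hyperparameters in the card.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="bambara-5-hours-bambara-asr-hf",  # placeholder path
    learning_rate=3e-4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    gradient_accumulation_steps=2,   # effective train batch size of 32
    seed=42,
    lr_scheduler_type="linear",
    warmup_steps=500,
    num_train_epochs=50,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
)
```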
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "</s>": 39,
+   "<s>": 38
+ }
alphabet.json ADDED
@@ -0,0 +1 @@
+ {"labels": [" ", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "\u00e1", "\u00e7", "\u00ea", "\u00ed", "\u00fa", "\u014b", "\u0254", "\u025b", "\u0272", "\u2047", "", "<s>", "</s>"], "is_bpe": false}
config.json ADDED
@@ -0,0 +1,109 @@
+ {
+   "_name_or_path": "facebook/wav2vec2-xls-r-300m",
+   "activation_dropout": 0.0,
+   "adapter_attn_dim": null,
+   "adapter_kernel_size": 3,
+   "adapter_stride": 2,
+   "add_adapter": false,
+   "apply_spec_augment": true,
+   "architectures": [
+     "Wav2Vec2ForCTC"
+   ],
+   "attention_dropout": 0.1,
+   "bos_token_id": 1,
+   "classifier_proj_size": 256,
+   "codevector_dim": 768,
+   "contrastive_logits_temperature": 0.1,
+   "conv_bias": true,
+   "conv_dim": [
+     512,
+     512,
+     512,
+     512,
+     512,
+     512,
+     512
+   ],
+   "conv_kernel": [
+     10,
+     3,
+     3,
+     3,
+     3,
+     2,
+     2
+   ],
+   "conv_stride": [
+     5,
+     2,
+     2,
+     2,
+     2,
+     2,
+     2
+   ],
+   "ctc_loss_reduction": "mean",
+   "ctc_zero_infinity": true,
+   "diversity_loss_weight": 0.1,
+   "do_stable_layer_norm": true,
+   "eos_token_id": 2,
+   "feat_extract_activation": "gelu",
+   "feat_extract_dropout": 0.0,
+   "feat_extract_norm": "layer",
+   "feat_proj_dropout": 0.1,
+   "feat_quantizer_dropout": 0.0,
+   "final_dropout": 0.0,
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout": 0.1,
+   "hidden_size": 1024,
+   "initializer_range": 0.02,
+   "intermediate_size": 4096,
+   "layer_norm_eps": 1e-05,
+   "layerdrop": 0.1,
+   "mask_feature_length": 10,
+   "mask_feature_min_masks": 0,
+   "mask_feature_prob": 0.0,
+   "mask_time_length": 10,
+   "mask_time_min_masks": 2,
+   "mask_time_prob": 0.05,
+   "model_type": "wav2vec2",
+   "num_adapter_layers": 3,
+   "num_attention_heads": 16,
+   "num_codevector_groups": 2,
+   "num_codevectors_per_group": 320,
+   "num_conv_pos_embedding_groups": 16,
+   "num_conv_pos_embeddings": 128,
+   "num_feat_extract_layers": 7,
+   "num_hidden_layers": 24,
+   "num_negatives": 100,
+   "output_hidden_size": 1024,
+   "pad_token_id": 37,
+   "proj_codevector_dim": 768,
+   "tdnn_dilation": [
+     1,
+     2,
+     3,
+     1,
+     1
+   ],
+   "tdnn_dim": [
+     512,
+     512,
+     512,
+     512,
+     1500
+   ],
+   "tdnn_kernel": [
+     5,
+     3,
+     3,
+     1,
+     1
+   ],
+   "torch_dtype": "float32",
+   "transformers_version": "4.38.1",
+   "use_weighted_layer_sum": false,
+   "vocab_size": 40,
+   "xvector_output_dim": 512
+ }
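
The config above shows how the 300M-parameter XLS-R encoder (24 transformer layers, hidden size 1024) was adapted for CTC: a 40-entry output vocabulary with the pad token (id 37) doubling as the CTC blank, and `ctc_zero_infinity` enabled. A hedged sketch of recreating an equivalent config from the base checkpoint; the keyword arguments are standard `transformers` config fields, with values taken from the file above.

```python
# Hedged sketch: rebuild an equivalent CTC config on top of the base checkpoint.
from transformers import Wav2Vec2Config, Wav2Vec2ForCTC

config = Wav2Vec2Config.from_pretrained(
    "facebook/wav2vec2-xls-r-300m",
    vocab_size=40,               # 38 base vocab entries plus <s>/</s> added tokens
    pad_token_id=37,             # [PAD] serves as the CTC blank
    ctc_loss_reduction="mean",
    ctc_zero_infinity=True,
)
model = Wav2Vec2ForCTC(config)   # randomly initialized CTC head over the encoder
print(config.num_hidden_layers, config.hidden_size)  # 24, 1024
```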
language_model/5gram.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2a2935b1b23655f32bf0b2f3152f5281180498b0e52c19ef6da1286a31ab7d58
+ size 16490821
language_model/attrs.json ADDED
@@ -0,0 +1 @@
+ {"alpha": 0.5, "beta": 1.5, "unk_score_offset": -10.0, "score_boundary": true}
language_model/unigrams.txt ADDED
The diff for this file is too large to render. See raw diff
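
`alphabet.json`, `language_model/attrs.json`, `5gram.bin`, and `unigrams.txt` together are the on-disk serialization that `Wav2Vec2ProcessorWithLM` uses to build its beam-search decoder. If you want the decoder on its own, a hedged sketch using pyctcdecode directly, assuming the repository has been downloaded locally and that your pyctcdecode version provides `BeamSearchDecoderCTC.load_from_dir`:

```python
# Hedged sketch: load the shipped beam-search decoder directly with pyctcdecode.
from pyctcdecode import BeamSearchDecoderCTC

# Assumes the model repository is cloned into the current directory, so that
# alphabet.json and language_model/ sit next to each other as in this commit.
decoder = BeamSearchDecoderCTC.load_from_dir(".")

# attrs.json stores the decoding weights; they can also be overridden at run time.
decoder.reset_params(alpha=0.5, beta=1.5, unk_score_offset=-10.0, lm_score_boundary=True)

# Given `log_probs`, a (time, 40) numpy array of CTC log-probabilities from the model:
# text = decoder.decode(log_probs)
```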
 
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ca936a926f6dbac2954aa2a699459f3c4533bc27a3edb1919d9ad4032079d2c
+ size 1261971480
preprocessor_config.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "do_normalize": true,
+   "feature_extractor_type": "Wav2Vec2FeatureExtractor",
+   "feature_size": 1,
+   "padding_side": "right",
+   "padding_value": 0.0,
+   "processor_class": "Wav2Vec2ProcessorWithLM",
+   "return_attention_mask": true,
+   "sampling_rate": 16000
+ }
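
The feature extractor above expects mono float audio at 16 kHz with per-utterance normalization and attention masks. Audio recorded at another rate should be resampled first; a minimal sketch, where the file name is a placeholder:

```python
# Hedged sketch: bring arbitrary audio to the 16 kHz rate declared above.
import librosa

speech, sr = librosa.load("recording.wav", sr=None)  # keep the file's native rate
if sr != 16_000:
    speech = librosa.resample(speech, orig_sr=sr, target_sr=16_000)
# `speech` can now be passed to the processor with sampling_rate=16_000.
```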
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "[PAD]",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": true,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "[UNK]",
+     "lstrip": true,
+     "normalized": false,
+     "rstrip": true,
+     "single_word": false
+   }
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,48 @@
+ {
+   "added_tokens_decoder": {
+     "36": {
+       "content": "[UNK]",
+       "lstrip": true,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": false
+     },
+     "37": {
+       "content": "[PAD]",
+       "lstrip": true,
+       "normalized": false,
+       "rstrip": true,
+       "single_word": false,
+       "special": false
+     },
+     "38": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "39": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": true,
+   "do_lower_case": false,
+   "eos_token": "</s>",
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "[PAD]",
+   "processor_class": "Wav2Vec2ProcessorWithLM",
+   "replace_word_delimiter_char": " ",
+   "target_lang": null,
+   "tokenizer_class": "Wav2Vec2CTCTokenizer",
+   "unk_token": "[UNK]",
+   "word_delimiter_token": "|"
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b243d2f49a567b285ff56051e569a3d608ca76aa03cc8f4119b7fd2bb7bdaab8
+ size 4984
vocab.json ADDED
@@ -0,0 +1,40 @@
+ {
+   "[PAD]": 37,
+   "[UNK]": 36,
+   "a": 1,
+   "b": 2,
+   "c": 3,
+   "d": 4,
+   "e": 5,
+   "f": 6,
+   "g": 7,
+   "h": 8,
+   "i": 9,
+   "j": 10,
+   "k": 11,
+   "l": 12,
+   "m": 13,
+   "n": 14,
+   "o": 15,
+   "p": 16,
+   "q": 17,
+   "r": 18,
+   "s": 19,
+   "t": 20,
+   "u": 21,
+   "v": 22,
+   "w": 23,
+   "x": 24,
+   "y": 25,
+   "z": 26,
+   "|": 0,
+   "á": 27,
+   "ç": 28,
+   "ê": 29,
+   "í": 30,
+   "ú": 31,
+   "ŋ": 32,
+   "ɔ": 33,
+   "ɛ": 34,
+   "ɲ": 35
+ }
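
For reference, a hedged sketch of greedy (no language model) CTC decoding against this vocabulary; it illustrates how the indices above map back to text, with `|` (id 0) as the word delimiter and `[PAD]` (id 37) doubling as the CTC blank. The helper name and file path are illustrative only.

```python
# Hedged sketch: greedy CTC decoding with the character vocabulary above.
import json
import torch

with open("vocab.json", encoding="utf-8") as f:
    vocab = json.load(f)
id_to_token = {i: tok for tok, i in vocab.items()}

def greedy_ctc_decode(logits: torch.Tensor) -> str:
    """logits: (time, vocab_size) tensor from Wav2Vec2ForCTC for one utterance."""
    ids = torch.argmax(logits, dim=-1).tolist()
    chars, prev = [], None
    for i in ids:
        tok = id_to_token.get(i)  # <s>/</s> (ids 38/39) are not in vocab.json
        if i != prev and tok not in (None, "[PAD]", "[UNK]"):  # collapse repeats, drop blanks
            chars.append(tok)
        prev = i
    return "".join(chars).replace("|", " ").strip()
```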