SinQQQ committed
Commit 13964a9
1 Parent(s): d010dd2

Training in progress, step 5
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "openai/whisper-small",
+  "_name_or_path": "openai/whisper-base",
   "activation_dropout": 0.0,
   "activation_function": "gelu",
   "apply_spec_augment": false,
@@ -13,17 +13,17 @@
   ],
   "bos_token_id": 50257,
   "classifier_proj_size": 256,
-  "d_model": 768,
-  "decoder_attention_heads": 12,
-  "decoder_ffn_dim": 3072,
+  "d_model": 512,
+  "decoder_attention_heads": 8,
+  "decoder_ffn_dim": 2048,
   "decoder_layerdrop": 0.0,
-  "decoder_layers": 12,
+  "decoder_layers": 6,
   "decoder_start_token_id": 50258,
   "dropout": 0.0,
-  "encoder_attention_heads": 12,
-  "encoder_ffn_dim": 3072,
+  "encoder_attention_heads": 8,
+  "encoder_ffn_dim": 2048,
   "encoder_layerdrop": 0.0,
-  "encoder_layers": 12,
+  "encoder_layers": 6,
   "eos_token_id": 50257,
   "forced_decoder_ids": null,
   "init_std": 0.02,
@@ -39,7 +39,7 @@
   "max_target_positions": 448,
   "median_filter_width": 7,
   "model_type": "whisper",
-  "num_hidden_layers": 12,
+  "num_hidden_layers": 6,
   "num_mel_bins": 80,
   "pad_token_id": 50257,
   "scale_embedding": false,
generation_config.json CHANGED
@@ -1,44 +1,36 @@
 {
   "alignment_heads": [
     [
-      5,
-      3
-    ],
-    [
-      5,
-      9
+      3,
+      1
     ],
     [
-      8,
-      0
+      4,
+      2
     ],
     [
-      8,
-      4
+      4,
+      3
     ],
     [
-      8,
+      4,
       7
     ],
     [
-      8,
-      8
-    ],
-    [
-      9,
-      0
+      5,
+      1
     ],
     [
-      9,
-      7
+      5,
+      2
     ],
     [
-      9,
-      9
+      5,
+      4
     ],
     [
-      10,
-      5
+      5,
+      6
     ]
   ],
   "begin_suppress_tokens": [
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:16ad75b9139ec08ecafa785f6b1978580d7522f5205ee600152432c5aca0339f
-size 966995080
+oid sha256:72e93eb366ffe929be4632cac0603ee3838ec8c3d27c9bf31c1af6f8e78ea25c
+size 290403936
runs/Jan03_09-10-34_743ecbc24e0e/events.out.tfevents.1704273035.743ecbc24e0e.176.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f531190e77899318e9be7fefacdb8b80f6ef5f5acd67efee044aaff64ee493a5
+size 5784
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b26293378eb87c4fd493ddf5cf25977176fab5c80bd332e68eddbeac27fed71b
+oid sha256:9b9e99e4abbc62952ff50e75f567a9e768af05eb4a379bb97074a1ed7b62d52b
 size 4347