Izen78 committed on
Commit
64817ef
1 Parent(s): 0b4063c

Training in progress, step 1000

Browse files
config.json CHANGED
@@ -7,10 +7,7 @@
7
  "WhisperForConditionalGeneration"
8
  ],
9
  "attention_dropout": 0.0,
10
- "begin_suppress_tokens": [
11
- 220,
12
- 50257
13
- ],
14
  "bos_token_id": 50257,
15
  "classifier_proj_size": 256,
16
  "d_model": 768,
@@ -47,7 +44,7 @@
47
  "mask_time_length": 10,
48
  "mask_time_min_masks": 2,
49
  "mask_time_prob": 0.05,
50
- "max_length": 448,
51
  "max_source_positions": 1500,
52
  "max_target_positions": 448,
53
  "median_filter_width": 7,
@@ -56,96 +53,8 @@
56
  "num_mel_bins": 80,
57
  "pad_token_id": 50257,
58
  "scale_embedding": false,
59
- "suppress_tokens": [
60
- 1,
61
- 2,
62
- 7,
63
- 8,
64
- 9,
65
- 10,
66
- 14,
67
- 25,
68
- 26,
69
- 27,
70
- 28,
71
- 29,
72
- 31,
73
- 58,
74
- 59,
75
- 60,
76
- 61,
77
- 62,
78
- 63,
79
- 90,
80
- 91,
81
- 92,
82
- 93,
83
- 359,
84
- 503,
85
- 522,
86
- 542,
87
- 873,
88
- 893,
89
- 902,
90
- 918,
91
- 922,
92
- 931,
93
- 1350,
94
- 1853,
95
- 1982,
96
- 2460,
97
- 2627,
98
- 3246,
99
- 3253,
100
- 3268,
101
- 3536,
102
- 3846,
103
- 3961,
104
- 4183,
105
- 4667,
106
- 6585,
107
- 6647,
108
- 7273,
109
- 9061,
110
- 9383,
111
- 10428,
112
- 10929,
113
- 11938,
114
- 12033,
115
- 12331,
116
- 12562,
117
- 13793,
118
- 14157,
119
- 14635,
120
- 15265,
121
- 15618,
122
- 16553,
123
- 16604,
124
- 18362,
125
- 18956,
126
- 20075,
127
- 21675,
128
- 22520,
129
- 26130,
130
- 26161,
131
- 26435,
132
- 28279,
133
- 29464,
134
- 31650,
135
- 32302,
136
- 32470,
137
- 36865,
138
- 42863,
139
- 47425,
140
- 49870,
141
- 50254,
142
- 50258,
143
- 50360,
144
- 50361,
145
- 50362
146
- ],
147
  "torch_dtype": "float32",
148
- "transformers_version": "4.44.2",
149
  "use_cache": true,
150
  "use_weighted_layer_sum": false,
151
  "vocab_size": 51865
 
7
  "WhisperForConditionalGeneration"
8
  ],
9
  "attention_dropout": 0.0,
10
+ "begin_suppress_tokens": null,
 
 
 
11
  "bos_token_id": 50257,
12
  "classifier_proj_size": 256,
13
  "d_model": 768,
 
44
  "mask_time_length": 10,
45
  "mask_time_min_masks": 2,
46
  "mask_time_prob": 0.05,
47
+ "max_length": null,
48
  "max_source_positions": 1500,
49
  "max_target_positions": 448,
50
  "median_filter_width": 7,
 
53
  "num_mel_bins": 80,
54
  "pad_token_id": 50257,
55
  "scale_embedding": false,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56
  "torch_dtype": "float32",
57
+ "transformers_version": "4.45.1",
58
  "use_cache": true,
59
  "use_weighted_layer_sum": false,
60
  "vocab_size": 51865
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c653f8983307ff9befb9890de66f36be13a9be7a28755efa4e711de182ef208c
3
  size 966995080
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ec7487d4488fd0637bad657c857b59625a5a548ff71882ce1644e91a8a1b1ca7
3
  size 966995080
runs/Sep27_09-33-33_27ae3a60d93c/events.out.tfevents.1727429700.27ae3a60d93c.532.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2ad95e7725b139459a7ca413cbeee7f4b417c1673b96555b1b7f03967ccb7a3e
3
+ size 15452
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5850e825ac003e6a4e365836b65a46782d1e3f324a173f966cafe82bc40f65e2
3
  size 5368
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:392ab83104532f5a3a8bff219df6428429ca7d4f0b56badd8af048e4ba81fbf1
3
  size 5368