emilios committed on
Commit
c67520a
1 Parent(s): 3784228
config.json CHANGED
@@ -34,7 +34,7 @@
34
  "num_mel_bins": 80,
35
  "pad_token_id": 50257,
36
  "scale_embedding": false,
37
- "torch_dtype": "float16",
38
  "transformers_version": "4.26.0.dev0",
39
  "use_cache": false,
40
  "vocab_size": 51865
 
34
  "num_mel_bins": 80,
35
  "pad_token_id": 50257,
36
  "scale_embedding": false,
37
+ "torch_dtype": "float32",
38
  "transformers_version": "4.26.0.dev0",
39
  "use_cache": false,
40
  "vocab_size": 51865
run.sh CHANGED
@@ -12,7 +12,7 @@ deepspeed run_speech_recognition_seq2seq_streaming-farsipal.py \
12
  --torch_compile_mode="reduce-overhead" \
13
  --torch_compile_mode="max-autotune" \
14
  --max_steps="10000" \
15
- --resume_from_checkpoint="checkpoint-4000" \
16
  --output_dir="./" \
17
  --per_device_train_batch_size="32" \
18
  --gradient_accumulation_steps="1" \
 
12
  --torch_compile_mode="reduce-overhead" \
13
  --torch_compile_mode="max-autotune" \
14
  --max_steps="10000" \
15
+ --resume_from_checkpoint="checkpoint-6000" \
16
  --output_dir="./" \
17
  --per_device_train_batch_size="32" \
18
  --gradient_accumulation_steps="1" \
runs/Dec17_22-33-02_129-146-176-120/events.out.tfevents.1671317315.129-146-176-120.1102971.0 CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b7e9d6ba2e6b67e4ffa281e2c6f41fce3b8ee132cd4e45f3b483c061ad9f4395
3
- size 10922
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2060292f5085682e66c0f37431096c4f396d2f9f5dc66f34e4c69ccc255ceb27
3
+ size 17520