Saving weights and logs of step 480000
flax_model.msgpack
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:4f38acdc66d0d9dd7e6f7f9e1af3e281fe54a25be30002b509e5df27a5855267
 size 891548548
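The pointer update records a new SHA-256 for the ~892 MB Flax checkpoint; the weights themselves live in Git LFS storage rather than in the Git history. A minimal fetch sketch, assuming a placeholder repository URL:

    GIT_LFS_SKIP_SMUDGE=1 git clone https://huggingface.co/<namespace>/<model>   # clone pointer files only
    cd <model>
    git lfs pull --include="flax_model.msgpack"                                  # download just this checkpoint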
run_t5.sh
ADDED
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+python run_t5_mlm_flax.py \
+    --output_dir="${MODEL_PATH}" \
+    --model_type="t5" \
+    --config_name="${MODEL_PATH}" \
+    --tokenizer_name="${MODEL_PATH}" \
+    --preprocessing_num_workers="96" \
+    --do_train --do_eval \
+    --dataset_name="${DATASET}" \
+    --dataset_config_name="${DATASET_CONFIG}" \
+    --max_seq_length="512" \
+    --per_device_train_batch_size="16" \
+    --per_device_eval_batch_size="16" \
+    --adafactor \
+    --learning_rate="0.005" \
+    --overwrite_output_dir \
+    --num_train_epochs="1" \
+    --logging_steps="500" \
+    --save_steps="80000" \
+    --eval_steps="2500" \
+    --weight_decay="0.01" \
+    --warmup_steps="10000" \
+    --validation_split_count="15000" \
+    --push_to_hub \
+    # --adam_beta1="0.9" \
+    # --adam_beta2="0.98" \
+    # --resume_from_checkpoint="${MODEL_DIR}" \ # Uncomment to resume from ckpt
+    # --max_train_samples 100000 \
+    # --max_eval_samples 1000 \
+    # --adafactor \
+    # --save_steps="80000" \
+
+
+# Instead of adafactor: adamw
+
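The closing comment suggests swapping Adafactor for AdamW. A hedged sketch of that variant, keeping the data and model flags from the script above: drop --adafactor, enable the commented beta values, and lower the learning rate (the 0.001 here is an illustrative choice, not a value taken from this commit):

    python run_t5_mlm_flax.py \
        --output_dir="${MODEL_PATH}" \
        --model_type="t5" \
        --config_name="${MODEL_PATH}" \
        --tokenizer_name="${MODEL_PATH}" \
        --do_train --do_eval \
        --dataset_name="${DATASET}" \
        --dataset_config_name="${DATASET_CONFIG}" \
        --max_seq_length="512" \
        --per_device_train_batch_size="16" \
        --per_device_eval_batch_size="16" \
        --learning_rate="0.001" \
        --adam_beta1="0.9" \
        --adam_beta2="0.98" \
        --weight_decay="0.01" \
        --warmup_steps="10000" \
        --num_train_epochs="1" \
        --overwrite_output_dir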
runs/Dec12_13-32-15_t1v-n-aa1c2160-w-0/events.out.tfevents.1639325974.t1v-n-aa1c2160-w-0.15396.0.v2
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:56a1019d63fc5417fea18e1ffa05a7d16acda66f09c60cb2d1f78c8a27044cca
+size 71578452
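The updated events file carries the TensorBoard logs for this run; it is LFS-tracked as well, so it must be pulled before it can be viewed locally (a usage sketch, assuming TensorBoard is installed):

    git lfs pull                  # fetch the LFS-tracked event files
    tensorboard --logdir runs/    # then browse the training curves at http://localhost:6006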