Saving weights and logs of step 40000

Files changed:
- config.json +4 -1
- events.out.tfevents.1632400875.t1v-n-5878b477-w-0.1777207.0.v2 +3 -0
- flax_model.msgpack +3 -0
- opt_state.msgpack +3 -0
- training_state.json +1 -0
config.json CHANGED

@@ -9,6 +9,7 @@
   "dropout_rate": 0.1,
   "eos_token_id": 1,
   "feed_forward_proj": "gated-gelu",
+  "gradient_checkpointing": false,
   "initializer_factor": 1.0,
   "is_encoder_decoder": true,
   "layer_norm_epsilon": 1e-06,
@@ -20,5 +21,7 @@
   "pad_token_id": 0,
   "relative_attention_num_buckets": 32,
   "tie_word_embeddings": false,
-  "
+  "transformers_version": "4.10.0.dev0",
+  "use_cache": true,
+  "vocab_size": 32103
 }
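The keys shown above (feed_forward_proj, relative_attention_num_buckets, initializer_factor) match T5Config, so a T5 loader is assumed in this minimal sketch of reading the committed config back; the local directory name ./step-40000 is hypothetical:

    from transformers import T5Config

    # Load the committed config; the printed values match the diff above.
    config = T5Config.from_pretrained("./step-40000")  # hypothetical local path

    print(config.feed_forward_proj)  # gated-gelu
    print(config.vocab_size)         # 32103
    print(config.use_cache)          # True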
events.out.tfevents.1632400875.t1v-n-5878b477-w-0.1777207.0.v2 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:530b3dac8be94e2f7155347b48437485c399b242dbc22b9dc8fa46f72baa3502
+size 5934989
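The binary files in this commit are stored as Git LFS pointers rather than raw bytes: three lines giving the spec version, a sha256 object id, and the byte size. A minimal sketch of parsing one such pointer; the parse_lfs_pointer helper is hypothetical, not part of any library:

    def parse_lfs_pointer(text: str) -> dict:
        # Each pointer line has the form "<key> <value>"; split on the first space.
        return dict(line.split(" ", 1) for line in text.strip().splitlines())

    pointer = (
        "version https://git-lfs.github.com/spec/v1\n"
        "oid sha256:530b3dac8be94e2f7155347b48437485c399b242dbc22b9dc8fa46f72baa3502\n"
        "size 5934989\n"
    )
    fields = parse_lfs_pointer(pointer)
    print(fields["oid"])   # sha256:530b3dac...
    print(fields["size"])  # 5934989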
flax_model.msgpack ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f445d7d0c454420298f5744a06163d3ed982bab526aa3c1bb6a8bbbc98206b50
+size 307750439
opt_state.msgpack ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:644c488c08704aaf5817419435fd4ee6a7b32880f2fe2564c32bdc47f12ef849
+size 1016008
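flax_model.msgpack holds the model parameters and opt_state.msgpack the optimizer state, both serialized with Flax's msgpack format. A minimal sketch of restoring them once the real files have been fetched from LFS, assuming they sit in the working directory:

    import flax.serialization

    # Restore the raw pytrees; their keys mirror the module and optimizer structure.
    with open("flax_model.msgpack", "rb") as f:
        params = flax.serialization.msgpack_restore(f.read())

    with open("opt_state.msgpack", "rb") as f:
        opt_state = flax.serialization.msgpack_restore(f.read())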
training_state.json ADDED

@@ -0,0 +1 @@
+{"step": 40001}
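training_state.json stores only the step counter, so a restarted run can pick up at step 40001. A minimal sketch of reading it back; the surrounding training loop is assumed, not part of this commit:

    import json

    with open("training_state.json") as f:
        resume_step = json.load(f)["step"]  # 40001 after this commit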