625k steps, 6e-4 lr, batch 4x8, training data rev1
- config.json +1 -1
- flax_model.msgpack +3 -0
- tokenizer_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "heegyu/kogpt-j-base",
+  "_name_or_path": "heegyu/kogpt-j-base-24L",
   "activation_function": "gelu_new",
   "architectures": [
     "GPTJForCausalLM"
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:21a7eefe15868d842b8a84efcc21db03ba1d1bd3b225dfb242a7440867e0c7a8
+size 994786268
tokenizer_config.json CHANGED
@@ -19,7 +19,7 @@
   },
   "errors": "replace",
   "model_max_length": 1000000000000000019884624838656,
-  "name_or_path": "heegyu/kogpt-j-base",
+  "name_or_path": "heegyu/kogpt-j-base-24L",
   "pad_token": null,
   "special_tokens_map_file": null,
   "tokenizer_class": "GPT2Tokenizer",