heegyu committed
Commit bc0cf1d
1 Parent(s): 97fcc3e

medium model training, rev3

Files changed (3):
  1. config.json +8 -1
  2. flax_model.msgpack +3 -0
  3. tokenizer_config.json +1 -1
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "heegyu/ajoublue-gpt2-medium",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
@@ -13,9 +14,13 @@
   "n_ctx": 1024,
   "n_embd": 1024,
   "n_head": 16,
+  "n_inner": null,
   "n_layer": 24,
   "n_positions": 1024,
+  "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
+  "scale_attn_by_inverse_layer_idx": false,
+  "scale_attn_weights": true,
   "summary_activation": null,
   "summary_first_dropout": 0.1,
   "summary_proj_to_labels": true,
@@ -27,5 +32,7 @@
       "max_length": 50
     }
   },
+  "transformers_version": "4.25.1",
+  "use_cache": true,
   "vocab_size": 51200
-}
+}
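The keys added to config.json (_name_or_path, n_inner, reorder_and_upcast_attn, scale_attn_by_inverse_layer_idx, scale_attn_weights, transformers_version, use_cache) are the extra fields that transformers 4.25.1 serializes for a GPT-2 config. A minimal sketch for checking them, assuming this commit is pushed to the heegyu/ajoublue-gpt2-medium repo on the Hub (repo id taken from _name_or_path above):

from transformers import GPT2Config

# Pull the updated config from the Hub (repo id as written in the diff above).
config = GPT2Config.from_pretrained("heegyu/ajoublue-gpt2-medium")

# Shape fields unchanged by this commit: 24 layers, 1024 hidden, 16 heads (GPT-2 medium).
print(config.n_layer, config.n_embd, config.n_head)
# Fields added in this commit.
print(config.n_inner, config.scale_attn_weights, config.use_cache)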
flax_model.msgpack ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:04c7640c4ef6bd5097771047737aba334669b00c73dda0c7dc5a50ae0936618c
+size 1423164830
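flax_model.msgpack is stored through Git LFS, so only the pointer appears in the diff; the 1,423,164,830-byte payload is consistent with roughly 355M float32 parameters, i.e. a GPT-2 medium. A rough sanity-check sketch, assuming jax and flax are installed and the weights resolve from the same repo:

import jax
from transformers import FlaxGPT2LMHeadModel

# Load the Flax checkpoint added in this commit.
model = FlaxGPT2LMHeadModel.from_pretrained("heegyu/ajoublue-gpt2-medium")

# Parameter count times 4 bytes (float32) should land near the msgpack size above.
n_params = sum(p.size for p in jax.tree_util.tree_leaves(model.params))
print(n_params, n_params * 4)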
tokenizer_config.json CHANGED
@@ -19,7 +19,7 @@
   },
   "errors": "replace",
   "model_max_length": 1000000000000000019884624838656,
-  "name_or_path": "heegyu/ajoublue-gpt2-base",
+  "name_or_path": "heegyu/ajoublue-gpt2-medium",
   "pad_token": null,
   "special_tokens_map_file": null,
   "tokenizer_class": "GPT2Tokenizer",