wujohns committed
Commit 40ade00
1 Parent(s): b4a149d

Upload config
Files changed (1)
  1. config.json +9 -4
config.json CHANGED
@@ -1,5 +1,4 @@
  {
-   "_name_or_path": "model/epoch29",
    "activation_function": "gelu_new",
    "architectures": [
      "GPT2LMHeadModel"
@@ -9,15 +8,21 @@
    "embd_pdrop": 0.1,
    "eos_token_id": 50256,
    "gradient_checkpointing": false,
+   "id2label": {
+     "0": "LABEL_0"
+   },
    "initializer_range": 0.02,
+   "label2id": {
+     "LABEL_0": 0
+   },
    "layer_norm_epsilon": 1e-05,
    "model_type": "gpt2",
-   "n_ctx": 1024,
+   "n_ctx": 300,
    "n_embd": 768,
    "n_head": 12,
    "n_inner": null,
-   "n_layer": 12,
-   "n_positions": 1024,
+   "n_layer": 10,
+   "n_positions": 300,
    "output_past": true,
    "reorder_and_upcast_attn": false,
    "resid_pdrop": 0.1,