FinancialSupport committed
Commit a373d46
1 Parent(s): 345aef4
Files changed (3)
  1. config.json +9 -1
  2. generation_config.json +6 -0
  3. pytorch_model.bin +2 -2
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "FinancialSupport/NanoGPT",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
@@ -13,9 +14,13 @@
   "n_ctx": 1024,
   "n_embd": 768,
   "n_head": 12,
+  "n_inner": null,
   "n_layer": 12,
   "n_positions": 1024,
+  "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
+  "scale_attn_by_inverse_layer_idx": false,
+  "scale_attn_weights": true,
   "summary_activation": null,
   "summary_first_dropout": 0.1,
   "summary_proj_to_labels": true,
@@ -27,5 +32,8 @@
       "max_length": 50
     }
   },
+  "torch_dtype": "float32",
+  "transformers_version": "4.30.2",
+  "use_cache": true,
   "vocab_size": 50257
-}
+}
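The new config keys ("torch_dtype", "use_cache", "transformers_version", and the attention-scaling flags) are standard GPT2Config fields that transformers reads automatically when the checkpoint is loaded. A minimal sketch, assuming the repo id matches the "_name_or_path" value added in this commit:

    from transformers import AutoConfig, GPT2LMHeadModel

    # Assumption: the checkpoint is hosted as FinancialSupport/NanoGPT on the Hub,
    # matching the "_name_or_path" entry added above.
    config = AutoConfig.from_pretrained("FinancialSupport/NanoGPT")
    print(config.use_cache)    # True, per the "use_cache" key added in this commit
    print(config.torch_dtype)  # reflects the "torch_dtype": "float32" entry

    model = GPT2LMHeadModel.from_pretrained("FinancialSupport/NanoGPT")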
generation_config.json ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 50256,
+  "eos_token_id": 50256,
+  "transformers_version": "4.30.2"
+}
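The new generation_config.json supplies the defaults that model.generate() falls back on when no explicit arguments are passed; here it pins the GPT-2 end-of-text token (50256) as both BOS and EOS. A minimal sketch, assuming a tokenizer is available in the same repo (an assumption, not part of this commit):

    from transformers import GPT2LMHeadModel, GPT2TokenizerFast

    # Assumption: repo id and tokenizer files; only the token ids come from this commit.
    tok = GPT2TokenizerFast.from_pretrained("FinancialSupport/NanoGPT")
    model = GPT2LMHeadModel.from_pretrained("FinancialSupport/NanoGPT")

    print(model.generation_config.bos_token_id)  # 50256
    print(model.generation_config.eos_token_id)  # 50256

    inputs = tok("Hello", return_tensors="pt")
    out = model.generate(**inputs, max_new_tokens=20)
    print(tok.decode(out[0]))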
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5cb107f46ecb49dbe5228fe911762c7d6e906863c18a20a3c3e3df5f6b8c1b93
-size 497802202
+oid sha256:1e2b46dce21fa5f3a92525ef6fec0a61166665f54ff753b80d36f68c512cf79e
+size 497805594
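The pytorch_model.bin entry is a Git LFS pointer, so only the stored object's SHA-256 digest and byte size change here. A minimal sketch for checking a locally downloaded copy against the new pointer (the local path is an assumption):

    import hashlib, os

    path = "pytorch_model.bin"  # assumed local download location
    expected_sha256 = "1e2b46dce21fa5f3a92525ef6fec0a61166665f54ff753b80d36f68c512cf79e"
    expected_size = 497805594

    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)

    print(os.path.getsize(path) == expected_size)
    print(h.hexdigest() == expected_sha256)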