ruggsea committed
Commit 21ce69d
1 Parent(s): 23348de

Upload LlamaForCausalLM

config.json CHANGED
@@ -5,8 +5,8 @@
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "bos_token_id": 128000,
-  "eos_token_id": 128001,
+  "bos_token_id": 128256,
+  "eos_token_id": 128257,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "initializer_range": 0.02,
@@ -16,7 +16,7 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
-  "pad_token_id": 128256,
+  "pad_token_id": 128257,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
@@ -25,5 +25,5 @@
   "torch_dtype": "float16",
   "transformers_version": "4.40.1",
   "use_cache": true,
-  "vocab_size": 128257
+  "vocab_size": 128258
 }
generation_config.json CHANGED
@@ -1,8 +1,9 @@
 {
-  "bos_token_id": 128000,
+  "bos_token_id": 128256,
   "do_sample": true,
-  "eos_token_id": 128001,
+  "eos_token_id": 128257,
   "max_length": 4096,
+  "pad_token_id": 128257,
   "temperature": 0.6,
   "top_p": 0.9,
   "transformers_version": "4.40.1"
 }
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8779d82640d496cf007d561993e4ddad8ab9f256f0dd968b885c9b8be4adaf9b
-size 4976706784
+oid sha256:1d05f2f70d369d6ed17d13b3209f139e171774a0916ea27727b94fca53455d14
+size 4976714976
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b6924f6ef906f881e1ab899268a803654731c12151fda9860176f1aa111518ed
+oid sha256:3b65ab4814ac295885807b5c1a636a44d63eeb3948adbf3f3c19b6f11411cfcf
 size 4999802616
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cf4865befc9ac037a9aaf38bd65ab7e2473767a4240113a0e830708f12245c9f
+oid sha256:13c57e567720dfc2c009c8e5ae4bb1ab557d97621a693b11e69725a0d57ce77b
 size 4915916080
model-00004-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e988979978db99892f31479def2a8127cb6fc028fca232df228f2b69ef96c9f9
-size 1168147000
+oid sha256:dd38a60788ff272eb91dff39f9b7426545b614bd5900801e46ad048b0f206a61
+size 1168155192
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 16060538880
+    "total_size": 16060555264
   },
   "weight_map": {
     "lm_head.weight": "model-00004-of-00004.safetensors",