ddobokki committed on
Commit e3ba203
1 Parent(s): 1a02ae3

Upload LlamaForCausalLM

Files changed (3):
  1. config.json +3 -3
  2. generation_config.json +1 -1
  3. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "cowshed2_base/checkpoint-1400",
+  "_name_or_path": "cowshed3_base/checkpoint-800",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -17,7 +17,7 @@
   "num_attention_heads": 16,
   "num_hidden_layers": 16,
   "num_key_value_heads": 16,
-  "pad_token_id": 50258,
+  "pad_token_id": 1,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
@@ -27,5 +27,5 @@
   "torch_dtype": "float32",
   "transformers_version": "4.42.3",
   "use_cache": true,
-  "vocab_size": 50304
+  "vocab_size": 51200
 }
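
For reference, a minimal sketch of how the updated values read back through the transformers API; the Hub repo id below is a placeholder, not given in this commit:

    from transformers import AutoConfig

    # Placeholder repo id: substitute this model's actual Hub path.
    config = AutoConfig.from_pretrained("ddobokki/<model>", revision="e3ba203")
    assert config.pad_token_id == 1      # was 50258 before this commit
    assert config.vocab_size == 51200    # was 50304 before this commit
    print(config.architectures)          # ['LlamaForCausalLM']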
generation_config.json CHANGED
@@ -2,6 +2,6 @@
   "_from_model_config": true,
   "bos_token_id": 50257,
   "eos_token_id": 50256,
-  "pad_token_id": 50258,
+  "pad_token_id": 1,
   "transformers_version": "4.42.3"
 }
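
Note that model.generate() falls back to the pad_token_id in this file when none is passed explicitly, so the change from 50258 to 1 affects batched generation. A minimal sketch, using the same placeholder repo id as above:

    from transformers import GenerationConfig

    gen = GenerationConfig.from_pretrained("ddobokki/<model>", revision="e3ba203")
    assert gen.pad_token_id == 1      # changed from 50258 in this commit
    assert gen.bos_token_id == 50257  # unchanged
    assert gen.eos_token_id == 50256  # unchanged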
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:84fa3df5ba4b708579c3165a0ad2cd9fb13d9a97c9fbd40273a6810c8524e056
-size 1064160256
+oid sha256:a3b914d1ebbdb0116b922efa206f23eb68c2ade59a3d5afb64ff5bb83c0c6bab
+size 1069665280
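
The 5,505,024-byte growth is consistent with the vocab_size change above, assuming a hidden size of 768 and untied input/output embeddings (neither value appears in the hunks shown; both are assumptions for this arithmetic):

    # Sanity-check the safetensors size delta against the vocab growth.
    old_size, new_size = 1_064_160_256, 1_069_665_280
    vocab_delta = 51_200 - 50_304            # 896 new token rows
    hidden_size = 768                        # assumed; not shown in this diff
    bytes_per_param = 4                      # float32, per config.json
    # Both embed_tokens and lm_head gain vocab_delta rows if untied.
    expected = vocab_delta * hidden_size * 2 * bytes_per_param
    assert new_size - old_size == expected   # 5,505,024 bytes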