huseinzol05 committed on
Commit 7a4f01f
1 parent: 370a50c

Upload MistralForCausalLM

config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "fpf-7b/checkpoint-23800",
+  "_name_or_path": "fpf-7b/checkpoint-29600",
   "architectures": [
     "MistralForCausalLM"
   ],
@@ -18,7 +18,7 @@
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.34.0",
   "use_cache": true,
   "vocab_size": 32000
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d4e311c83761914f786b775c06a79f031ccb64f8ce821534362677ee57885861
-size 9942981496
+oid sha256:b8da8c182a943056a1b56d7f40141327742b2354084ba5f9643160e4dbd5c468
+size 9942981696
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5f8afeb05584a2d46aa21a961dfd2817430f9daa96245c9d29e9026fa74cb9dd
-size 4540516256
+oid sha256:44d8d5cf16dd97388041fa799dc62bbd4586c232e56774c503b3f88e758478ef
+size 4540516344
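The safetensors entries above are Git LFS pointers, so a downloaded shard can be checked against the new oid and size; a minimal sketch, assuming the shard sits in the current directory under its original filename:

import hashlib
import os

# Values copied from the updated LFS pointer for model-00001-of-00002.safetensors.
path = "model-00001-of-00002.safetensors"
expected_oid = "b8da8c182a943056a1b56d7f40141327742b2354084ba5f9643160e4dbd5c468"
expected_size = 9942981696

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch with LFS pointer"
assert sha256.hexdigest() == expected_oid, "sha256 mismatch with LFS pointer"
print("shard matches the Git LFS pointer")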