KathirKs committed
Commit
43dd272
1 Parent(s): 34d0f5f

Upload MistralForCausalLM

Files changed (2):
  1. config.json +3 -3
  2. model.safetensors +2 -2
config.json CHANGED
@@ -10,9 +10,9 @@
   "intermediate_size": 1024,
   "max_position_embeddings": 256,
   "model_type": "mistral",
-  "num_attention_heads": 4,
-  "num_hidden_layers": 4,
-  "num_key_value_heads": 1,
+  "num_attention_heads": 8,
+  "num_hidden_layers": 6,
+  "num_key_value_heads": 2,
   "rms_norm_eps": 1e-06,
   "rope_theta": 10000.0,
   "sliding_window": 64,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:787f4e6e8c6ff7ac1a02c9e34886cf18202b0453fbd884d6721fe0136bddb66c
-size 47985704
+oid sha256:78999a81d308924b22cb7ef567f70596486bb28650299bba1efdd6954c60b0fa
+size 55593936
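
The model.safetensors entry is a Git LFS pointer, so the sha256 oid and size above describe the actual weight file. A minimal sketch, standard library only, for checking a downloaded copy against this pointer; the local path is a placeholder.

import hashlib
from pathlib import Path

# Values copied from the new LFS pointer in this commit.
EXPECTED_OID = "78999a81d308924b22cb7ef567f70596486bb28650299bba1efdd6954c60b0fa"
EXPECTED_SIZE = 55593936

path = Path("model.safetensors")  # placeholder: wherever the file was downloaded
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch with LFS pointer"

# Hash in 1 MiB chunks to avoid loading the whole file into memory.
h = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch with LFS pointer"
print("model.safetensors matches the LFS pointer")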