simmo committed
Commit d9406c6
Parent: 90ee5c1

Upload MistralForCausalLM

config.json CHANGED
@@ -16,18 +16,6 @@
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
   "pad_token_id": 2,
-  "quantization_config": {
-    "bnb_4bit_compute_dtype": "float16",
-    "bnb_4bit_quant_type": "nf4",
-    "bnb_4bit_use_double_quant": true,
-    "llm_int8_enable_fp32_cpu_offload": false,
-    "llm_int8_has_fp16_weight": false,
-    "llm_int8_skip_modules": "null",
-    "llm_int8_threshold": 6.0,
-    "load_in_4bit": true,
-    "load_in_8bit": false,
-    "quant_method": "bitsandbytes"
-  },
   "rms_norm_eps": 1e-05,
   "rope_theta": 10000.0,
   "sliding_window": 4096,
pytorch_model-00001-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:dc6812e737f75e0e0ac2215c4242f6dea6835ff77a26a30d9876dc6e1d219805
-size 4943185632
+oid sha256:71ec8d0a0b768afcb6c34dc30562c4fa9bcf48ae59f60a7ea61696aef36c4e60
+size 4943184544
pytorch_model-00002-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ef0252a904e2dc9bb9ab257ff6b117971e0d9767dc5560728b63b38fa70a29a1
-size 4999844616
+oid sha256:632ef578999ba286dd3c1c017a96cd1ce38eff5ca1204ac2c295ad9c59ae94ad
+size 4999843656
pytorch_model-00003-of-00003.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:23256c417cc33170e527d6f4feca652349714758495145019e61518e3ad6eff7
-size 4540536518
+oid sha256:bd89e2ea821bfa25f7a83caef799f70c273b68ca34088101344d040d612b1952
+size 4540536454
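
The weight shards are tracked with Git LFS, so the diffs above only touch pointer files: "oid" is the SHA-256 of the actual blob and "size" its length in bytes. A minimal sketch for checking a downloaded shard against its updated pointer, assuming the shard has been fetched locally; the file path is illustrative, and the expected values are taken from the last pointer above:

    # Sketch: verify a downloaded shard against its Git LFS pointer.
    import hashlib
    import os

    def sha256_of(path, chunk_size=1 << 20):
        # Stream in 1 MiB chunks so multi-GB shards never load into memory.
        h = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                h.update(chunk)
        return h.hexdigest()

    # Values from the new pointer for pytorch_model-00003-of-00003.bin.
    expected_oid = "bd89e2ea821bfa25f7a83caef799f70c273b68ca34088101344d040d612b1952"
    expected_size = 4540536454

    path = "pytorch_model-00003-of-00003.bin"  # illustrative local path
    assert os.path.getsize(path) == expected_size, "size mismatch"
    assert sha256_of(path) == expected_oid, "hash mismatch"
    print("shard matches its LFS pointer")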