geonmin-kim committed
Commit 2dc7c88 • 1 Parent(s): 9537a09
Upload folder using huggingface_hub
mlc-chat-config.json CHANGED (+4 -4)
@@ -12,16 +12,16 @@
     "position_embedding_base": 10000.0,
     "partial_rotary_factor": 0.4,
     "num_key_value_heads": 32,
-    "context_window_size":
-    "prefill_chunk_size":
+    "context_window_size": 512,
+    "prefill_chunk_size": 512,
     "head_dim": 80,
     "tensor_parallel_shards": 1,
     "max_batch_size": 128
   },
   "vocab_size": 51200,
-  "context_window_size":
+  "context_window_size": 512,
   "sliding_window_size": -1,
-  "prefill_chunk_size":
+  "prefill_chunk_size": 512,
   "attention_sink_size": -1,
   "tensor_parallel_shards": 1,
   "pipeline_parallel_stages": 1,
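The commit pins context_window_size and prefill_chunk_size to 512 in both places they appear in the config. A minimal verification sketch, assuming the updated mlc-chat-config.json sits in the working directory and that the nested block holding the first changed pair is the usual model_config object of an MLC chat config (that key name is an assumption, not shown in the diff):

import json

# Path is an assumption; adjust to wherever mlc-chat-config.json was downloaded.
with open("mlc-chat-config.json") as f:
    cfg = json.load(f)

# Top-level values introduced by this commit.
print(cfg.get("context_window_size"), cfg.get("prefill_chunk_size"))  # expected: 512 512

# The first changed pair sits in a nested block; "model_config" is the usual key
# in MLC chat configs, but that name is an assumption here.
inner = cfg.get("model_config", {})
print(inner.get("context_window_size"), inner.get("prefill_chunk_size"))  # expected: 512 512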