set use_cache to true
Browse files
As discussed in this [thread](https://huggingface.co/HuggingFaceH4/starchat-alpha/discussions/3#645fde7625a4075bcf9d2e15), this should make inference much faster.
- config.json +1 -1
config.json
CHANGED
@@ -33,7 +33,7 @@
|
|
33 |
"summary_use_proj": true,
|
34 |
"torch_dtype": "float16",
|
35 |
"transformers_version": "4.28.1",
|
36 |
-
"use_cache": false,
|
37 |
"validate_runner_input": true,
|
38 |
"vocab_size": 49156
|
39 |
}
|
|
|
33 |
"summary_use_proj": true,
|
34 |
"torch_dtype": "float16",
|
35 |
"transformers_version": "4.28.1",
|
36 |
+
"use_cache": true,
|
37 |
"validate_runner_input": true,
|
38 |
"vocab_size": 49156
|
39 |
}
|