Update configuration_llama.py — required for the model to load, since `rope_scaling` must be either None or a dictionary
#1
by
TheBloke
- opened
- configuration_llama.py +1 -1
configuration_llama.py
CHANGED
@@ -124,7 +124,7 @@ class LlamaConfig(PretrainedConfig):
|
|
124 |
pretraining_tp=1,
|
125 |
tie_word_embeddings=False,
|
126 |
rope_theta=10000,
|
127 |
-
rope_scaling=
|
128 |
attention_bias=False,
|
129 |
**kwargs,
|
130 |
):
|
|
|
124 |
pretraining_tp=1,
|
125 |
tie_word_embeddings=False,
|
126 |
rope_theta=10000,
|
127 |
+
rope_scaling=None,
|
128 |
attention_bias=False,
|
129 |
**kwargs,
|
130 |
):
|