add default model_type
Browse files
fix lora config loading bug
- configuration_chatglm.py +1 -0
configuration_chatglm.py
CHANGED
@@ -2,6 +2,7 @@ from transformers import PretrainedConfig
|
|
2 |
|
3 |
|
4 |
class ChatGLMConfig(PretrainedConfig):
|
|
|
5 |
def __init__(
|
6 |
self,
|
7 |
num_layers=28,
|
|
|
2 |
|
3 |
|
4 |
class ChatGLMConfig(PretrainedConfig):
|
5 |
+
model_type = "chatglm"
|
6 |
def __init__(
|
7 |
self,
|
8 |
num_layers=28,
|