DeciLMForCausalLM(DeciLMPreTrainedModel, GenerationMixin) for v4.50
#16
by itlevy - opened
modeling_decilm.py  +2 -2
modeling_decilm.py
CHANGED
@@ -25,7 +25,7 @@ import torch.utils.checkpoint
 from torch import nn
 from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
 from transformers import GenerationConfig
-from transformers.generation.utils import NEED_SETUP_CACHE_CLASSES_MAPPING
+from transformers.generation.utils import GenerationMixin, NEED_SETUP_CACHE_CLASSES_MAPPING
 from transformers.modeling_utils import PreTrainedModel
 from transformers.utils import (
     add_start_docstrings,
@@ -1131,7 +1131,7 @@ class DeciLMModel(DeciLMPreTrainedModel):
         return causal_mask
 
 
-class DeciLMForCausalLM(DeciLMPreTrainedModel):
+class DeciLMForCausalLM(DeciLMPreTrainedModel, GenerationMixin):
     _tied_weights_keys = ["lm_head.weight"]
 
     def __init__(self, config):
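
Context for reviewers (my note, not part of the PR itself): starting with transformers v4.50, PreTrainedModel no longer inherits from GenerationMixin, so a remote-code class that only subclasses DeciLMPreTrainedModel would lose model.generate(). Adding GenerationMixin as an explicit base, as this diff does, keeps generation support. A minimal sketch of the mechanism, using hypothetical stand-in classes rather than the repo's code:

# Assumes transformers >= 4.50; the class names below are illustrative only.
from transformers import PreTrainedModel
from transformers.generation.utils import GenerationMixin

class WithoutMixin(PreTrainedModel):  # old pattern: the mixin came along implicitly before v4.50
    pass

class WithMixin(PreTrainedModel, GenerationMixin):  # pattern adopted by this PR
    pass

# On transformers >= 4.50 only the explicit subclass keeps generation support.
print(issubclass(WithoutMixin, GenerationMixin))  # False on >= 4.50 (True on older releases)
print(issubclass(WithMixin, GenerationMixin))     # True

With this change, loading the checkpoint with trust_remote_code=True on transformers >= 4.50 should keep model.generate() behaving as it did on earlier releases.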