RanchiZhao committed on
Commit
d6ca5a3
1 Parent(s): 0f2929a

change name to minicpm3

Browse files
Files changed (1) hide show
  1. modeling_minicpm.py +4 -4
modeling_minicpm.py CHANGED
@@ -997,7 +997,7 @@ MINICPM_START_DOCSTRING = r"""
997
  "The bare MiniCPM Model outputting raw hidden-states without any specific head on top.",
998
  MINICPM_START_DOCSTRING,
999
  )
1000
- class MiniCPMPreTrainedModel(PreTrainedModel):
1001
  config_class = MiniCPMConfig
1002
  base_model_prefix = "model"
1003
  supports_gradient_checkpointing = True
@@ -1093,7 +1093,7 @@ MINICPM_INPUTS_DOCSTRING = r"""
1093
  "The bare MiniCPM Model outputting raw hidden-states without any specific head on top.",
1094
  MINICPM_START_DOCSTRING,
1095
  )
1096
- class MiniCPMModel(MiniCPMPreTrainedModel):
1097
  """
1098
  Transformer decoder consisting of *config.num_hidden_layers* layers. Each layer is a [`MiniCPMDecoderLayer`]
1099
 
@@ -1259,7 +1259,7 @@ class MiniCPMModel(MiniCPMPreTrainedModel):
1259
  )
1260
 
1261
 
1262
- class MiniCPMForCausalLM(MiniCPMPreTrainedModel):
1263
  _tied_weights_keys = ["lm_head.weight"]
1264
 
1265
  def __init__(self, config):
@@ -1489,7 +1489,7 @@ class MiniCPMForCausalLM(MiniCPMPreTrainedModel):
1489
  """,
1490
  MINICPM_START_DOCSTRING,
1491
  )
1492
- class MiniCPMForSequenceClassification(MiniCPMPreTrainedModel):
1493
  def __init__(self, config):
1494
  super().__init__(config)
1495
  self.num_labels = config.num_labels
 
997
  "The bare MiniCPM Model outputting raw hidden-states without any specific head on top.",
998
  MINICPM_START_DOCSTRING,
999
  )
1000
+ class MiniCPM3PreTrainedModel(PreTrainedModel):
1001
  config_class = MiniCPMConfig
1002
  base_model_prefix = "model"
1003
  supports_gradient_checkpointing = True
 
1093
  "The bare MiniCPM Model outputting raw hidden-states without any specific head on top.",
1094
  MINICPM_START_DOCSTRING,
1095
  )
1096
+ class MiniCPM3Model(MiniCPM3PreTrainedModel):
1097
  """
1098
  Transformer decoder consisting of *config.num_hidden_layers* layers. Each layer is a [`MiniCPMDecoderLayer`]
1099
 
 
1259
  )
1260
 
1261
 
1262
+ class MiniCPM3ForCausalLM(MiniCPM3PreTrainedModel):
1263
  _tied_weights_keys = ["lm_head.weight"]
1264
 
1265
  def __init__(self, config):
 
1489
  """,
1490
  MINICPM_START_DOCSTRING,
1491
  )
1492
+ class MiniCPM3ForSequenceClassification(MiniCPM3PreTrainedModel):
1493
  def __init__(self, config):
1494
  super().__init__(config)
1495
  self.num_labels = config.num_labels