Update modeling_fake_custom.py

#1
opened by Rocketknight1
config.json CHANGED
@@ -9,7 +9,7 @@
   "auto_map": {
     "AutoConfig": "configuration_fake_custom.FakeCustomConfig",
     "AutoModel": "modeling_fake_custom.FakeCustomModel",
-    "AutoModelForCausalLM": "modeling_fake_custom.FakeCustomModelForCausalLM"
+    "AutoModelForCausalLM": "modeling_fake_custom.FakeCustomForCausalLM"
   },
   "bos_token_id": 98,
   "embd_pdrop": 0.1,
configuration_fake_custom.py CHANGED
@@ -1,5 +1,5 @@
-from transformers import GPT2Config
+from transformers import PretrainedConfig
 
 
-class FakeCustomConfig(GPT2Config):
-    model_type = "fakecustom"
+class FakeCustomConfig(PretrainedConfig):
+    model_type = "fakecustom"
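The config now subclasses PretrainedConfig directly instead of GPT2Config, keeping only the model_type attribute. A fuller custom config would typically also declare its own hyperparameters in __init__ and forward everything else to the parent; a minimal sketch with hypothetical field names, not part of the repo in this diff:

from transformers import PretrainedConfig

class FakeCustomConfig(PretrainedConfig):
    model_type = "fakecustom"

    def __init__(self, hidden_size=768, num_layers=12, **kwargs):
        # hidden_size / num_layers are illustrative only.
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        super().__init__(**kwargs)  # handles bos_token_id and other shared fields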
modeling_fake_custom.py CHANGED
@@ -1,9 +1,13 @@
-from transformers import GPT2Model, GPT2LMHeadModel
+from transformers import PreTrainedModel
 from .configuration_fake_custom import FakeCustomConfig
 
-class FakeCustomModel(GPT2Model):
+class FakeCustomPreTrainedModel(PreTrainedModel):
     config_class = FakeCustomConfig
 
 
-class FakeCustomModelForCausalLM(GPT2LMHeadModel):
-    config_class = FakeCustomConfig
+class FakeCustomModel(FakeCustomPreTrainedModel):
+    _auto_class = "AutoModel"
+
+
+class FakeCustomForCausalLM(FakeCustomPreTrainedModel):
+    _auto_class = "AutoModelForCausalLM"
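Setting _auto_class directly mirrors what register_for_auto_class does: it tells save_pretrained / push_to_hub to write the matching auto_map entries into config.json so the classes stay loadable with trust_remote_code=True. A minimal sketch of the equivalent registration calls, assuming the two modules are importable (e.g. as part of a local package):

from configuration_fake_custom import FakeCustomConfig
from modeling_fake_custom import FakeCustomModel, FakeCustomForCausalLM

# Equivalent to the _auto_class assignments in the diff above.
FakeCustomConfig.register_for_auto_class("AutoConfig")
FakeCustomModel.register_for_auto_class("AutoModel")
FakeCustomForCausalLM.register_for_auto_class("AutoModelForCausalLM")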