ydshieh committed
Commit: 70a6103
Parent(s): 9275feb

add model

Files changed:
- config.json (+2 -1)
- modeling_my_model.py (+15 -0)
config.json CHANGED
@@ -2,6 +2,7 @@
   "n_layers": 2,
   "model_type": "my_model",
   "auto_map": {
-    "AutoConfig": "configuration_my_model.MyModelConfig"
+    "AutoConfig": "configuration_my_model.MyModelConfig",
"AutoModel": "modeling_my_model.MyMode"
   }
 }
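With both auto_map entries in place, the Auto classes can resolve the custom code directly from the Hub repo (the modeling file is added below). A minimal loading sketch, assuming a placeholder repo id and using from_config so the repo does not need to ship weights:

from transformers import AutoConfig, AutoModel

repo_id = "<user>/<repo>"  # placeholder: substitute the actual Hub repo id

# trust_remote_code=True is required because the classes live in the repo itself;
# AutoConfig follows auto_map["AutoConfig"], AutoModel follows auto_map["AutoModel"].
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModel.from_config(config, trust_remote_code=True)

print(type(config).__name__, type(model).__name__)  # MyModelConfig MyModel
print(model.n_layers)  # 2, taken from config.json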
modeling_my_model.py CHANGED
@@ -0,0 +1,15 @@
+from transformers.modeling_utils import PreTrainedModel
+
+from .configuration_my_model import MyModelConfig
+
+
+class MyModelPretrainedModel(PreTrainedModel):
+    pass
+
+
+class MyModel(MyModelPretrainedModel):
+
+    def __init__(self, config: MyModelConfig):
+        super().__init__(config)
+
+        self.n_layers = config.n_layers
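The AutoConfig entry and the import above both point at configuration_my_model.MyModelConfig, a file this commit does not touch. As a rough sketch only (an assumption inferred from the n_layers and model_type fields in config.json, not the actual committed file), it presumably looks something like:

from transformers.configuration_utils import PretrainedConfig


class MyModelConfig(PretrainedConfig):
    # model_type must match the "model_type" field in config.json
    model_type = "my_model"

    def __init__(self, n_layers=2, **kwargs):
        super().__init__(**kwargs)
        self.n_layers = n_layers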