Capstone-lpx committed on
Commit 0c45ca3
1 Parent(s): 17d77e3

Upload MyBertClassifier

config.json ADDED
@@ -0,0 +1,12 @@
+ {
+   "architectures": [
+     "MyBertClassifier"
+   ],
+   "auto_map": {
+     "AutoConfig": "configuration_my_bert_classifier.MyBertClassifierConfig",
+     "AutoModel": "modeling_my_bert_classifier.MyBertClassifier"
+   },
+   "model_type": "my_bert_classifier",
+   "torch_dtype": "float32",
+   "transformers_version": "4.33.3"
+ }
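The auto_map block above is what lets the Transformers Auto classes resolve the custom classes shipped with this repository. A minimal loading sketch, assuming a hypothetical repository id ("Capstone-lpx/<model-repo>" is a placeholder, not taken from this commit); trust_remote_code=True is needed so that transformers imports configuration_my_bert_classifier.py and modeling_my_bert_classifier.py:

from transformers import AutoConfig, AutoModel

# "Capstone-lpx/<model-repo>" is a hypothetical placeholder for this repository's id.
repo_id = "Capstone-lpx/<model-repo>"

# trust_remote_code=True allows the custom config/model modules referenced in auto_map to be imported.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)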
configuration_my_bert_classifier.py ADDED
@@ -0,0 +1,12 @@
+ from transformers import PretrainedConfig, BertModel
+
+ class MyBertClassifierConfig(PretrainedConfig):
+
+     model_type = "my_bert_classifier"
+
+     def __init__(
+         self,
+         **kwargs,
+     ):
+
+         super().__init__(**kwargs)
modeling_my_bert_classifier.py ADDED
@@ -0,0 +1,26 @@
+ from transformers import BertTokenizer, BertModel
+ from .configuration_my_bert_classifier import MyBertClassifierConfig
+ from torch import nn
+ from transformers.modeling_utils import PreTrainedModel
+
+ class MyBertClassifier(PreTrainedModel):
+
+     config_class = MyBertClassifierConfig
+
+     def __init__(self, config):
+
+         super(MyBertClassifier, self).__init__(config)
+
+         self.bert = BertModel.from_pretrained('bert-base-cased')
+         self.dropout = nn.Dropout(0.5)
+         self.linear = nn.Linear(768, 5)
+         self.relu = nn.ReLU()
+
+     def forward(self, input_id, mask):
+
+         _, pooled_output = self.bert(input_ids=input_id, attention_mask=mask, return_dict=False)
+         dropout_output = self.dropout(pooled_output)
+         linear_output = self.linear(dropout_output)
+         final_layer = self.relu(linear_output)
+
+         return final_layer
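For reference, a short usage sketch of the uploaded classifier, again with a hypothetical repository id. forward() takes input ids and an attention mask positionally and returns a (batch_size, 5) tensor produced by the final ReLU:

import torch
from transformers import AutoModel, BertTokenizer

# Hypothetical repository id; the tokenizer matches the 'bert-base-cased' backbone loaded in __init__.
tokenizer = BertTokenizer.from_pretrained("bert-base-cased")
model = AutoModel.from_pretrained("Capstone-lpx/<model-repo>", trust_remote_code=True)
model.eval()

enc = tokenizer("An example sentence to classify.",
                padding="max_length", max_length=32,
                truncation=True, return_tensors="pt")

with torch.no_grad():
    # Positional arguments map to forward(self, input_id, mask).
    scores = model(enc["input_ids"], enc["attention_mask"])  # shape: (1, 5)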
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aea6287e038b6df823b11bdebb06b3195bb6cd07b55dae9fc5f2b432bb57093a
+ size 433322033