import torch.nn as nn
from transformers import AutoModel, PreTrainedModel

from model_config import PragFormerConfig

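# PragFormerConfig is defined in model_config (not shown here); based on the
# attribute accesses below, it is assumed to expose a `bert` dict holding the
# wrapped encoder's serialized config ('_name_or_path', 'hidden_size', ...)
# plus `dropout`, `fc1`, `fc2`, and `softmax_dim` fields for the classifier head.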
class BERT_Arch(PreTrainedModel):
    # Registering the custom config class lets from_pretrained()/save_pretrained()
    # round-trip this model through the Hugging Face API.
    config_class = PragFormerConfig

    def __init__(self, config):
        super().__init__(config)
        # Load the underlying BERT encoder from the checkpoint name recorded
        # in the config.
        self.bert = AutoModel.from_pretrained(config.bert['_name_or_path'])

        # Classification head: fc1 -> ReLU -> dropout -> fc2 -> log-softmax.
        self.dropout = nn.Dropout(config.dropout)
        self.relu = nn.ReLU()
        self.fc1 = nn.Linear(self.config.bert['hidden_size'], config.fc1)
        self.fc2 = nn.Linear(config.fc1, config.fc2)
        self.softmax = nn.LogSoftmax(dim=config.softmax_dim)

    def forward(self, input_ids, attention_mask):
        # With return_dict=False the encoder returns a tuple whose second
        # element is the pooled [CLS] representation.
        _, cls_hs = self.bert(input_ids, attention_mask=attention_mask, return_dict=False)

        x = self.fc1(cls_hs)
        x = self.relu(x)
        x = self.dropout(x)
        x = self.fc2(x)

        # LogSoftmax yields log-probabilities, intended to pair with nn.NLLLoss.
        x = self.softmax(x)
        return x
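

# Minimal usage sketch, assuming a locally saved checkpoint; the path and the
# sample input below are placeholders, not part of the original module.
if __name__ == "__main__":
    import torch
    from transformers import AutoTokenizer

    model = BERT_Arch.from_pretrained("path/to/pragformer-checkpoint")  # hypothetical path
    model.eval()
    tokenizer = AutoTokenizer.from_pretrained(model.config.bert['_name_or_path'])

    batch = tokenizer("for (int i = 0; i < n; ++i) a[i] = b[i];",
                      return_tensors="pt", truncation=True)
    with torch.no_grad():
        log_probs = model(batch["input_ids"], batch["attention_mask"])
    # forward() returns log-probabilities; argmax recovers the predicted class.
    print(log_probs.argmax(dim=-1))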