#!/usr/bin/env python
# -*- coding: utf-8 -*-
from transformers.configuration_utils import PretrainedConfig


class TAASConfig(PretrainedConfig):
    """Configuration class for the TAAS model.

    Note that the constructor arguments use non-standard names but are
    stored under the canonical ``transformers`` attribute names, e.g.
    ``hidd_size`` -> ``hidden_size`` and ``vocabulary_size`` ->
    ``vocab_size``; the ``vocab_size`` argument itself is stored as
    ``type_vocab_size``.
    """

    model_type = "TAAS"

    def __init__(
        self,
        hidd_dropout=0.1,  # dropout probability for hidden states
        intermediate_size=3072,  # inner dimension of the feed-forward layers
        initialize_range=0.02,  # stddev for weight initialization
        max_pos_embeddings=2048,  # maximum supported sequence length
        hidd_act="gelu",  # activation in the feed-forward layers
        attention_dropout=0.1,  # dropout probability for attention weights
        using_task_id=True,  # whether the model uses task-type embeddings
        vocabulary_size=40000,  # size of the token vocabulary
        hidd_size=768,  # hidden dimension of the encoder
        num_hidd_layers=12,  # number of transformer layers
        layer_norm_rate=1e-05,  # LayerNorm epsilon
        num_atten_heads=12,  # attention heads per layer
        pad_token_id=0,  # id of the padding token
        task_vocab_size=3,  # number of distinct task types
        classifier_drop=None,  # dropout for the classification head
        pos_embedding="absolute",  # position-embedding type
        use_cache=True,  # whether to cache key/value states
        vocab_size=4,  # number of token types (segment ids)
        **kwargs
    ):
        super().__init__(pad_token_id=pad_token_id, **kwargs)
        # Map the constructor arguments onto the canonical attribute names
        # expected by downstream ``transformers`` code.
        self.vocab_size = vocabulary_size
        self.max_position_embeddings = max_pos_embeddings
        self.type_vocab_size = vocab_size
        self.use_task_id = using_task_id
        self.layer_norm_eps = layer_norm_rate
        self.position_embedding_type = pos_embedding
        self.num_attention_heads = num_atten_heads
        self.hidden_size = hidd_size
        self.attention_probs_dropout_prob = attention_dropout
        self.initializer_range = initialize_range
        self.hidden_act = hidd_act
        self.intermediate_size = intermediate_size
        self.hidden_dropout_prob = hidd_dropout
        self.use_cache = use_cache
        self.classifier_dropout = classifier_drop
        self.num_hidden_layers = num_hidd_layers
        self.task_type_vocab_size = task_vocab_size
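
# A minimal usage sketch (assumes the `transformers` package is installed):
# like any PretrainedConfig subclass, TAASConfig can be instantiated with
# keyword overrides and serialized. The attributes are exposed under the
# canonical names, not the constructor argument names.
if __name__ == "__main__":
    config = TAASConfig(hidd_size=512, num_atten_heads=8)
    print(config.hidden_size)  # 512 -- stored as `hidden_size`
    print(config.to_json_string())  # JSON with the canonical attribute names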