# Transformer NMT configuration — dotenv-style KEY=value pairs, `#` full-line comments only.
# -----Configurations of the Transformer model----- #
# Model name
MODEL_NAME=TRANS_BASE_KE
## Path to training data of source language
CONTEXT_DATA_PATH=dataset/FULL_DATA.gom
## Path to training data of target language
TARGET_DATA_PATH=dataset/FULL_DATA.en
## Path to vocabulary of source language
CONTEXT_TOKEN_PATH=bert_kok.vocab
## Path to vocabulary of target language
TARGET_TOKEN_PATH=bert_en.vocab
# Reload weights from a pretrained model (comment out, leave empty, or set to 'None' to train from scratch)
WEIGHTS_PATH=tbase_kok-en.hdf5
# Set model configurations
BATCH_SIZE=128
MAX_TOKENS=128
## number of encoder and decoder layers
NUM_LAYERS=6
## dimensionality of the embeddings
D_MODEL=512
## internal dimensionality of the FeedForward layer
DFF=2048
## The number of self-attention heads
NUM_HEADS=8
## Residual Dropout
DROPOUT_RATE=0.3
# Set Training parameters
epochs=10
## Save only the best-performing weights (default: True)
save_best_only=True
## Save weights every `save_freq` epochs (default: 1)
save_freq=1