nroggendorff committed on
Commit
ee095db
1 Parent(s): 521ec9b

Update train.py

Browse files
Files changed (1) hide show
  1. train.py +6 -3
train.py CHANGED
@@ -10,7 +10,7 @@ from tokenizers import ByteLevelBPETokenizer
10
  MAX_SEQ_LENGTH = 512
11
  BATCH_SIZE = 16
12
  EPOCHS = 4
13
- LEARNING_RATE = 2e-5
14
  FACTOR = 128
15
  VOCAB_SIZE = 32000
16
  INPUT_DATASET = "nroggendorff/oak"
@@ -23,7 +23,7 @@ CLIPPING = 1.0
23
  PUSH_TO_HUB = True
24
 
25
  def load_data():
26
- dataset = load_dataset(INPUT_DATASET, split="train").select(range(int(2e+4)))
27
  return dataset
28
 
29
  def create_tokenizer(training_corpus):
@@ -104,7 +104,10 @@ def train_model(model, tokenizer, dataset, push):
104
  weight_decay=DECAY,
105
  gradient_accumulation_steps=GRADIENT_ACCUMULATION_STEPS,
106
  fp16=FP16,
107
- max_grad_norm=CLIPPING
 
 
 
108
  )
109
 
110
  optimizer = AdamW(model.parameters(), lr=args.learning_rate)
 
10
  MAX_SEQ_LENGTH = 512
11
  BATCH_SIZE = 16
12
  EPOCHS = 4
13
+ LEARNING_RATE = 2e-4
14
  FACTOR = 128
15
  VOCAB_SIZE = 32000
16
  INPUT_DATASET = "nroggendorff/oak"
 
23
  PUSH_TO_HUB = True
24
 
25
  def load_data():
26
+ dataset = load_dataset(INPUT_DATASET, split="train")
27
  return dataset
28
 
29
  def create_tokenizer(training_corpus):
 
104
  weight_decay=DECAY,
105
  gradient_accumulation_steps=GRADIENT_ACCUMULATION_STEPS,
106
  fp16=FP16,
107
+ max_grad_norm=CLIPPING,
108
+ evaluation_strategy="steps",
109
+ eval_steps=10,
110
+ logging_steps=10
111
  )
112
 
113
  optimizer = AdamW(model.parameters(), lr=args.learning_rate)