File size: 215 Bytes
234828b
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
# Evaluate the pretrained GPT-2 large checkpoint, no training.
# GPT-2 large: n_layer=36, n_head=20, n_embd=1280 (~774M parameters).
init_from = 'gpt2-large'  # load the HF pretrained weights
eval_only = True          # run evaluation once and exit; skip the training loop
# more eval iterations -> lower-variance loss estimate
eval_iters = 500
batch_size = 8
wandb_log = False         # no experiment tracking for a one-off eval