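# Training configuration: GPT-2 language model on the BabyLM strict-small
# corpus with a BPE tokenizer over orthographic text (the run name suggests
# ~19M parameters; see the model section below).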
experiment:
  seed: 42
  name: gpt2_19M-bpe-text-dyn
  group: babylm-small
  dry_run: false
  offline_run: false
  evaluate_segmentation: false
  evaluate_babyslm: true
  blimp_tasks: blimp_filtered,blimp_supplement
  resume_checkpoint_path: null
  resume_run_id: null
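# Data source on the HuggingFace Hub. Although this is the phonemized BabyLM
# release, the orthographic `text` column is used here (is_phonemes: false).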
dataset:
  name: transformersegmentation/BabyLM-phonemized
  subconfig: strict_small
  text_column: text
  is_phonemes: false
  max_age: null
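# Pretrained BPE tokenizer; per its name, trained on orthographic
# (non-phonemic) BabyLM text.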
tokenizer:
  name: transformersegmentation/BabyLM-BPE-ortho-tokenizer
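# Preprocessing: sequences are capped at 128 tokens, well within the model's
# 256 positions. `join_utts: dynamic` presumably concatenates utterances on
# the fly to fill each sequence (an assumption based on the option name); no
# subsampling is applied.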
data_preprocessing:
  max_input_length: 128
  join_utts: dynamic
  remove_word_boundaries: false
  subsample: null
  subsample_type: examples
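# GPT-2 architecture; the kwargs follow HuggingFace GPT2Config naming. With
# n_layer=6 and n_embd=512, non-embedding parameters come to roughly
# 12 * 6 * 512^2 ≈ 18.9M, consistent with the "19M" in the run name.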
model:
  name: gpt2_lm
  model_kwargs:
    n_layer: 6
    n_head: 8
    n_embd: 512
    n_positions: 256
    n_inner: 2048
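# Optimization: 400k total steps with a 90k-step warmup (22.5% of training),
# peak lr 1e-3, batch size 32; logging every 4k steps, checkpointing and
# evaluation every 50k steps.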
trainer:
  batch_size: 32
  lr: 0.001
  num_warmup_steps: 90000
  max_training_steps: 400000
  logging_steps: 4000
  save_steps: 50000
  eval_steps: 50000