abuelnasr committed on
Commit b82855f
Parent: b223c1d

Update config.json

Files changed (1)
1. config.json +1 -1
config.json CHANGED
@@ -13,7 +13,7 @@
   "layer_norm_epsilon": 1e-05,
   "dropout": 0.0,
   "max_sequence_length": 4096,
-  "training_sequence_length": 4096,
+  "pretraining_sequence_length": 4096,
   "rope_max_wavelength": 10000.0,
   "rope_scaling_type": null,
   "rope_scaling_short_factor": null,