Qinbin Li committed
Commit: a4b91f0
Parent: a5dda87

remove pad_token_id

Files changed (1): config.json (+0, -5)
config.json CHANGED
@@ -21,11 +21,6 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
-  "pad_token_id": [
-    128001,
-    128008,
-    128009
-  ],
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": {
 