pharaouk committed on
Commit
bd05eb9
1 Parent(s): 24e4151

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +1 -0
config.json CHANGED
@@ -6,6 +6,7 @@
6
  "attention_bias": false,
7
  "attention_dropout": 0.0,
8
  "auto_map": {
 
9
  "AutoModelForCausalLM": "modeling_llama.LlamaForCausalLM"
10
  },
11
  "bos_token_id": 128000,
 
6
  "attention_bias": false,
7
  "attention_dropout": 0.0,
8
  "auto_map": {
9
+ "AutoConfig": "configuration_llama.LlamaConfig",
10
  "AutoModelForCausalLM": "modeling_llama.LlamaForCausalLM"
11
  },
12
  "bos_token_id": 128000,