sippycoder committed
Commit 49bb1a4
1 Parent(s): 3516859

initial commit

Files changed (1): config.json (+2 -6)
config.json CHANGED
@@ -1,11 +1,7 @@
 {
   "architectures": [
-    "NucleusForCausalLM"
+    "LlamaForCausalLM"
   ],
-  "auto_map": {
-    "AutoConfig": "configuration_nucleus.NucleusConfig",
-    "AutoModelForCausalLM": "modeling_nucleus.NucleusForCausalLM"
-  },
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -14,7 +10,7 @@
   "intermediate_size": 17920,
   "max_position_embeddings": 2048,
   "max_sequence_length": 2048,
-  "model_type": "nucleus",
+  "model_type": "llama",
   "num_attention_heads": 52,
   "num_hidden_layers": 40,
   "num_key_value_heads": 52,