whizzzzkid committed on
Commit f74f523
1 Parent(s): 30321cc

Upload StableLmForCausalLM

Files changed (3)
  1. config.json +2 -2
  2. generation_config.json +4 -4
  3. model.safetensors +1 -1
config.json CHANGED
@@ -4,8 +4,8 @@
     "StableLmForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "bos_token_id": 100257,
-  "eos_token_id": 100257,
+  "bos_token_id": 100289,
+  "eos_token_id": 100290,
   "hidden_act": "silu",
   "hidden_dropout": 0.0,
   "hidden_size": 2048,
generation_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_from_model_config": true,
-  "bos_token_id": 100257,
-  "do_sample": true,
-  "eos_token_id": 100257,
-  "transformers_version": "4.38.2"
+  "bos_token_id": 100289,
+  "eos_token_id": 100290,
+  "transformers_version": "4.38.2",
+  "use_cache": false
 }
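generation_config.json now mirrors the new token IDs, drops the `do_sample` default, and sets `use_cache` to false. A minimal sketch, again assuming a placeholder repo id, of loading the updated generation defaults with transformers:

```python
from transformers import GenerationConfig

# Placeholder repo id; substitute the actual repo id.
gen_config = GenerationConfig.from_pretrained("whizzzzkid/<this-repo>")

# Expected values after this commit.
print(gen_config.bos_token_id)  # 100289
print(gen_config.eos_token_id)  # 100290
print(gen_config.use_cache)     # False
```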
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:67de9cf5dab3ac7db23acea78a37050eddf7c2c22b968aae6cae7f1af839928e
+oid sha256:5dd3cbc496a16fa66f217476627fafb8935a1be5f0db34ad9786cb205bbcbf4d
 size 3289069520
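Only the Git LFS pointer for model.safetensors changed (same size, new sha256). A minimal sketch, assuming the file has already been downloaded locally, of verifying a copy against the new pointer:

```python
import hashlib
import os

path = "model.safetensors"  # assumes a local download of the updated weights
expected_sha256 = "5dd3cbc496a16fa66f217476627fafb8935a1be5f0db34ad9786cb205bbcbf4d"
expected_size = 3289069520

# Hash the file in 1 MiB chunks to avoid loading 3.3 GB into memory at once.
h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size
assert h.hexdigest() == expected_sha256
print("model.safetensors matches the LFS pointer in this commit")
```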