Upload folder using huggingface_hub (#2)
config.json CHANGED
@@ -1,10 +1,7 @@
 {
   "architectures": [
-    "EurusRewardModel"
+    "MistralForCausalLM"
   ],
-  "auto_map": {
-    "AutoModel": "modeling_eurus_rm.EurusRewardModel"
-  },
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
@@ -20,7 +17,7 @@
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
+  "torch_dtype": "float16",
   "transformers_version": "4.34.0.dev0",
   "use_cache": true,
   "vocab_size": 32000
model-00001-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:713905e32a4ba1b2500773443f3be6e68995147a64a42ad1521f27933c7eee28
+oid sha256:d0665b585b6408d634845e7692b5291ced360ae61ef66c9441539fdf616d96db
 size 4943162240
model-00002-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:19f9bca444bdd26f0751800ca47e5d0a4e2d63412c9d1edc04c6d5d2ebd5b388
+oid sha256:c367a782d46a7d6335d727fdb1ff0046ef0ceebf4dbcc47dcdec461a7ea79e5c
 size 4999819232
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f6e34136df618545f3afd720842c488b34aea653a0c1e6693741b49af6a3021c
+oid sha256:4ddfbd163947982245f668a02c603d219c5eaff01759ae9b7af070edf0de4d0e
 size 4278380432
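
Each .safetensors file is tracked through Git LFS, so the diff only touches the pointer file: every sha256 oid changes while the byte sizes stay identical, which is consistent with re-serializing the tensors from bfloat16 to float16 (both are 16-bit types, so the shard sizes do not move). To check a downloaded shard against its pointer, hash it the way LFS does; a minimal sketch using the new oid of the first shard:

import hashlib

def lfs_oid(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file and return its sha256 hex digest (the LFS pointer oid)."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "d0665b585b6408d634845e7692b5291ced360ae61ef66c9441539fdf616d96db"
assert lfs_oid("model-00001-of-00003.safetensors") == expected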