Upload folder using huggingface_hub
- added_tokens.json +7 -0
- config.json +27 -0
- generation_config.json +6 -0
- pytorch_model-00001-of-00024.bin +3 -0
- pytorch_model-00002-of-00024.bin +3 -0
- pytorch_model-00003-of-00024.bin +3 -0
- pytorch_model-00004-of-00024.bin +3 -0
- pytorch_model-00005-of-00024.bin +3 -0
- pytorch_model-00006-of-00024.bin +3 -0
- pytorch_model-00007-of-00024.bin +3 -0
- pytorch_model-00008-of-00024.bin +3 -0
- pytorch_model-00009-of-00024.bin +3 -0
- pytorch_model-00010-of-00024.bin +3 -0
- pytorch_model-00011-of-00024.bin +3 -0
- pytorch_model-00012-of-00024.bin +3 -0
- pytorch_model-00013-of-00024.bin +3 -0
- pytorch_model-00014-of-00024.bin +3 -0
- pytorch_model-00015-of-00024.bin +3 -0
- pytorch_model-00016-of-00024.bin +3 -0
- pytorch_model-00017-of-00024.bin +3 -0
- pytorch_model-00018-of-00024.bin +3 -0
- pytorch_model-00019-of-00024.bin +3 -0
- pytorch_model-00020-of-00024.bin +3 -0
- pytorch_model-00021-of-00024.bin +3 -0
- pytorch_model-00022-of-00024.bin +3 -0
- pytorch_model-00023-of-00024.bin +3 -0
- pytorch_model-00024-of-00024.bin +3 -0
- pytorch_model.bin.index.json +0 -0
- special_tokens_map.json +11 -0
- tokenizer.model +3 -0
- tokenizer_config.json +65 -0
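The commit title indicates these files were pushed with the huggingface_hub client. As a hedged illustration only (not the uploader's actual script), a folder upload of this shape is typically done with HfApi.upload_folder; the local path below is a placeholder and the repo id is taken from _name_or_path in config.json.

# Minimal sketch of how a commit like this is usually produced with
# huggingface_hub; folder path and repo id are illustrative placeholders.
from huggingface_hub import HfApi

api = HfApi()  # assumes a token is already stored via `huggingface-cli login`
api.upload_folder(
    folder_path="./goliath-120b",       # local directory holding the files listed above
    repo_id="alpindale/goliath-120b",   # placeholder, taken from config.json's _name_or_path
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)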
added_tokens.json
ADDED
@@ -0,0 +1,7 @@
+{
+  "</s>": 2,
+  "<s>": 1,
+  "<unk>": 0,
+  "<|im_end|>": 32000,
+  "<|im_start|>": 32001
+}
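added_tokens.json appends the two ChatML-style markers at ids 32000 and 32001, directly above the 32,000-entry base Llama vocabulary. A quick sanity check of the file as committed (standard-library json only; the path assumes the repo root) could look like this:

import json

# Confirm the ChatML markers sit just above the base SentencePiece vocabulary.
with open("added_tokens.json") as f:
    added = json.load(f)

assert added["<|im_end|>"] == 32000
assert added["<|im_start|>"] == 32001
print(sorted(added.items(), key=lambda kv: kv[1]))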
config.json
ADDED
@@ -0,0 +1,27 @@
+{
+  "_name_or_path": "alpindale/goliath-120b",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 8192,
+  "initializer_range": 0.02,
+  "intermediate_size": 28672,
+  "max_position_embeddings": 4096,
+  "model_type": "llama",
+  "num_attention_heads": 64,
+  "num_hidden_layers": 137,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.34.0",
+  "use_cache": true,
+  "vocab_size": 32032
+}
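config.json describes a 137-layer Llama-style decoder with grouped-query attention (64 query heads, 8 KV heads) and untied embeddings. A back-of-the-envelope parameter count from these fields, assuming the standard Llama weight layout and ignoring norm weights, lands near 118B parameters and roughly 235 GB in float16, which matches the total size of the 24 shards below.

from transformers import AutoConfig

# Rough parameter estimate from config.json; a sketch, not an exact count.
cfg = AutoConfig.from_pretrained(".")  # or the hub repo id
head_dim = cfg.hidden_size // cfg.num_attention_heads
kv_dim = cfg.num_key_value_heads * head_dim

attn = cfg.hidden_size * (2 * cfg.hidden_size + 2 * kv_dim)   # q, o, k, v projections
mlp = 3 * cfg.hidden_size * cfg.intermediate_size             # gate, up, down projections
embed = 2 * cfg.vocab_size * cfg.hidden_size                  # input embeddings + lm_head

total = cfg.num_hidden_layers * (attn + mlp) + embed
print(f"~{total / 1e9:.1f}B parameters, ~{2 * total / 1e9:.0f} GB in fp16")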
generation_config.json
ADDED
@@ -0,0 +1,6 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "transformers_version": "4.34.0"
+}
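generation_config.json only pins the BOS and EOS ids; no sampling defaults are committed. A small sketch of how these defaults are read back and layered with caller-side sampling settings, assuming the file sits in the working directory:

from transformers import GenerationConfig

# The committed defaults: bos_token_id=1, eos_token_id=2, nothing else.
gen_cfg = GenerationConfig.from_pretrained(".")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)

# Sampling settings are left to the caller, e.g.:
gen_cfg.do_sample = True
gen_cfg.temperature = 0.7
gen_cfg.top_p = 0.9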
pytorch_model-00001-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:703e6a21f5f80340c340e61e5cd8528e1ab376b36a7f1605d5c01c087886bc77
+size 9853125605
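Each .bin entry in this commit is a Git LFS pointer (spec version, sha256 oid, byte size) rather than the weights themselves. After downloading a shard, it can be checked against its pointer; the sketch below uses the values from this first shard and only the standard library.

import hashlib
import os

# Verify a downloaded shard against the sha256 oid and size in its LFS pointer.
path = "pytorch_model-00001-of-00024.bin"
expected_oid = "703e6a21f5f80340c340e61e5cd8528e1ab376b36a7f1605d5c01c087886bc77"
expected_size = 9853125605

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert os.path.getsize(path) == expected_size
assert digest.hexdigest() == expected_oid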
pytorch_model-00002-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0b9aad1976a13a8ea067990910aa053a931309f22125e97120d5f18016f6f5d1
+size 9798108893
pytorch_model-00003-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4843f6af6e7b11443f4ce8386e7e90183146785e6e5f486d765b508a52dedbbb
+size 9965880015
pytorch_model-00004-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bbed432e03f0a3e7bbbfcc06d80fb776247b30aa19214868c9462f3c9328b510
+size 9798075365
pytorch_model-00005-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1b2cf32d7b276b39815c1d9468fdc7d6e9a3e9dba07dcfa915f0418fc6e575c8
+size 9798108809
pytorch_model-00006-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cc4b1a8780a0fc894c39398e3c161bd0b38f07d5fed483bf63c89abd1299c5c0
+size 9798108893
pytorch_model-00007-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:110dfb6eb79136d2c16993a367b7ee0eafff4a320567a782947c55ba2b4f9606
+size 9965880015
pytorch_model-00008-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0888f80ce1d7404395956f21e03552aa2daf7e7cdea8e9eb69b59c16471ad424
+size 9798075365
pytorch_model-00009-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5128e084c9f91ca5d8ecfd42da6ea35682d01f8a97fc0072596bac770b2c0a1c
+size 9798108809
pytorch_model-00010-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f35a06fca3b79e2797b05539b0400ab8697b88f51f0b34fceb5536101319a0e8
+size 9798108893
pytorch_model-00011-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6aa66f9cd7ae407deac3fea37e18bdb81da6cd915df56cdb127b350341db0548
+size 9965880015
pytorch_model-00012-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9ab5aaf07f7bc0920dca5f27614f1d2248cf6d4cb33556b387cfbbbaf587da11
+size 9798075365
pytorch_model-00013-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97617b7f47ec43934b15f7f3fbd288bf2c8184bda232b902c3528405dedea818
+size 9798108809
pytorch_model-00014-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:68f64eeb00dda4392039f2efcd6b555ee49ed4fd7b5f731163fb4579a3f1b850
+size 9798108893
pytorch_model-00015-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b19bfb818160cb56dffb7434a4ed9c783123775ede9e276bfc88e8615f87042a
+size 9965880015
pytorch_model-00016-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c0df2ab8bc5f809636c1c53c3e9acd26d64601b145cc61f809f590f139158469
+size 9798075365
pytorch_model-00017-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d18f0ae5507c17837ed0c20257548a26332185743ee9ab0a87a6ac75f8609dd3
+size 9798108809
pytorch_model-00018-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0f14867fefdefc4173822977fb585168e15be49deb0047db40bafa386e55d3c6
+size 9798108957
pytorch_model-00019-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bd7b90be4c3613e22ed1f0a0505dba610020aa135f956a838817f89aaedb142b
+size 9965880079
pytorch_model-00020-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7543e3e5972532ebfb2131f1248c8ab818cd4159863425fa67b2df48b4c98ec8
+size 9798075429
pytorch_model-00021-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fa0e4d0af437d853965e64017f7fac8eb52f075d07f1ac04e3973f60026d58ed
+size 9798108873
pytorch_model-00022-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:83064cfa842547bb784d7d6178fd049311c4aff42feb0ada891c7585a0decda8
+size 9798108957
pytorch_model-00023-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a967860faaed0db1e6edb41c4cb82ee25012e26f178aafc1a8145c3278d24762
+size 9965880079
pytorch_model-00024-of-00024.bin
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:58d4ce94410f315c86f15f7439ed4d68e5341f1ff136f502edc974244a43ec53
+size 9081388653
pytorch_model.bin.index.json
ADDED
The diff for this file is too large to render.
special_tokens_map.json
ADDED
@@ -0,0 +1,11 @@
+{
+  "additional_special_tokens": [
+    "<unk>",
+    "<s>",
+    "</s>"
+  ],
+  "bos_token": "<s>",
+  "eos_token": "<|im_end|>",
+  "pad_token": "</s>",
+  "unk_token": "<unk>"
+}
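special_tokens_map.json redefines the EOS token as <|im_end|>, which together with <|im_start|> points at ChatML-style prompting. The commit does not include a chat template, so the formatting below is only an assumption about how these markers are conventionally used, not a template defined by this repo.

# Assumed ChatML-style prompt built from the <|im_start|>/<|im_end|> markers;
# no chat template is actually defined anywhere in this commit.
def chatml_prompt(system: str, user: str) -> str:
    return (
        f"<|im_start|>system\n{system}<|im_end|>\n"
        f"<|im_start|>user\n{user}<|im_end|>\n"
        f"<|im_start|>assistant\n"
    )

print(chatml_prompt("You are a helpful assistant.", "Hello!"))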
tokenizer.model
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
tokenizer_config.json
ADDED
@@ -0,0 +1,65 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32000": {
+      "content": "<|im_end|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "32001": {
+      "content": "<|im_start|>",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": false
+    }
+  },
+  "additional_special_tokens": [
+    "<unk>",
+    "<s>",
+    "</s>"
+  ],
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "<|im_end|>",
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "</s>",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "tokenizer_file": null,
+  "trust_remote_code": true,
+  "unk_token": "<unk>",
+  "use_default_system_prompt": true,
+  "use_fast": false
+}
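tokenizer_config.json selects the slow LlamaTokenizer (use_fast is false and tokenizer_file is null) with legacy disabled and EOS remapped to <|im_end|>. A load-and-check sketch, assuming the committed files are in the current directory:

from transformers import AutoTokenizer

# Load the slow SentencePiece tokenizer as configured and check the special-token wiring.
tok = AutoTokenizer.from_pretrained(".", use_fast=False)

print(tok.eos_token)                                              # "<|im_end|>"
print(tok.convert_tokens_to_ids(["<|im_start|>", "<|im_end|>"]))  # [32001, 32000]
print(len(tok))  # 32002 tokens; config.json pads vocab_size out to 32032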