OpenLeecher committed
Commit: d6542fa
Parent: 48d80b5

Upload 5 files

config.json CHANGED
@@ -1,30 +1,29 @@
-{
-  "_name_or_path": "meta-llama/Meta-Llama-3-8B",
-  "architectures": [
-    "LlamaForCausalLM"
-  ],
-  "attention_bias": false,
-  "attention_dropout": 0.0,
-  "bos_token_id": 128000,
-  "eos_token_id": 128001,
-  "head_dim": 128,
-  "hidden_act": "silu",
-  "hidden_size": 4096,
-  "initializer_range": 0.02,
-  "intermediate_size": 14336,
-  "max_position_embeddings": 8192,
-  "mlp_bias": false,
-  "model_type": "llama",
-  "num_attention_heads": 32,
-  "num_hidden_layers": 32,
-  "num_key_value_heads": 8,
-  "pretraining_tp": 1,
-  "rms_norm_eps": 1e-05,
-  "rope_scaling": null,
-  "rope_theta": 500000.0,
-  "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.45.0",
-  "use_cache": false,
-  "vocab_size": 128256
-}
+{
+  "_name_or_path": "/home/hidelord/text-generation-webui-snapshot-2024-04-14/models/meta-llama_Meta-Llama-3-8B",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": 128001,
+  "hidden_act": "silu",
+  "hidden_size": 4096,
+  "initializer_range": 0.02,
+  "intermediate_size": 14336,
+  "max_position_embeddings": 8192,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.44.2",
+  "use_cache": true,
+  "vocab_size": 128256
+}
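Substantively, the new config points `_name_or_path` at a local text-generation-webui snapshot, drops the explicit `"head_dim": 128` entry, flips `use_cache` to `true`, and was serialized by transformers 4.44.2 rather than 4.45.0. Dropping `head_dim` is cosmetic: Llama derives it as `hidden_size / num_attention_heads` = 4096 / 32 = 128. A minimal sketch of that check, assuming a hypothetical local checkout `./model_dir` containing the new files:

```python
from transformers import AutoConfig

# Sketch only: "./model_dir" is a hypothetical local directory holding the
# new config.json shown above.
config = AutoConfig.from_pretrained("./model_dir")

# With the explicit "head_dim" entry removed, the head dimension is derived
# from the remaining fields; 4096 / 32 recovers the old value of 128.
assert config.hidden_size // config.num_attention_heads == 128
print(config.use_cache)  # True in the new config
```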
 
generation_config.json CHANGED
@@ -1,9 +1,9 @@
-{
-  "bos_token_id": 128000,
-  "do_sample": true,
-  "eos_token_id": 128001,
-  "max_length": 4096,
-  "temperature": 0.6,
-  "top_p": 0.9,
-  "transformers_version": "4.45.0"
-}
+{
+  "bos_token_id": 128000,
+  "do_sample": true,
+  "eos_token_id": 128001,
+  "max_length": 4096,
+  "temperature": 0.6,
+  "top_p": 0.9,
+  "transformers_version": "4.44.2"
+}
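Only `transformers_version` changed here; the sampling defaults that `model.generate()` falls back on (temperature 0.6, top-p 0.9, max length 4096) are untouched. A minimal sketch of how these defaults are consumed, again assuming the hypothetical `./model_dir` checkout:

```python
from transformers import GenerationConfig

# Sketch only: loads the defaults from the generation_config.json above.
gen = GenerationConfig.from_pretrained("./model_dir")
print(gen.temperature, gen.top_p, gen.max_length)  # 0.6 0.9 4096

# Keyword arguments passed to model.generate() override these defaults,
# e.g. model.generate(**inputs, temperature=0.8).
```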
special_tokens_map.json CHANGED
@@ -1,17 +1,17 @@
-{
-  "bos_token": {
-    "content": "<|begin_of_text|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "eos_token": {
-    "content": "<|eot_id|>",
-    "lstrip": false,
-    "normalized": false,
-    "rstrip": false,
-    "single_word": false
-  },
-  "pad_token": "<|eot_id|>"
-}
+{
+  "bos_token": {
+    "content": "<|begin_of_text|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "<|eot_id|>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": "<|eot_id|>"
+}
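Both sides of this diff are identical, so the file was most likely just re-serialized. The map itself pins `<|begin_of_text|>` as BOS, `<|eot_id|>` as EOS, and reuses `<|eot_id|>` as the pad token, which is what allows padded, batched inputs given that base Llama 3 ships no dedicated pad token. A minimal sketch, again with the hypothetical `./model_dir`:

```python
from transformers import AutoTokenizer

# Sketch only: the tokenizer resolves its special tokens from the
# special_tokens_map.json shown above.
tok = AutoTokenizer.from_pretrained("./model_dir")
print(tok.bos_token)  # "<|begin_of_text|>"
print(tok.eos_token)  # "<|eot_id|>"
print(tok.pad_token)  # "<|eot_id|>", reused as pad so batches can be padded
```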
tokenizer.json CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3c5cf44023714fb39b05e71e425f8d7b92805ff73f7988b083b8c87f0bf87393
-size 17209961
+oid sha256:5f51cbc4bf6932b07d2343850604db61aa9ae0561a5117a56a672a6e53d41239
+size 9496260
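tokenizer.json is tracked with Git LFS, so the diff compares pointer files rather than the tokenizer itself: `oid` is the SHA-256 of the actual file and `size` its byte count, and the drop from roughly 17.2 MB to 9.5 MB shows the serialized tokenizer really did change. A minimal sketch for verifying a downloaded copy against the new pointer:

```python
import hashlib

# Sketch only: assumes "tokenizer.json" is a local copy of the new file.
def sha256_of(path: str) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

expected = "5f51cbc4bf6932b07d2343850604db61aa9ae0561a5117a56a672a6e53d41239"
assert sha256_of("tokenizer.json") == expected
```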
tokenizer_config.json CHANGED
The diff for this file is too large to render. See raw diff