Upload folder using huggingface_hub

#3
This view is limited to 50 files because the commit contains too many changes; the files not shown are chiefly the remaining model shards (model-00037-of-00117 onward).
Files changed (50)
  1. .ipynb_checkpoints/config-checkpoint.json +31 -0
  2. .ipynb_checkpoints/tokenizer_config-checkpoint.json +61 -0
  3. adapter/.ipynb_checkpoints/config-checkpoint.json +46 -0
  4. adapter/.ipynb_checkpoints/tokenizer_config-checkpoint.json +61 -0
  5. adapter/adapter_config.json +38 -0
  6. adapter/adapter_model.safetensors +3 -0
  7. adapter/added_tokens.json +4 -0
  8. adapter/config.json +46 -0
  9. adapter/special_tokens_map.json +24 -0
  10. adapter/tokenizer.model +3 -0
  11. adapter/tokenizer_config.json +61 -0
  12. added_tokens.json +4 -0
  13. config.json +31 -0
  14. generation_config.json +7 -0
  15. model-00001-of-00117.safetensors +3 -0
  16. model-00002-of-00117.safetensors +3 -0
  17. model-00003-of-00117.safetensors +3 -0
  18. model-00004-of-00117.safetensors +3 -0
  19. model-00005-of-00117.safetensors +3 -0
  20. model-00006-of-00117.safetensors +3 -0
  21. model-00007-of-00117.safetensors +3 -0
  22. model-00008-of-00117.safetensors +3 -0
  23. model-00009-of-00117.safetensors +3 -0
  24. model-00010-of-00117.safetensors +3 -0
  25. model-00011-of-00117.safetensors +3 -0
  26. model-00012-of-00117.safetensors +3 -0
  27. model-00013-of-00117.safetensors +3 -0
  28. model-00014-of-00117.safetensors +3 -0
  29. model-00015-of-00117.safetensors +3 -0
  30. model-00016-of-00117.safetensors +3 -0
  31. model-00017-of-00117.safetensors +3 -0
  32. model-00018-of-00117.safetensors +3 -0
  33. model-00019-of-00117.safetensors +3 -0
  34. model-00020-of-00117.safetensors +3 -0
  35. model-00021-of-00117.safetensors +3 -0
  36. model-00022-of-00117.safetensors +3 -0
  37. model-00023-of-00117.safetensors +3 -0
  38. model-00024-of-00117.safetensors +3 -0
  39. model-00025-of-00117.safetensors +3 -0
  40. model-00026-of-00117.safetensors +3 -0
  41. model-00027-of-00117.safetensors +3 -0
  42. model-00028-of-00117.safetensors +3 -0
  43. model-00029-of-00117.safetensors +3 -0
  44. model-00030-of-00117.safetensors +3 -0
  45. model-00031-of-00117.safetensors +3 -0
  46. model-00032-of-00117.safetensors +3 -0
  47. model-00033-of-00117.safetensors +3 -0
  48. model-00034-of-00117.safetensors +3 -0
  49. model-00035-of-00117.safetensors +3 -0
  50. model-00036-of-00117.safetensors +3 -0
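
The commit title indicates these files were pushed with huggingface_hub's upload_folder helper, whose default commit message is exactly "Upload folder using huggingface_hub". A minimal sketch of such an upload; the local directory and repo id below are hypothetical:

from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="./mixtral-8x22b-finetune",  # hypothetical local directory holding the files listed above
    repo_id="user/mixtral-8x22b-chatml",     # hypothetical target model repo
    repo_type="model",
)
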
.ipynb_checkpoints/config-checkpoint.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "_name_or_path": "v2ray/Mixtral-8x22B-v0.1",
+   "architectures": [
+     "MixtralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 32000,
+   "hidden_act": "silu",
+   "hidden_size": 6144,
+   "initializer_range": 0.02,
+   "intermediate_size": 16384,
+   "max_position_embeddings": 65536,
+   "model_type": "mixtral",
+   "num_attention_heads": 48,
+   "num_experts_per_tok": 2,
+   "num_hidden_layers": 56,
+   "num_key_value_heads": 8,
+   "num_local_experts": 8,
+   "output_router_logits": false,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000,
+   "router_aux_loss_coef": 0.001,
+   "router_jitter_noise": 0.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.40.0.dev0",
+   "use_cache": false,
+   "vocab_size": 32002
+ }
.ipynb_checkpoints/tokenizer_config-checkpoint.json ADDED
@@ -0,0 +1,61 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": true,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32000": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32001": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [],
+   "bos_token": "<s>",
+   "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "legacy": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "</s>",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false,
+   "use_fast": true
+ }
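
The chat_template above is the ChatML format: each message is wrapped as <|im_start|>role ... <|im_end|>, using the two added tokens 32000/32001. A minimal sketch of rendering it; the checkpoint path is hypothetical:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/checkpoint")  # hypothetical local path
messages = [{"role": "user", "content": "Hello!"}]
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
# prompt == "<|im_start|>user\nHello!<|im_end|>\n<|im_start|>assistant\n"
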
adapter/.ipynb_checkpoints/config-checkpoint.json ADDED
@@ -0,0 +1,46 @@
+ {
+   "_name_or_path": "v2ray/Mixtral-8x22B-v0.1",
+   "architectures": [
+     "MixtralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 32000,
+   "hidden_act": "silu",
+   "hidden_size": 6144,
+   "initializer_range": 0.02,
+   "intermediate_size": 16384,
+   "max_position_embeddings": 65536,
+   "model_type": "mixtral",
+   "num_attention_heads": 48,
+   "num_experts_per_tok": 2,
+   "num_hidden_layers": 56,
+   "num_key_value_heads": 8,
+   "num_local_experts": 8,
+   "output_router_logits": false,
+   "quantization_config": {
+     "_load_in_4bit": true,
+     "_load_in_8bit": false,
+     "bnb_4bit_compute_dtype": "bfloat16",
+     "bnb_4bit_quant_storage": "bfloat16",
+     "bnb_4bit_quant_type": "nf4",
+     "bnb_4bit_use_double_quant": true,
+     "llm_int8_enable_fp32_cpu_offload": false,
+     "llm_int8_has_fp16_weight": false,
+     "llm_int8_skip_modules": null,
+     "llm_int8_threshold": 6.0,
+     "load_in_4bit": true,
+     "load_in_8bit": false,
+     "quant_method": "bitsandbytes"
+   },
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000,
+   "router_aux_loss_coef": 0.001,
+   "router_jitter_noise": 0.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.40.0.dev0",
+   "use_cache": false,
+   "vocab_size": 32002
+ }
adapter/.ipynb_checkpoints/tokenizer_config-checkpoint.json ADDED
@@ -0,0 +1,61 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": true,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32000": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32001": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [],
+   "bos_token": "<s>",
+   "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "legacy": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "</s>",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false,
+   "use_fast": true
+ }
adapter/adapter_config.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "alpha_pattern": {},
+   "auto_mapping": null,
+   "base_model_name_or_path": "v2ray/Mixtral-8x22B-v0.1",
+   "bias": "none",
+   "fan_in_fan_out": null,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 8,
+   "lora_dropout": 0.05,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": [
+     "embed_tokens",
+     "lm_head"
+   ],
+   "peft_type": "LORA",
+   "r": 16,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "q_proj",
+     "w2",
+     "w3",
+     "o_proj",
+     "v_proj",
+     "k_proj",
+     "gate",
+     "w1"
+   ],
+   "task_type": "CAUSAL_LM",
+   "use_dora": false,
+   "use_rslora": false
+ }
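
This adapter_config describes a rank-16 LoRA (alpha 8, dropout 0.05) over the attention projections, the expert MLP weights (w1, w2, w3), and the router gate, with embed_tokens and lm_head saved in full via modules_to_save, consistent with the vocabulary being extended to 32002 tokens. A minimal sketch of attaching it with peft; the adapter directory path is hypothetical:

from peft import PeftModel
from transformers import AutoModelForCausalLM

base = AutoModelForCausalLM.from_pretrained("v2ray/Mixtral-8x22B-v0.1")  # base model named in the config
model = PeftModel.from_pretrained(base, "path/to/adapter")  # hypothetical directory holding adapter_config.json
# model = model.merge_and_unload()  # optionally fold the LoRA deltas into the base weights
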
adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7c3e6eac571b2034dd287ea2c037b3187e9ea2a77b00a0166a08708fc006e87
+ size 3409573984
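
The safetensors entries in this view are git-lfs pointer files, not the weights themselves: three lines giving the spec version, the sha256 of the real blob, and its byte size. A minimal sketch of reading one, assuming the file is checked out locally at the path below:

def parse_lfs_pointer(path):
    # each pointer line is "key value"; keep the oid and size fields
    fields = dict(line.split(" ", 1) for line in open(path) if " " in line)
    return fields["oid"].strip(), int(fields["size"])

oid, size = parse_lfs_pointer("adapter/adapter_model.safetensors")
print(oid, f"{size / 1e9:.2f} GB")  # ~3.41 GB for this adapter
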
adapter/added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "<|im_end|>": 32000,
+   "<|im_start|>": 32001
+ }
adapter/config.json ADDED
@@ -0,0 +1,46 @@
+ {
+   "_name_or_path": "v2ray/Mixtral-8x22B-v0.1",
+   "architectures": [
+     "MixtralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 32000,
+   "hidden_act": "silu",
+   "hidden_size": 6144,
+   "initializer_range": 0.02,
+   "intermediate_size": 16384,
+   "max_position_embeddings": 65536,
+   "model_type": "mixtral",
+   "num_attention_heads": 48,
+   "num_experts_per_tok": 2,
+   "num_hidden_layers": 56,
+   "num_key_value_heads": 8,
+   "num_local_experts": 8,
+   "output_router_logits": false,
+   "quantization_config": {
+     "_load_in_4bit": true,
+     "_load_in_8bit": false,
+     "bnb_4bit_compute_dtype": "bfloat16",
+     "bnb_4bit_quant_storage": "bfloat16",
+     "bnb_4bit_quant_type": "nf4",
+     "bnb_4bit_use_double_quant": true,
+     "llm_int8_enable_fp32_cpu_offload": false,
+     "llm_int8_has_fp16_weight": false,
+     "llm_int8_skip_modules": null,
+     "llm_int8_threshold": 6.0,
+     "load_in_4bit": true,
+     "load_in_8bit": false,
+     "quant_method": "bitsandbytes"
+   },
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000,
+   "router_aux_loss_coef": 0.001,
+   "router_jitter_noise": 0.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.40.0.dev0",
+   "use_cache": false,
+   "vocab_size": 32002
+ }
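
Unlike the top-level config.json, the adapter's copy records a quantization_config: the base model was loaded 4-bit with bitsandbytes (nf4, double quantization, bfloat16 compute and storage). A minimal sketch reconstructing that load, assuming the bitsandbytes backend is installed:

import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
    bnb_4bit_quant_storage=torch.bfloat16,
)
model = AutoModelForCausalLM.from_pretrained(
    "v2ray/Mixtral-8x22B-v0.1",
    quantization_config=bnb_config,
)
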
adapter/special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "</s>",
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
adapter/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+ size 493443
adapter/tokenizer_config.json ADDED
@@ -0,0 +1,61 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "add_prefix_space": true,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32000": {
+       "content": "<|im_end|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "32001": {
+       "content": "<|im_start|>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": false
+     }
+   },
+   "additional_special_tokens": [],
+   "bos_token": "<s>",
+   "chat_template": "{% if not add_generation_prompt is defined %}{% set add_generation_prompt = false %}{% endif %}{% for message in messages %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "<|im_end|>",
+   "legacy": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": "</s>",
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false,
+   "use_fast": true
+ }
added_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+   "<|im_end|>": 32000,
+   "<|im_start|>": 32001
+ }
config.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "_name_or_path": "v2ray/Mixtral-8x22B-v0.1",
+   "architectures": [
+     "MixtralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 32000,
+   "hidden_act": "silu",
+   "hidden_size": 6144,
+   "initializer_range": 0.02,
+   "intermediate_size": 16384,
+   "max_position_embeddings": 65536,
+   "model_type": "mixtral",
+   "num_attention_heads": 48,
+   "num_experts_per_tok": 2,
+   "num_hidden_layers": 56,
+   "num_key_value_heads": 8,
+   "num_local_experts": 8,
+   "output_router_logits": false,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000,
+   "router_aux_loss_coef": 0.001,
+   "router_jitter_noise": 0.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float32",
+   "transformers_version": "4.40.0.dev0",
+   "use_cache": false,
+   "vocab_size": 32002
+ }
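
This top-level config describes the merged, full-precision model (torch_dtype float32, no quantization_config), which is why the weights span 117 shards of roughly 4.8 GB each. A minimal sketch of inspecting it without downloading the weights; the repo id is hypothetical:

from transformers import AutoConfig

config = AutoConfig.from_pretrained("user/mixtral-8x22b-chatml")  # hypothetical repo id
print(config.model_type)         # "mixtral"
print(config.num_local_experts)  # 8 experts, 2 routed per token
print(config.torch_dtype)        # torch.float32, matching the full-precision shards
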
generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "do_sample": true,
+   "eos_token_id": 2,
+   "transformers_version": "4.40.0.dev0"
+ }
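
Note that generation_config.json keeps the base model's eos_token_id of 2 ("</s>"), while config.json and the tokenizer use 32000 ("<|im_end|>"), so generation driven by this file alone may not stop at the chat end-of-turn token. A minimal sketch of loading it; the repo id is hypothetical:

from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("user/mixtral-8x22b-chatml")  # hypothetical repo id
print(gen.do_sample, gen.eos_token_id)  # True 2
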
model-00001-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed9ccee4b063e10c87cc2dfceff015e0a3e678e2fb90f3abbfb1fc8e7915aa9c
+ size 4762879840
model-00002-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f22d549ed12f075f97104a759327e1f500f3d22693c2d1fde680317b123223ac
+ size 4831839800
model-00003-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:69c6b76c9cce92c00ba519b615a8f06fbe59dafc4e3de4b79a06408c2a9c1514
+ size 4781754592
model-00004-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:243be14b5542d668490df8848ff2b1589c6486165bf69c38cf8b48e47cb5d8f3
+ size 4831839800
model-00005-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:570b13e32ac883b39d079ac4c34c836f13d681e67963d21f5b3f9c6404a39b22
+ size 4781754592
model-00006-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3a8a5c88af32c56a7ca1dc2082402effcae75e4ea341241cbc56009820614845
+ size 4831839800
model-00007-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ca84f92b0c4bb27248d8e51a02cc5664fe3dc3f4121eed732b25a9ec53e3220
+ size 4781754592
model-00008-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c5045952f7053f129a938a4c1f92f271d8662bf3302dc2544a309d015a244717
+ size 4831839800
model-00009-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3d49115ba7f3eb498d8e90f46ee39fca11b8e8cd46e3b3650f192291a49e1e79
+ size 4781754592
model-00010-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c24577203ef245bf9c9c4733263ff803955aa45198ac695ef058e5223ec50c8
+ size 4831839800
model-00011-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:61eb41a21b93c2762da0381320e8a2815ef22c72da0204217d05c064105b606b
+ size 4781754592
model-00012-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb9dbb091b3bff6126fd5cb482672d64df73f549e2f35e0ee0a0b05b6cef6114
+ size 4831839800
model-00013-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:50fadd1d36aeaf2c6c97157c3668695113e86bc823a9510d29eff3d26685920d
+ size 4781754592
model-00014-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e63553794ae97e292ccabcf8c279aa58e96fa7ffc366888f6fd28b61bf728094
+ size 4831839800
model-00015-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9836e419e59e2997b020c82fabf89431def9fbeb30454fe472f1b245f9c280cf
+ size 4781754592
model-00016-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33bf7f116407b576afcaf16a7aef0a7daa43694ff49a90e6444ffb78d65819be
+ size 4831839800
model-00017-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29a9d2cf8345f044fba69fa5e3db916b922f23de90b33f1ca20af7c9a5888ed3
+ size 4781754592
model-00018-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1aadacd71c89329cbb00956f5184a44e2b00ec70d990186fee188942faae39e3
+ size 4831839800
model-00019-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa2aa34ae8d24e7b01224c11e4d5d8fc68d824d56e948ac9329d670394442107
+ size 4781754592
model-00020-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e6e48d5f00e00a9722ade9d3aca8b11f984314c724fd1ed6f46009af47dd687
+ size 4831839800
model-00021-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:044656ef715db71f006c4e9cdd6a790f0b90017d3378dc0ef44a75aa7dfa62f8
+ size 4982884240
model-00022-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc0bfa03e064a941ad41dfff8359414bce4ba670c065b42bd52bdcec3d18dfcd
+ size 4630710168
model-00023-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b43e20babd6871217657425e7690a64407f62740150529ca6c7e27774a531c19
+ size 4831839808
model-00024-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fcfff7ffae614189f50adff5519690df74290d122b241afd6de26319e051f011
+ size 4781754608
model-00025-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86ebf3ad56678d6860cac36cfcf9c42061129e91ebe8af6ac34c68ebefdece0b
+ size 4831839808
model-00026-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3743756ea336fec881893a1236f70fc75d635783e42a574bfac763a7495f7d01
+ size 4781754608
model-00027-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:41a7d915254a7191916791c7d26a700a3c9043f87e82b7244bdb88d554aff7f9
+ size 4831839808
model-00028-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:804c38ba54981e0cc73ab557ed2e61676dc66802865b70d4363001ad53898ccf
+ size 4781754608
model-00029-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f9a91e9b92885c8f316f8a5bb966fdc7293bc82ebeb0f3e867f9d2ea8870623
+ size 4831839808
model-00030-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fd9a6501726d7f0616eef570b17a26c9ff30a1295b7b2ffb7ffebb635f54347f
+ size 4781754608
model-00031-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f3ce74a0727d90631ffe4aa90a5c3c43caa32d3e8c7f0400f0bf18ed4ad6caf
+ size 4831839808
model-00032-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:acd28605ac4b054904ed535da531cbd965e812b2b2d0a6a8fa5825a8973d4d11
+ size 4781754608
model-00033-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8ec7fee4a70220c3dae069c26858b6a8fa0ea8da756199ea005c77274800452
+ size 4831839808
model-00034-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4cc54c2e044db500106bfff731c7d1ae0aa3c5cfa38e366f7b999c8fbf8f4cf5
+ size 4781754608
model-00035-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:725e9151be7acebc2d1f40e10d8e11688254f540cbfa2d06effb0d19c43410e4
+ size 4831839808
model-00036-of-00117.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cdbc7a37fa36bea7902e2af4f7846f1be91b13900a5f6646589b2d16ac376058
+ size 4781754608