Upload folder using huggingface_hub
- README.md +34 -0
- config.json +5 -1
- mergekit_moe_config.yml +2 -2
- model-00001-of-00001.safetensors +1 -1
README.md
ADDED
@@ -0,0 +1,34 @@
+---
+license: apache-2.0
+tags:
+- moe
+- merge
+- mergekit
+- lazymergekit
+- cognitivecomputations/dolphin-2_6-phi-2
+- lxuechen/phi-2-dpo
+---
+
+![](https://i.imgur.com/UOb2fvh.jpg)
+
+# phixtral-2x2.8
+
+phixtral-2x2.8 is a Mixture of Experts (MoE) made with the following models using a custom version of mergekit:
+* [cognitivecomputations/dolphin-2_6-phi-2](https://huggingface.co/cognitivecomputations/dolphin-2_6-phi-2)
+* [lxuechen/phi-2-dpo](https://huggingface.co/lxuechen/phi-2-dpo)
+
+## 🧩 Configuration
+
+```yaml
+base_model: cognitivecomputations/dolphin-2_6-phi-2
+gate_mode: cheap_embed
+experts:
+  - source_model: cognitivecomputations/dolphin-2_6-phi-2
+    positive_prompts: [""]
+  - source_model: lxuechen/phi-2-dpo
+    positive_prompts: [""]
+```
+
+## 💻 Usage
+
+This architecture is not compatible with the transformers library. I'm working on hacking something to run it. Contact me if you're interested!
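The configuration in the README above is the input to mergekit's MoE merge. A minimal sketch of how such a config is typically run, assuming upstream mergekit's `mergekit-moe` entry point; since the card says a custom mergekit build was used, the actual command and output path are illustrative only:

```python
import subprocess

# Run mergekit's MoE merge on the config shown above. The entry-point name
# and output directory are assumptions: the model card says a custom
# mergekit build was used, so the real invocation may differ.
subprocess.run(
    ["mergekit-moe", "mergekit_moe_config.yml", "./phixtral-2x2.8"],
    check=True,
)
```

The output directory would then contain merged weights and a patched config, i.e. files of the shape changed in the hunks below.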
config.json
CHANGED
@@ -1,11 +1,15 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "cognitivecomputations/dolphin-2_6-phi-2",
   "activation_function": "gelu_new",
   "architectures": [
     "MixtralForCausalLM"
   ],
   "attention_dropout": 0.0,
   "attn_pdrop": 0.0,
+  "auto_map": {
+    "AutoConfig": "cognitivecomputations/dolphin-2_6-phi-2--configuration_phi.PhiConfig",
+    "AutoModelForCausalLM": "cognitivecomputations/dolphin-2_6-phi-2--modeling_phi.PhiForCausalLM"
+  },
   "bos_token_id": null,
   "embd_pdrop": 0.0,
   "eos_token_id": null,
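The added `auto_map` block is transformers' standard hook for custom architectures: with `trust_remote_code=True`, the Auto classes are resolved from the `configuration_phi.py` / `modeling_phi.py` files in the dolphin-2_6-phi-2 repo rather than from the transformers package itself. A minimal sketch of that loading path; the repo id is a placeholder, and per the usage note above this checkpoint is not expected to load cleanly yet:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "<this-repo>"  # placeholder; substitute the actual Hub repo id

# trust_remote_code=True lets transformers follow the auto_map entries
# above and import the custom PhiConfig / PhiForCausalLM classes.
tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

inputs = tokenizer("def fibonacci(n):", return_tensors="pt")
print(tokenizer.decode(model.generate(**inputs, max_new_tokens=32)[0]))
```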
mergekit_moe_config.yml
CHANGED
@@ -3,5 +3,5 @@ gate_mode: cheap_embed
 experts:
   - source_model: cognitivecomputations/dolphin-2_6-phi-2
     positive_prompts: [""]
-  - source_model:
-    positive_prompts: [""]
+  - source_model: lxuechen/phi-2-dpo
+    positive_prompts: [""]
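This hunk points the second expert at lxuechen/phi-2-dpo. A quick sanity check that the updated config parses and every expert names a source model (a sketch, assuming PyYAML is installed):

```python
import yaml

with open("mergekit_moe_config.yml") as f:
    cfg = yaml.safe_load(f)

# Every expert must name a source model, or the MoE merge has nothing
# to route tokens to.
for expert in cfg["experts"]:
    assert expert["source_model"], f"expert missing source_model: {expert}"
print([e["source_model"] for e in cfg["experts"]])
```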
model-00001-of-00001.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ca5d85304bec3b0f81ea5c166c72f14208ff1640e02c8cdf9f23572ee949a821
 size 8916015136
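The Git LFS pointer records the new shard's SHA-256 and byte size, which is enough to verify a download end to end. A minimal check, assuming the shard has been pulled into the working directory:

```python
import hashlib
import os

path = "model-00001-of-00001.safetensors"
expected_oid = "ca5d85304bec3b0f81ea5c166c72f14208ff1640e02c8cdf9f23572ee949a821"
expected_size = 8916015136

# The LFS pointer stores the sha256 of the real file; recompute it in
# 1 MiB chunks so the ~8.9 GB shard never has to fit in memory.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == expected_size, "size mismatch"
assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("shard verified")
```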