raosharjeel committed on
Commit
dc679c1
1 Parent(s): 9aa250a

Upload 3 files

Files changed (3)
  1. README.md +22 -0
  2. config.json +34 -0
  3. model.safetensors +3 -0
README.md ADDED
@@ -0,0 +1,22 @@
+ ---
+ language:
+ - en
+ license: apache-2.0
+ tags:
+ - text-generation-inference
+ - transformers
+ - unsloth
+ - mistral
+ - trl
+ base_model: unsloth/mistral-7b-bnb-4bit
+ ---
+
+ # Uploaded model
+
+ - **Developed by:** raosharjeel
+ - **License:** apache-2.0
+ - **Finetuned from model:** unsloth/mistral-7b-bnb-4bit
+
+ This mistral model was trained 2x faster with [Unsloth](https://github.com/unslothai/unsloth) and Huggingface's TRL library.
+
+ [<img src="https://raw.githubusercontent.com/unslothai/unsloth/main/images/unsloth%20made%20with%20love.png" width="200"/>](https://github.com/unslothai/unsloth)
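The card itself does not show how to use the uploaded LoRA adapter, so here is a minimal inference sketch assuming it is loaded on top of the 4-bit base model with Hugging Face `transformers` and `peft`. The adapter repo id below is a hypothetical placeholder, and loading the bnb-4bit base requires `bitsandbytes` to be installed.

```python
# Minimal sketch: attach this LoRA adapter to the 4-bit Mistral base and generate.
# "raosharjeel/your-adapter-repo" is a placeholder, not the actual repo id.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_id = "unsloth/mistral-7b-bnb-4bit"        # base model named in the card
adapter_id = "raosharjeel/your-adapter-repo"   # hypothetical placeholder

tokenizer = AutoTokenizer.from_pretrained(base_id)
base = AutoModelForCausalLM.from_pretrained(base_id, device_map="auto")
model = PeftModel.from_pretrained(base, adapter_id)  # load the LoRA weights

prompt = "Explain LoRA fine-tuning in one sentence."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(out[0], skip_special_tokens=True))
```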
config.json ADDED
@@ -0,0 +1,34 @@
+ {
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "unsloth/mistral-7b-bnb-4bit",
+ "bias": "none",
+ "fan_in_fan_out": false,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 16,
+ "lora_dropout": 0,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": null,
+ "peft_type": "LORA",
+ "r": 16,
+ "rank_pattern": {},
+ "revision": "unsloth",
+ "target_modules": [
+ "down_proj",
+ "k_proj",
+ "up_proj",
+ "v_proj",
+ "o_proj",
+ "q_proj",
+ "gate_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "use_dora": false,
+ "use_rslora": false
+ }
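For reference, the adapter settings in this config.json correspond to a rank-16 LoRA over all attention and MLP projections. The sketch below restates those hyperparameters as a `peft.LoraConfig`; the actual training script is not part of this commit, so this is illustrative only.

```python
# Hedged reconstruction of the uploaded adapter's hyperparameters as a LoraConfig.
from peft import LoraConfig

lora_config = LoraConfig(
    r=16,                   # "r" in config.json
    lora_alpha=16,          # "lora_alpha"
    lora_dropout=0.0,       # "lora_dropout"
    bias="none",            # "bias"
    task_type="CAUSAL_LM",  # "task_type"
    target_modules=[        # "target_modules"
        "q_proj", "k_proj", "v_proj", "o_proj",
        "gate_proj", "up_proj", "down_proj",
    ],
)
```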
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c51b8a1db44c30f0c3773251608c6238cf6fe14e7394718484dd885d5bf5d48
+ size 167832240