BoyaWu10 committed on
Commit a53c846
1 Parent(s): 8807d4f

Update model

adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "susnato/phi-1_5_dev",
+  "base_model_name_or_path": "microsoft/phi-1_5",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -19,10 +19,12 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "query_key_value",
+    "k_proj",
     "fc1",
-    "dense",
-    "fc2"
+    "fc2",
+    "v_proj",
+    "q_proj",
+    "dense"
   ],
   "task_type": "CAUSAL_LM"
 }
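
The switch from the fused "query_key_value" projection to separate "q_proj", "k_proj", and "v_proj" entries presumably follows the attention-module naming in the official microsoft/phi-1_5 implementation, which the base model path now points to. A minimal PEFT sketch that would produce an adapter config with these target modules is given below; the rank and alpha values are placeholders, since this diff does not show them.

# Minimal sketch: building a LoRA adapter over microsoft/phi-1_5 with the
# target modules from the updated adapter_config.json. The r and lora_alpha
# values are placeholders, not taken from this commit.
import torch
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

base = AutoModelForCausalLM.from_pretrained(
    "microsoft/phi-1_5",
    torch_dtype=torch.float16,
    trust_remote_code=True,
)

lora_config = LoraConfig(
    r=16,                  # placeholder rank
    lora_alpha=32,         # placeholder scaling
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=["k_proj", "fc1", "fc2", "v_proj", "q_proj", "dense"],
)

model = get_peft_model(base, lora_config)
model.print_trainable_parameters()
# Saving with model.save_pretrained(...) writes an adapter_config.json and
# adapter_model.safetensors analogous to the files in this commit.
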
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:58aeb34022d805bffee5f7bcb2754d2e1754d6090c801066b6969fb479f4a261
-size 201352544
+oid sha256:5a6f542e75de4096ffc5ee10ebf67be01391457a499ca15b2fa29968cdc8f21f
+size 226531176
config.json CHANGED
@@ -4,6 +4,10 @@
     "PhiForCausalLM"
   ],
   "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_phi.PhiConfig",
+    "AutoModelForCausalLM": "modeling_phi.PhiForCausalLM"
+  },
   "bos_token_id": 50256,
   "embd_pdrop": 0.0,
   "eos_token_id": 50256,
@@ -18,13 +22,12 @@
   "mm_hidden_size": 1152,
   "mm_projector_lr": 2e-05,
   "mm_projector_type": "mlp2x_gelu",
-  "mm_vision_select_feature": "patch",
   "mm_vision_tower": "google/siglip-so400m-patch14-384",
   "model_type": "bunny-phi",
   "num_attention_heads": 32,
   "num_hidden_layers": 24,
+  "num_key_value_heads": 32,
   "partial_rotary_factor": 0.5,
-  "pretraining_tp": 1,
   "qk_layernorm": false,
   "resid_pdrop": 0.0,
   "rope_scaling": null,
@@ -32,6 +35,7 @@
   "tie_word_embeddings": false,
   "tokenizer_model_max_length": 2048,
   "tokenizer_padding_side": "right",
+  "torch_dtype": "float16",
   "transformers_version": "4.36.2",
   "tune_mm_mlp_adapter": false,
   "use_cache": true,
non_lora_trainables.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e15ba8b306361eeb950fd8b2d3ebc2cf82bedec84be54cd742c04547cf48a3bd
+oid sha256:d16e23f2f342c41d69dd110c0ee9d185adc4a0d78a751a72971c61aab117b94f
 size 13117616
trainer_state.json CHANGED
The diff for this file is too large to render. See raw diff