wyu1 committed on
Commit
263deea
1 Parent(s): b4f320e

Add Leopard-LLaVA

Files changed (1)
  1. config.json +46 -0
config.json ADDED
@@ -0,0 +1,46 @@
+{
+  "architectures": [
+    "LlavaForConditionalGeneration"
+  ],
+  "ignore_index": -100,
+  "image_token_index": 128200,
+  "model_type": "llava",
+  "projector_hidden_act": "gelu",
+  "text_config": {
+    "architectures": [
+      "LlavaLlamaForCausalLM"
+    ],
+    "bos_token_id": 128000,
+    "eos_token_id": 128001,
+    "intermediate_size": 14336,
+    "max_position_embeddings": 16384,
+    "max_sequence_length": 16384,
+    "model_type": "llama",
+    "num_key_value_heads": 8,
+    "rms_norm_eps": 1e-05,
+    "rope_scaling": {
+      "factor": 8.0,
+      "high_freq_factor": 4.0,
+      "low_freq_factor": 1.0,
+      "original_max_position_embeddings": 8192,
+      "rope_type": "llama3"
+    },
+    "rope_theta": 500000,
+    "torch_dtype": "float16",
+    "vocab_size": 128256
+  },
+  "transformers_version": "4.44.2",
+  "vision_config": {
+    "hidden_act": "gelu_pytorch_tanh",
+    "hidden_size": 1152,
+    "image_size": 364,
+    "intermediate_size": 4304,
+    "layer_norm_eps": 1e-06,
+    "model_type": "siglip_vision_model",
+    "num_attention_heads": 16,
+    "num_hidden_layers": 27,
+    "patch_size": 14
+  },
+  "vision_feature_layer": -2,
+  "vision_feature_select_strategy": "default"
+}
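
The config above pairs a Llama-3-style text backbone (llama3 RoPE scaling, 16384-token context) with a SigLIP vision tower (364px images, 14px patches) under the standard transformers llava architecture. A minimal sketch of inspecting it after download; the repo id "wyu1/Leopard-LLaVA" is an assumption based on the committer's username, not confirmed by this commit:

from transformers import LlavaConfig

# Load the config added in this commit (repo id is an assumed path).
config = LlavaConfig.from_pretrained("wyu1/Leopard-LLaVA")
print(config.model_type)                # "llava"
print(config.image_token_index)         # 128200
print(config.text_config.rope_scaling)  # llama3 scaling, factor 8.0
print(config.vision_config.image_size)  # 364 (SigLIP vision tower)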