LongVU_Llama3_2_3B / config.json
{
"_name_or_path": "/tmp/iopath_cache/manifold_cache/tree/users/shenx/finetune/09281004-cambrian_llama3_2_t576_ov",
"architectures": [
"CambrianLlamaForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"bos_token_id": 128000,
"connect_layer": 2,
"connector_depth": 3,
"connector_only": true,
"dino_threshold": 0.83,
"drop_threshold": 0.8,
"eos_token_id": [
128001,
128008,
128009
],
"frame_pos": false,
"freeze_mm_mlp_adapter": false,
"hidden_act": "silu",
"hidden_size": 3072,
"highres": true,
"highres_connect": false,
"image_aspect_ratio": "pad",
"image_position": 91,
"image_token_len": 144,
"initializer_range": 0.02,
"intermediate_size": 8192,
"is_image_newline": true,
"is_st_sampler": false,
"lowres_token": 8,
"max_position_embeddings": 131072,
"mlp_bias": false,
"mm_patch_merge_type": "flat",
"mm_projector_lr": null,
"mm_projector_type": "sva",
"mm_use_im_patch_token": false,
"mm_use_im_start_end": false,
"mm_vision_sampler_lr": null,
"mm_vision_select_feature": "patch",
"mm_vision_select_layer": -2,
"mm_vision_tower_aux_list": [
"siglip/CLIP-ViT-SO400M-14-384",
"facebook/dinov2-giant-res378"
],
"mm_vision_tower_aux_token_len_list": [
576,
576
],
"mm_vision_tower_lr": null,
"model_type": "cambrian_llama",
"num_attention_heads": 24,
"num_hidden_layers": 28,
"num_key_value_heads": 8,
"num_of_vision_sampler_layers": 10,
"num_query_group": 1,
"pretraining_tp": 1,
"query_num_list": [
144
],
"rms_norm_eps": 1e-05,
"rope_scaling": {
"factor": 32.0,
"high_freq_factor": 4.0,
"low_freq_factor": 1.0,
"original_max_position_embeddings": 8192,
"rope_type": "llama3"
},
"rope_theta": 500000.0,
"spmd_debug": null,
"spmd_fsdp_sharding": null,
"spmd_mesh": null,
"start_of_vision_sampler_layers": 0,
"stride_of_vision_sampler_layers": 3,
"tie_word_embeddings": false,
"tokenizer_model_max_length": 8192,
"tokenizer_padding_side": "right",
"torch_dtype": "float32",
"transformers_version": "4.44.2",
"tune_mm_mlp_adapter": false,
"unfreeze_mm_vision_tower": false,
"use_cache": false,
"use_mm_proj": true,
"vision_hidden_size": 1024,
"vision_tower_aux_token_len_list": [
576,
576
],
"vocab_size": 128256
}
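
For reference, a minimal sketch of how one might inspect this config and derive a few architecture facts from it. Since `model_type` is `cambrian_llama`, a custom architecture from the LongVU/Cambrian codebase rather than a stock `transformers` model, the file is parsed directly with the standard library here instead of going through `AutoConfig`. The local filename `config.json` is an assumption.

```python
# Minimal sketch: read the LongVU_Llama3_2_3B config.json above and print
# a few quantities implied by its fields. Assumes the file sits in the
# current directory under the name "config.json".
import json

with open("config.json") as f:
    cfg = json.load(f)

# Llama-3.2-3B language backbone: 28 layers, hidden size 3072.
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]            # 3072 / 24 = 128
gqa_groups = cfg["num_attention_heads"] // cfg["num_key_value_heads"]  # 24 / 8 = 3

print(f"layers={cfg['num_hidden_layers']}, head_dim={head_dim}, "
      f"GQA groups={gqa_groups}, vocab={cfg['vocab_size']}")

# Two auxiliary vision towers (SigLIP and DINOv2), each contributing 576
# tokens before the SVA projector reduces them to image_token_len=144.
for tower, n_tok in zip(cfg["mm_vision_tower_aux_list"],
                        cfg["mm_vision_tower_aux_token_len_list"]):
    print(f"{tower}: {n_tok} tokens -> {cfg['image_token_len']} after projector")
```

Loading the weights themselves requires the model classes from the LongVU repository (e.g. `CambrianLlamaForCausalLM` named in `architectures`); plain `transformers` 4.44.2 does not register the `cambrian_llama` model type on its own.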