{ "architectures": [ "TaiVisionForCausalLM" ], "auto_map": { "AutoConfig": "configuration_taivisionlm.TaiVisionLMConfig", "AutoModelForCausalLM": "modeling_taivisionlm.TaiVisionForCausalLM" }, "hidden_size": 2048, "ignore_index": -100, "image_token_index": 32000, "model_type": "taivisionlm", "num_image_tokens": 196, "projection_dim": 768, "text_config": { "architecture": [ "LlamaForCausalLM" ], "hidden_size": 2048, "intermediate_size": 5632, "model_type": "llama", "num_hidden_layers": 22, "num_key_value_heads": 4, "rms_norm_eps": 1e-05, "torch_dtype": "bfloat16", "vocab_size": 32001 }, "torch_dtype": "float32", "transformers_version": "4.44.0", "vision_config": { "model_type": "siglip_vision_model", "projection_dim": 768 } }