benchang1110 committed on
Commit
361679a
1 Parent(s): a8afc95

Upload config

Browse files
Files changed (2) hide show
  1. config.json +2 -1
  2. configuration_taivisionlm.py +1 -2
config.json CHANGED
@@ -18,7 +18,8 @@
18
  "num_hidden_layers": 22,
19
  "num_key_value_heads": 4,
20
  "rms_norm_eps": 1e-05,
21
- "torch_dtype": "bfloat16"
 
22
  },
23
  "transformers_version": "4.44.0",
24
  "vision_config": {
 
18
  "num_hidden_layers": 22,
19
  "num_key_value_heads": 4,
20
  "rms_norm_eps": 1e-05,
21
+ "torch_dtype": "bfloat16",
22
+ "vocab_size": 32001
23
  },
24
  "transformers_version": "4.44.0",
25
  "vision_config": {
configuration_taivisionlm.py CHANGED
@@ -79,12 +79,11 @@ class TaiVisionLMConfig(PretrainedConfig):
79
  torch_dtype = "bfloat16",
80
  transformers_version = "4.40.2",
81
  use_cache = True,
82
- vocab_size = 32000
83
  )
84
  self.num_image_tokens = (self.vision_config.image_size // self.vision_config.patch_size) ** 2
85
  self.pad_token_id = self.text_config.pad_token_id
86
  self.vision_config.projection_dim = projection_dim
87
- self._attn_implementation = None
88
  super().__init__(**kwargs)
89
 
90
  @property
 
79
  torch_dtype = "bfloat16",
80
  transformers_version = "4.40.2",
81
  use_cache = True,
82
+ vocab_size = 32001
83
  )
84
  self.num_image_tokens = (self.vision_config.image_size // self.vision_config.patch_size) ** 2
85
  self.pad_token_id = self.text_config.pad_token_id
86
  self.vision_config.projection_dim = projection_dim
 
87
  super().__init__(**kwargs)
88
 
89
  @property