gmastrapas committed on
Commit
7d2a362
1 Parent(s): 83560ca

fix: throw warnings if xformers or flash-attn can't be used

Files changed (1)
  1. configuration_clip.py +5 -1
configuration_clip.py CHANGED

@@ -295,7 +295,11 @@ class JinaCLIPConfig(PretrainedConfig):
         else:
             self.torch_dtype = torch_dtype
 
-        if not self.use_text_flash_attn or not torch.cuda.is_available():
+        use_text_flash_attn = (
+            self.use_text_flash_attn if self.use_text_flash_attn is not None
+            else self.text_config.hf_model_config_kwargs.get('use_flash_attn', False)
+        )
+        if not use_text_flash_attn or not torch.cuda.is_available():
             self.torch_dtype = torch.float32
 
     @classmethod
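
For context, a minimal runnable sketch of the fallback this hunk introduces: the explicit use_text_flash_attn flag wins when set, otherwise the 'use_flash_attn' kwarg forwarded to the underlying HF text model is consulted, and without CUDA the dtype is forced to float32. The resolve_torch_dtype helper and _TextConfigStub below are hypothetical stand-ins for illustration; only the attribute names in the conditional come from the diff, and the warning text is an assumption based on the commit message.

    import warnings

    import torch


    class _TextConfigStub:
        # Hypothetical stand-in for the text tower config; holds only the
        # field the diff reads (hf_model_config_kwargs).
        def __init__(self, hf_model_config_kwargs=None):
            self.hf_model_config_kwargs = hf_model_config_kwargs or {}


    def resolve_torch_dtype(use_text_flash_attn, text_config, torch_dtype):
        # Explicit flag wins; otherwise fall back to the 'use_flash_attn'
        # kwarg forwarded to the underlying HF text model (default False),
        # exactly as in the diff above.
        effective = (
            use_text_flash_attn if use_text_flash_attn is not None
            else text_config.hf_model_config_kwargs.get('use_flash_attn', False)
        )
        if not effective or not torch.cuda.is_available():
            if effective:
                # Assumed warning text: flash-attn was requested but CUDA is
                # missing, so we degrade to float32 (per the commit message).
                warnings.warn(
                    'flash-attn requested but CUDA is unavailable; '
                    'falling back to torch.float32'
                )
            return torch.float32
        return torch_dtype


    # Example: on a CPU-only machine this prints torch.float32.
    cfg = _TextConfigStub({'use_flash_attn': True})
    print(resolve_torch_dtype(None, cfg, torch.float16))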