gmastrapas committed
Commit: 6e76918
Parent: 4f6f082

fix: throw warnings if xformers or flash-attn can't be used

Files changed (1):
  modeling_clip.py (+2, -2)
modeling_clip.py CHANGED
@@ -136,7 +136,7 @@ def _resolve_attention_libs(config: JinaCLIPConfig):
         if not torch.cuda.is_available():
             warnings.warn('Flash attention requires CUDA, disabling')
             return False
-        if not importlib.util.find_spec('flash_attn') is None:
+        if importlib.util.find_spec('flash_attn') is None:
             warnings.warn(
                 'Flash attention is not installed. Check '
                 'https://github.com/Dao-AILab/flash-attention?'
@@ -152,7 +152,7 @@ def _resolve_attention_libs(config: JinaCLIPConfig):
         if not torch.cuda.is_available():
             warnings.warn('xFormers requires CUDA, disabling')
             return False
-        if not importlib.util.find_spec('xformers') is None:
+        if importlib.util.find_spec('xformers') is None:
             warnings.warn(
                 'xFormers is not installed. Check '
                 'https://github.com/facebookresearch/xformers?'
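
The gist of the fix: importlib.util.find_spec() returns None when a package is not importable, so the availability guard must warn on "is None"; the previous "not ... is None" condition inverted the test and warned only when the library was in fact installed. Below is a minimal, self-contained sketch of the corrected check; the helper name _flash_attn_usable is hypothetical and not part of modeling_clip.py.

import importlib.util
import warnings

import torch


def _flash_attn_usable() -> bool:
    # Hypothetical helper illustrating the guard fixed in this commit.
    if not torch.cuda.is_available():
        # Flash attention kernels need a CUDA device at runtime.
        warnings.warn('Flash attention requires CUDA, disabling')
        return False
    if importlib.util.find_spec('flash_attn') is None:
        # find_spec() returns None when the package is missing, so this
        # branch (and its warning) now fires only when flash-attn is absent.
        warnings.warn(
            'Flash attention is not installed. Check '
            'https://github.com/Dao-AILab/flash-attention '
            'for installation instructions, disabling'
        )
        return False
    return True

The xFormers branch follows the same pattern, with 'xformers' in place of 'flash_attn'.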