thomasgauthier committed
Commit 22b63ff
Parent: a5d57e1

flash attention version fix

Files changed (1): app.py +1 -2
app.py CHANGED
@@ -1,12 +1,11 @@
 import spaces
-import torch
 from model_loader import load_model_and_processor
 from image_generator import process_and_generate
 from gradio_interface import create_gradio_interface
 
 if __name__ == "__main__":
     import subprocess
-    subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
+    subprocess.run('pip install "flash-attn>=2.1.0" --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 
 
 
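
Context on the pattern (a note, not part of the commit): flash-attn's setup.py compiles CUDA kernels at install time unless the FLASH_ATTENTION_SKIP_CUDA_BUILD environment variable is set, so Spaces without a GPU available at build time commonly defer the install to app startup, as app.py does here. One caveat: passing env= to subprocess.run replaces the child's entire environment rather than extending it, which can drop PATH and break the pip invocation. A minimal sketch of the same startup install with the environment preserved (the os.environ merge and check=True are additions for illustration, not from the commit):

import os
import subprocess

# Same startup install as in the diff above, but merging os.environ so pip
# still sees PATH and the rest of the environment; check=True raises
# CalledProcessError if the install fails instead of continuing silently.
subprocess.run(
    'pip install "flash-attn>=2.1.0" --no-build-isolation',
    env={**os.environ, "FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
    shell=True,
    check=True,
)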