ChandimaPrabath committed on
Commit
7a8d779
1 Parent(s): 30085b1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -1
app.py CHANGED
@@ -4,7 +4,9 @@ import spaces
4
  import re
5
  from PIL import Image
6
 
7
- # No need to install flash-attn since it's GPU-specific
 
 
8
 
9
  model = AutoModelForCausalLM.from_pretrained('gokaygokay/Florence-2-SD3-Captioner', trust_remote_code=True).eval()
10
 
 
4
  import re
5
  from PIL import Image
6
 
7
+ # Install the necessary packages
8
+ import subprocess
9
+ subprocess.run('pip install flash-attn einops --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
10
 
11
  model = AutoModelForCausalLM.from_pretrained('gokaygokay/Florence-2-SD3-Captioner', trust_remote_code=True).eval()
12