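# `spaces` is imported before the model/processor modules so the Hugging Face
# ZeroGPU runtime can hook GPU allocation early; the imported modules
# presumably apply the @spaces.GPU decorator to their GPU-bound functions.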
import spaces
from model_loader import load_model_and_processor
from image_generator import process_and_generate
from gradio_interface import create_gradio_interface

if __name__ == "__main__":
    # flash-attn is now installed via requirements.txt instead of at runtime:
    # import subprocess
    # subprocess.run('pip install "flash-attn>=2.1.0" --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)

    # Create and launch the Gradio interface; allowed_paths lets Gradio serve
    # files from the filesystem root (e.g. generated images).
    demo = create_gradio_interface(process_and_generate)
    demo.launch(allowed_paths=["/"])