Spaces: Running on A10G
Commit • 47a017a
1 Parent(s): 4ac3513
Remove stuff from dev env
app.py CHANGED
@@ -9,15 +9,14 @@ from constants import *
 from inversion_utils import *
 from modified_pipeline_semantic_stable_diffusion import SemanticStableDiffusionPipeline
 from torch import autocast, inference_mode
-from diffusers import
+from diffusers import StableDiffusionPipeline
 from diffusers import DDIMScheduler
 from transformers import AutoProcessor, BlipForConditionalGeneration
 
-torch.cuda.empty_cache()
 # load pipelines
 sd_model_id = "stabilityai/stable-diffusion-2-1-base"
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
-sd_pipe =
+sd_pipe = StableDiffusionPipeline.from_pretrained(sd_model_id).to(device)
 sd_pipe.scheduler = DDIMScheduler.from_config(sd_model_id, subfolder = "scheduler")
 sem_pipe = SemanticStableDiffusionPipeline.from_pretrained(sd_model_id).to(device)
 blip_processor = AutoProcessor.from_pretrained("Salesforce/blip-image-captioning-base")
@@ -792,4 +791,4 @@ with gr.Blocks(css="style.css") as demo:
 
 
 demo.queue()
-demo.launch(
+demo.launch()
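For context, a minimal sketch of how the pipeline setup in app.py reads after this commit. The torch and gradio imports, the placeholder Blocks UI, and the omission of the repo-local SemanticStableDiffusionPipeline are assumptions added to keep the sketch self-contained; the pipeline-loading lines follow the new side of the diff above.

import torch
import gradio as gr
from diffusers import StableDiffusionPipeline, DDIMScheduler
from transformers import AutoProcessor, BlipForConditionalGeneration

# load pipelines (the new lines from this commit: the SD pipeline now comes
# straight from the Hub rather than a dev-environment source, and the explicit
# torch.cuda.empty_cache() call at startup is gone)
sd_model_id = "stabilityai/stable-diffusion-2-1-base"
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
sd_pipe = StableDiffusionPipeline.from_pretrained(sd_model_id).to(device)

# Swap in a DDIM scheduler, as in the diff. Passing a Hub id to from_config is
# deprecated in recent diffusers releases, which prefer
# DDIMScheduler.from_pretrained(sd_model_id, subfolder="scheduler").
sd_pipe.scheduler = DDIMScheduler.from_config(sd_model_id, subfolder="scheduler")

# The real app.py also loads the repo-local SemanticStableDiffusionPipeline and
# a BLIP captioning processor here (see the unchanged context lines above).
blip_processor = AutoProcessor.from_pretrained("Salesforce/blip-image-captioning-base")

# Placeholder UI: the actual Space builds a full editing interface inside
# gr.Blocks(css="style.css").
with gr.Blocks() as demo:
    gr.Markdown("Stable Diffusion + DDIM demo placeholder")

demo.queue()    # enable request queuing on the Space
demo.launch()   # launch with default arguments, as in the new version

Run as written, this downloads the Stable Diffusion 2.1 base weights from the Hugging Face Hub on first start and serves the Gradio demo with default launch settings.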