Spaces: Running on Zero

Commit 7c9dc5d
Parent(s): 346394a
unknown committed: cuda
app.py CHANGED
@@ -64,7 +64,7 @@ class FoleyController:
         self.savedir_sample = os.path.join(self.savedir, "sample")
         os.makedirs(self.savedir, exist_ok=True)
 
-        self.device = "cuda"
+        self.device = "cuda"
 
         self.pipeline = None
 
@@ -120,6 +120,12 @@ class FoleyController:
 
         self.pipeline.load_ip_adapter(fc_ckpt, subfolder='semantic', weight_name='semantic_adapter.bin', image_encoder_folder=None)
 
+        # move to gpu
+        self.time_detector = self.time_detector.to(self.device)
+        self.pipeline = self.pipeline.to(self.device)
+        self.vocoder = self.vocoder.to(self.device)
+        self.image_encoder = self.image_encoder.to(self.device)
+
         gr.Info("Load Finish!")
         print("Load Finish!")
         self.loaded = True
@@ -138,12 +144,7 @@ class FoleyController:
         sample_step_slider,
         cfg_scale_slider,
         seed_textbox,
-    ):
-        # move to gpu
-        self.time_detector = self.time_detector.to(self.device)
-        self.pipeline = self.pipeline.to(self.device)
-        self.vocoder = self.vocoder.to(self.device)
-        self.image_encoder = self.image_encoder.to(self.device)
+    ):
 
         vision_transform_list = [
             torchvision.transforms.Resize((128, 128)),
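
The net effect of this commit is to move the `.to(self.device)` placement of the four models out of the inference method (the one whose signature ends at `):` in the last hunk) and into the checkpoint-loading step, so the move to the GPU happens once at load time rather than on every call. Below is a minimal, self-contained sketch of the resulting pattern; the `nn.Linear` placeholders, the `load_models` and `foley` method names, and the CPU fallback are assumptions added for illustration, not code from the Space.

import torch
import torch.nn as nn

class FoleyController:
    """Sketch of the load-time GPU placement pattern this commit adopts."""

    def __init__(self):
        # the diff hardcodes "cuda"; the CPU fallback here is an addition
        # so the sketch also runs on machines without a GPU
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        self.pipeline = None
        self.loaded = False

    def load_models(self):
        # placeholders standing in for the real checkpoints loaded in app.py
        self.time_detector = nn.Linear(8, 8)
        self.pipeline = nn.Linear(8, 8)
        self.vocoder = nn.Linear(8, 8)
        self.image_encoder = nn.Linear(8, 8)

        # move to gpu once at load time, as the second hunk does,
        # instead of at the top of every inference call
        self.time_detector = self.time_detector.to(self.device)
        self.pipeline = self.pipeline.to(self.device)
        self.vocoder = self.vocoder.to(self.device)
        self.image_encoder = self.image_encoder.to(self.device)

        self.loaded = True

    def foley(self, x):
        # inference path no longer contains any model .to(self.device) calls;
        # only the input tensor still needs to be placed on the device
        return self.pipeline(x.to(self.device))

controller = FoleyController()
controller.load_models()
print(controller.foley(torch.randn(1, 8)))

On a ZeroGPU Space this keeps the per-request path free of repeated model transfers, since the models are already resident on the device once loading finishes.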