Merge branch 'huggingface-spaces' into main

- .gitignore +4 -1
- Dockerfile +7 -0
- app.py +37 -18
- checkpoints/{.gitkeep → BK-SDM-Small_iter50000/unet/.gitkeep} +0 -0
- docker-compose.yml +16 -0
- requirements.txt +1 -1
.gitignore
CHANGED
@@ -1,3 +1,6 @@
 __pycache__/
-checkpoints/BK-SDM-Small_iter50000
+checkpoints/BK-SDM-Small_iter50000/unet/*
 checkpoints/BK-SDM-Small_iter50000.tar.gz
+app.sh
+
+!checkpoints/BK-SDM-Small_iter50000/unet/.gitkeep
Dockerfile
ADDED
@@ -0,0 +1,7 @@
+FROM nvcr.io/nvidia/pytorch:22.03-py3
+
+ARG DEBIAN_FRONTEND=noninteractive
+RUN apt-get update
+RUN apt-get install ffmpeg libsm6 libxext6 tmux git -y
+
+WORKDIR /workspace
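The Dockerfile only prepares the base environment: the NVIDIA PyTorch 22.03 image plus a few system packages. It does not copy the repository or install requirements.txt, so Python dependencies are installed inside the container. A minimal build sketch, assuming the bk-sdm:dev tag that docker-compose.yml expects:

    # Build the base image from the repository root (tag matches docker-compose.yml).
    docker build -t bk-sdm:dev .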
app.py
CHANGED
@@ -1,7 +1,27 @@
-import gradio as gr
+import os
+import subprocess
 from pathlib import Path
+
+import gradio as gr
+
 from demo import SdmCompressionDemo
 
+dest_path_config = Path('checkpoints/BK-SDM-Small_iter50000/unet/config.json')
+dest_path_torch_ckpt = Path('checkpoints/BK-SDM-Small_iter50000/unet/diffusion_pytorch_model.bin')
+BK_SDM_CONFIG_URL: str = os.getenv('BK_SDM_CONFIG_URL', None)
+BK_SDM_TORCH_CKPT_URL: str = os.getenv('BK_SDM_TORCH_CKPT_URL', None)
+assert BK_SDM_CONFIG_URL is not None
+assert BK_SDM_TORCH_CKPT_URL is not None
+
+subprocess.call(
+    f"wget --no-check-certificate -O {dest_path_config} {BK_SDM_CONFIG_URL}",
+    shell=True
+)
+subprocess.call(
+    f"wget --no-check-certificate -O {dest_path_torch_ckpt} {BK_SDM_TORCH_CKPT_URL}",
+    shell=True
+)
+
 if __name__ == "__main__":
     servicer = SdmCompressionDemo()
     example_list = servicer.get_example_list()
@@ -10,25 +30,25 @@ if __name__ == "__main__":
         gr.Markdown(Path('docs/header.md').read_text())
         gr.Markdown(Path('docs/description.md').read_text())
         with gr.Row():
-            with gr.Column(variant='panel',scale=30):
+            with gr.Column(variant='panel', scale=30):
 
                 text = gr.Textbox(label="Input Prompt", max_lines=5, placeholder="Enter your prompt")
-
+
                 with gr.Row().style(equal_height=True):
                     generate_original_button = gr.Button(value="Generate with Original Model", variant="primary")
                     generate_compressed_button = gr.Button(value="Generate with Compressed Model", variant="primary")
 
                 with gr.Accordion("Advanced Settings", open=False):
-                    negative = gr.Textbox(label=f'Negative Prompt', placeholder=f'Enter aspects to remove (e.g., {"low quality"})')
-                    with gr.Row():
+                    negative = gr.Textbox(label=f'Negative Prompt', placeholder=f'Enter aspects to remove (e.g., {"low quality"})')
+                    with gr.Row():
                         guidance_scale = gr.Slider(label="Guidance Scale", value=7.5, minimum=4, maximum=11, step=0.5)
                         steps = gr.Slider(label="Denoising Steps", value=25, minimum=10, maximum=75, step=5)
                         seed = gr.Slider(0, 999999, label='Random Seed', value=1234, step=1)
 
                 with gr.Tab("Example Prompts"):
                     examples = gr.Examples(examples=example_list, inputs=[text])
-
-            with gr.Column(variant='panel',scale=35):
+
+            with gr.Column(variant='panel', scale=35):
                 # Define original model output components
                 gr.Markdown('<h2 align="center">Original Stable Diffusion 1.4</h2>')
                 original_model_output = gr.Image(label="Original Model")
@@ -36,29 +56,28 @@
                 original_model_test_time = gr.Textbox(value="", label="Inference Time (sec)")
                 original_model_error = gr.Markdown()
 
-            with gr.Column(variant='panel',scale=35):
+            with gr.Column(variant='panel', scale=35):
                 # Define compressed model output components
                 gr.Markdown('<h2 align="center">Compressed Stable Diffusion (Ours)</h2>')
-                compressed_model_output = gr.Image(label="Compressed Model")
+                compressed_model_output = gr.Image(label="Compressed Model")
                 with gr.Row().style(equal_height=True):
                     compressed_model_test_time = gr.Textbox(value="", label="Inference Time (sec)")
                     compressed_model_error = gr.Markdown()
-
-        inputs = [text, negative, guidance_scale, steps, seed]
 
-
-
-
+        inputs = [text, negative, guidance_scale, steps, seed]
+
+        # Click the generate button for original model
+        original_model_outputs = [original_model_output, original_model_error, original_model_test_time]
+        text.submit(servicer.infer_original_model, inputs=inputs, outputs=original_model_outputs)
         generate_original_button.click(servicer.infer_original_model, inputs=inputs, outputs=original_model_outputs)
-
+
         # Click the generate button for compressed model
         compressed_model_outputs = [compressed_model_output, compressed_model_error, compressed_model_test_time]
-        text.submit(servicer.infer_compressed_model, inputs=inputs, outputs=compressed_model_outputs)
+        text.submit(servicer.infer_compressed_model, inputs=inputs, outputs=compressed_model_outputs)
         generate_compressed_button.click(servicer.infer_compressed_model, inputs=inputs, outputs=compressed_model_outputs)
 
         gr.Markdown(Path('docs/footer.md').read_text())
 
     demo.queue(concurrency_count=1)
     # demo.launch()
-    demo.launch(
-
+    demo.launch(server_name="0.0.0.0", server_port=7861)
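With this change, app.py fetches the compressed UNet at startup: it reads BK_SDM_CONFIG_URL and BK_SDM_TORCH_CKPT_URL from the environment (both asserted to be set), downloads the files via wget into checkpoints/BK-SDM-Small_iter50000/unet/, and serves the Gradio demo on 0.0.0.0:7861. A minimal launch sketch; the URLs below are placeholders, since the real checkpoint locations are not part of this commit:

    # Hypothetical URLs -- replace with the actual BK-SDM-Small checkpoint locations.
    export BK_SDM_CONFIG_URL="https://example.com/BK-SDM-Small_iter50000/unet/config.json"
    export BK_SDM_TORCH_CKPT_URL="https://example.com/BK-SDM-Small_iter50000/unet/diffusion_pytorch_model.bin"
    python app.py   # downloads the checkpoint, then launches the demo on port 7861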
checkpoints/{.gitkeep → BK-SDM-Small_iter50000/unet/.gitkeep}
RENAMED
File without changes
docker-compose.yml
ADDED
@@ -0,0 +1,16 @@
+version: "3.9"
+
+# docker compose run --service-ports --name bk-sdm-hksong bk-sdm bash
+
+services:
+  bk-sdm:
+    image: bk-sdm:dev
+    build: ./
+    container_name: bk-sdm-hksong
+    ipc: host
+    ports:
+      - "7861:7861"
+    volumes:
+      - /data2/hksong/bk-sdm:/workspace
+      - /data2/hksong/DATA:/DATA
+      - /data2/hksong/LOG:/LOG
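The compose service publishes port 7861 (the port app.py binds to) and mounts the host paths above into the container; the comment inside the file records the intended run command. A usage sketch, assuming Docker Compose v2:

    # Build bk-sdm:dev and open an interactive shell with port 7861 published,
    # as noted in docker-compose.yml.
    docker compose build
    docker compose run --service-ports --name bk-sdm-hksong bk-sdm bash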
requirements.txt
CHANGED
@@ -1,3 +1,3 @@
 torch==1.13.1
 gradio==3.31.0
-diffusers==0.15.0
+diffusers==0.15.0