Blane187 committed
Commit 6c0620c • 1 Parent(s): 70149e3

from top to bottom

Files changed (1)
  1. app.py +104 -102
app.py CHANGED
@@ -1,102 +1,104 @@
-import gradio as gr
-from model import models
-from multit2i import (
-    load_models,
-    infer_multi,
-    infer_multi_random,
-    save_gallery_images,
-    change_model,
-    get_model_info_md,
-    loaded_models,
-    get_positive_prefix,
-    get_positive_suffix,
-    get_negative_prefix,
-    get_negative_suffix,
-    get_recom_prompt_type,
-    set_recom_prompt_preset,
-    get_tag_type,
-)
-
-
-load_models(models, 5)
-#load_models(models, 20) # Fetching 20 models at the same time. default: 5
-
-
-css = """
-#model_info { text-align: center; }
-"""
-
-with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", css=css) as demo:
-    with gr.Column():
-        with gr.Accordion("Recommended Prompt", open=False):
-            recom_prompt_preset = gr.Radio(label="Set Presets", choices=get_recom_prompt_type(), value="Common")
-            with gr.Row():
-                positive_prefix = gr.CheckboxGroup(label="Use Positive Prefix", choices=get_positive_prefix(), value=[])
-                positive_suffix = gr.CheckboxGroup(label="Use Positive Suffix", choices=get_positive_suffix(), value=["Common"])
-                negative_prefix = gr.CheckboxGroup(label="Use Negative Prefix", choices=get_negative_prefix(), value=[], visible=False)
-                negative_suffix = gr.CheckboxGroup(label="Use Negative Suffix", choices=get_negative_suffix(), value=["Common"], visible=False)
-        with gr.Accordion("Model", open=True):
-            model_name = gr.Dropdown(label="Select Model", show_label=False, choices=list(loaded_models.keys()), value=list(loaded_models.keys())[0], allow_custom_value=True)
-            model_info = gr.Markdown(value=get_model_info_md(list(loaded_models.keys())[0]), elem_id="model_info")
-        with gr.Group():
-            clear_prompt = gr.Button(value="Clear Prompt 🗑️", size="sm", scale=1)
-            prompt = gr.Text(label="Prompt", lines=1, max_lines=8, placeholder="1girl, solo, ...", show_copy_button=True)
-            neg_prompt = gr.Text(label="Negative Prompt", lines=1, max_lines=8, placeholder="", visible=False)
-        with gr.Row():
-            run_button = gr.Button("Generate Image", scale=6)
-            random_button = gr.Button("Random Model 🎲", scale=3)
-            image_num = gr.Number(label="Count", minimum=1, maximum=16, value=1, step=1, interactive=True, scale=1)
-        results = gr.Gallery(label="Gallery", interactive=False, show_download_button=True, show_share_button=False,
-                             container=True, format="png", object_fit="contain")
-        image_files = gr.Files(label="Download", interactive=False)
-        clear_results = gr.Button("Clear Gallery / Download")
-        examples = gr.Examples(
-            examples = [
-                ["souryuu asuka langley, 1girl, neon genesis evangelion, plugsuit, pilot suit, red bodysuit, sitting, crossing legs, black eye patch, cat hat, throne, symmetrical, looking down, from bottom, looking at viewer, outdoors"],
-                ["sailor moon, magical girl transformation, sparkles and ribbons, soft pastel colors, crescent moon motif, starry night sky background, shoujo manga style"],
-                ["kafuu chino, 1girl, solo"],
-                ["1girl"],
-                ["beautiful sunset"],
-            ],
-            inputs=[prompt],
-        )
-        gr.Markdown(
-            f"""This demo was created in reference to the following demos.
-- [Nymbo/Flood](https://huggingface.co/spaces/Nymbo/Flood).
-- [Yntec/ToyWorldXL](https://huggingface.co/spaces/Yntec/ToyWorldXL).
-"""
-        )
-        gr.DuplicateButton(value="Duplicate Space")
-
-    model_name.change(change_model, [model_name], [model_info], queue=False, show_api=False)
-    gr.on(
-        triggers=[run_button.click, prompt.submit],
-        fn=infer_multi,
-        inputs=[prompt, neg_prompt, results, image_num, model_name,
-                positive_prefix, positive_suffix, negative_prefix, negative_suffix],
-        outputs=[results],
-        queue=True,
-        trigger_mode="multiple",
-        concurrency_limit=5,
-        show_progress="full",
-        show_api=True,
-    ).then(save_gallery_images, [results], [results, image_files], queue=False, show_api=False)
-    gr.on(
-        triggers=[random_button.click],
-        fn=infer_multi_random,
-        inputs=[prompt, neg_prompt, results, image_num,
-                positive_prefix, positive_suffix, negative_prefix, negative_suffix],
-        outputs=[results],
-        queue=True,
-        trigger_mode="multiple",
-        concurrency_limit=5,
-        show_progress="full",
-        show_api=True,
-    ).then(save_gallery_images, [results], [results, image_files], queue=False, show_api=False)
-    clear_prompt.click(lambda: None, None, [prompt], queue=False, show_api=False)
-    clear_results.click(lambda: (None, None), None, [results, image_files], queue=False, show_api=False)
-    recom_prompt_preset.change(set_recom_prompt_preset, [recom_prompt_preset],
-                               [positive_prefix, positive_suffix, negative_prefix, negative_suffix], queue=False, show_api=False)
-
-demo.queue()
-demo.launch()
+import gradio as gr
+from model import models
+from multit2i import (
+    load_models,
+    infer_multi,
+    infer_multi_random,
+    save_gallery_images,
+    change_model,
+    get_model_info_md,
+    loaded_models,
+    get_positive_prefix,
+    get_positive_suffix,
+    get_negative_prefix,
+    get_negative_suffix,
+    get_recom_prompt_type,
+    set_recom_prompt_preset,
+    get_tag_type,
+)
+
+
+load_models(models, 5)
+#load_models(models, 20) # Fetching 20 models at the same time. default: 5
+
+
+css = """
+#model_info { text-align: center; }
+"""
+
+with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", css=css) as demo:
+    with gr.Column():
+        with gr.Accordion("Model", open=True):
+            model_name = gr.Dropdown(label="Select Model", show_label=False, choices=list(loaded_models.keys()), value=list(loaded_models.keys())[0], allow_custom_value=True)
+            model_info = gr.Markdown(value=get_model_info_md(list(loaded_models.keys())[0]), elem_id="model_info")
+        with gr.Group():
+            clear_prompt = gr.Button(value="Clear Prompt 🗑️", size="sm", scale=1)
+            prompt = gr.Text(label="Prompt", lines=1, max_lines=8, placeholder="1girl, solo, ...", show_copy_button=True)
+            neg_prompt = gr.Text(label="Negative Prompt", lines=1, max_lines=8, placeholder="", visible=False)
+        with gr.Accordion("Recommended Prompt", open=False):
+            recom_prompt_preset = gr.Radio(label="Set Presets", choices=get_recom_prompt_type(), value="Common")
+            with gr.Row():
+                positive_prefix = gr.CheckboxGroup(label="Use Positive Prefix", choices=get_positive_prefix(), value=[])
+                positive_suffix = gr.CheckboxGroup(label="Use Positive Suffix", choices=get_positive_suffix(), value=["Common"])
+                negative_prefix = gr.CheckboxGroup(label="Use Negative Prefix", choices=get_negative_prefix(), value=[], visible=False)
+                negative_suffix = gr.CheckboxGroup(label="Use Negative Suffix", choices=get_negative_suffix(), value=["Common"], visible=False)
+
+
+        with gr.Row():
+            run_button = gr.Button("Generate Image", scale=6)
+            random_button = gr.Button("Random Model 🎲", scale=3)
+            image_num = gr.Number(label="Count", minimum=1, maximum=16, value=1, step=1, interactive=True, scale=1)
+        results = gr.Gallery(label="Gallery", interactive=False, show_download_button=True, show_share_button=False,
+                             container=True, format="png", object_fit="contain")
+        image_files = gr.Files(label="Download", interactive=False)
+        clear_results = gr.Button("Clear Gallery / Download")
+        examples = gr.Examples(
+            examples = [
+                ["souryuu asuka langley, 1girl, neon genesis evangelion, plugsuit, pilot suit, red bodysuit, sitting, crossing legs, black eye patch, cat hat, throne, symmetrical, looking down, from bottom, looking at viewer, outdoors"],
+                ["sailor moon, magical girl transformation, sparkles and ribbons, soft pastel colors, crescent moon motif, starry night sky background, shoujo manga style"],
+                ["kafuu chino, 1girl, solo"],
+                ["1girl"],
+                ["beautiful sunset"],
+            ],
+            inputs=[prompt],
+        )
+        gr.Markdown(
+            f"""This demo was created in reference to the following demos.
+- [Nymbo/Flood](https://huggingface.co/spaces/Nymbo/Flood).
+- [Yntec/ToyWorldXL](https://huggingface.co/spaces/Yntec/ToyWorldXL).
+"""
+        )
+        gr.DuplicateButton(value="Duplicate Space")
+
+    model_name.change(change_model, [model_name], [model_info], queue=False, show_api=False)
+    gr.on(
+        triggers=[run_button.click, prompt.submit],
+        fn=infer_multi,
+        inputs=[prompt, neg_prompt, results, image_num, model_name,
+                positive_prefix, positive_suffix, negative_prefix, negative_suffix],
+        outputs=[results],
+        queue=True,
+        trigger_mode="multiple",
+        concurrency_limit=5,
+        show_progress="full",
+        show_api=True,
+    ).then(save_gallery_images, [results], [results, image_files], queue=False, show_api=False)
+    gr.on(
+        triggers=[random_button.click],
+        fn=infer_multi_random,
+        inputs=[prompt, neg_prompt, results, image_num,
+                positive_prefix, positive_suffix, negative_prefix, negative_suffix],
+        outputs=[results],
+        queue=True,
+        trigger_mode="multiple",
+        concurrency_limit=5,
+        show_progress="full",
+        show_api=True,
+    ).then(save_gallery_images, [results], [results, image_files], queue=False, show_api=False)
+    clear_prompt.click(lambda: None, None, [prompt], queue=False, show_api=False)
+    clear_results.click(lambda: (None, None), None, [results, image_files], queue=False, show_api=False)
+    recom_prompt_preset.change(set_recom_prompt_preset, [recom_prompt_preset],
+                               [positive_prefix, positive_suffix, negative_prefix, negative_suffix], queue=False, show_api=False)
+
+demo.queue()
+demo.launch()
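For quick reference, here is a minimal standalone sketch (not the Space's actual code) that reproduces the new top-to-bottom layout order introduced by this commit; the model list, preset choices, and inference wiring provided by model/multit2i are replaced with placeholder values so the snippet runs on its own.

# Hypothetical, simplified sketch of the reordered layout: Model accordion first,
# then the prompt group, then the Recommended Prompt accordion, then the run row.
# Placeholder choices and values stand in for the real model/multit2i data.
import gradio as gr

with gr.Blocks() as demo:
    with gr.Column():
        with gr.Accordion("Model", open=True):
            model_name = gr.Dropdown(label="Select Model", choices=["model-a", "model-b"], value="model-a")
            model_info = gr.Markdown("(model info placeholder)")
        with gr.Group():
            clear_prompt = gr.Button("Clear Prompt 🗑️", size="sm")
            prompt = gr.Text(label="Prompt", lines=1, max_lines=8, placeholder="1girl, solo, ...")
        with gr.Accordion("Recommended Prompt", open=False):
            recom_prompt_preset = gr.Radio(label="Set Presets", choices=["None", "Common"], value="Common")
        with gr.Row():
            run_button = gr.Button("Generate Image", scale=6)
            random_button = gr.Button("Random Model 🎲", scale=3)
            image_num = gr.Number(label="Count", minimum=1, maximum=16, value=1, step=1)
        results = gr.Gallery(label="Gallery", interactive=False)

    # Only the prompt-clearing event is wired here; generation is omitted.
    clear_prompt.click(lambda: None, None, [prompt], queue=False)

if __name__ == "__main__":
    demo.launch()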