File size: 5,002 Bytes
6c0620c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
import gradio as gr
from model import models
from multit2i import (
    load_models,
    infer_multi,
    infer_multi_random,
    save_gallery_images,
    change_model,
    get_model_info_md,
    loaded_models,
    get_positive_prefix,
    get_positive_suffix,
    get_negative_prefix,
    get_negative_suffix,
    get_recom_prompt_type,
    set_recom_prompt_preset,
    get_tag_type,
)


# Pre-fetch/initialize the model list at startup; the second argument is the
# number of models fetched at the same time (per the note below, default: 5).
load_models(models, 5)
#load_models(models, 20) # Fetching 20 models at the same time. default: 5


# Custom CSS injected into the Blocks app: centers the model-info markdown
# panel (matched by elem_id="model_info" set on the gr.Markdown below).
css = """
#model_info { text-align: center; }
"""

# Build the demo UI: model picker, prompt inputs, recommended-prompt presets,
# generation controls, a results gallery, and downloadable image files.
with gr.Blocks(theme="NoCrypt/miku@>=1.2.2", css=css) as demo:
    with gr.Column():
        with gr.Accordion("Model", open=True):
            # Dropdown defaults to the first loaded model; allow_custom_value
            # lets the user type a model id that is not in the choices list.
            model_name = gr.Dropdown(label="Select Model", show_label=False, choices=list(loaded_models.keys()), value=list(loaded_models.keys())[0], allow_custom_value=True)
            model_info = gr.Markdown(value=get_model_info_md(list(loaded_models.keys())[0]), elem_id="model_info")
        with gr.Group():
            clear_prompt = gr.Button(value="Clear Prompt πŸ—‘οΈ", size="sm", scale=1)
            prompt = gr.Text(label="Prompt", lines=1, max_lines=8, placeholder="1girl, solo, ...", show_copy_button=True)
            # Negative prompt is passed to inference but hidden from the UI.
            neg_prompt = gr.Text(label="Negative Prompt", lines=1, max_lines=8, placeholder="", visible=False)
        with gr.Accordion("Recommended Prompt", open=False):
            # Preset radio drives the four checkbox groups via the
            # set_recom_prompt_preset handler wired below.
            recom_prompt_preset = gr.Radio(label="Set Presets", choices=get_recom_prompt_type(), value="Common")
            with gr.Row():
                positive_prefix = gr.CheckboxGroup(label="Use Positive Prefix", choices=get_positive_prefix(), value=[])
                positive_suffix = gr.CheckboxGroup(label="Use Positive Suffix", choices=get_positive_suffix(), value=["Common"])
                negative_prefix = gr.CheckboxGroup(label="Use Negative Prefix", choices=get_negative_prefix(), value=[], visible=False)
                negative_suffix = gr.CheckboxGroup(label="Use Negative Suffix", choices=get_negative_suffix(), value=["Common"], visible=False)

        with gr.Row():
            run_button = gr.Button("Generate Image", scale=6)
            random_button = gr.Button("Random Model 🎲", scale=3)
            image_num = gr.Number(label="Count", minimum=1, maximum=16, value=1, step=1, interactive=True, scale=1)
        results = gr.Gallery(label="Gallery", interactive=False, show_download_button=True, show_share_button=False,
                              container=True, format="png", object_fit="contain")
        image_files = gr.Files(label="Download", interactive=False)
        clear_results = gr.Button("Clear Gallery / Download")
    # Clickable example prompts that fill the prompt textbox.
    # (The gr.Examples return value is unused; no need to bind it.)
    gr.Examples(
        examples=[
            ["souryuu asuka langley, 1girl, neon genesis evangelion, plugsuit, pilot suit, red bodysuit, sitting, crossing legs, black eye patch, cat hat, throne, symmetrical, looking down, from bottom, looking at viewer, outdoors"],
            ["sailor moon, magical girl transformation, sparkles and ribbons, soft pastel colors, crescent moon motif, starry night sky background, shoujo manga style"],
            ["kafuu chino, 1girl, solo"],
            ["1girl"],
            ["beautiful sunset"],
        ],
        inputs=[prompt],
    )
    # Plain string (was an f-string with no placeholders — ruff F541).
    gr.Markdown(
        """This demo was created in reference to the following demos.
- [Nymbo/Flood](https://huggingface.co/spaces/Nymbo/Flood).
- [Yntec/ToyWorldXL](https://huggingface.co/spaces/Yntec/ToyWorldXL).
            """
    )
    gr.DuplicateButton(value="Duplicate Space")

    # --- Event wiring ---
    # Selecting a model refreshes the markdown info panel.
    model_name.change(change_model, [model_name], [model_info], queue=False, show_api=False)
    # Generate with the selected model (button click or Enter in the prompt box),
    # then persist the gallery images so they appear in the Download file list.
    gr.on(
        triggers=[run_button.click, prompt.submit],
        fn=infer_multi,
        inputs=[prompt, neg_prompt, results, image_num, model_name,
                 positive_prefix, positive_suffix, negative_prefix, negative_suffix],
        outputs=[results],
        queue=True,
        trigger_mode="multiple",
        concurrency_limit=5,
        show_progress="full",
        show_api=True,
    ).then(save_gallery_images, [results], [results, image_files], queue=False, show_api=False)
    # Same flow, but the backend picks a random model (no model_name input).
    gr.on(
        triggers=[random_button.click],
        fn=infer_multi_random,
        inputs=[prompt, neg_prompt, results, image_num,
                 positive_prefix, positive_suffix, negative_prefix, negative_suffix],
        outputs=[results],
        queue=True,
        trigger_mode="multiple",
        concurrency_limit=5,
        show_progress="full",
        show_api=True,
    ).then(save_gallery_images, [results], [results, image_files], queue=False, show_api=False)
    # Clear buttons: returning None resets the target component(s).
    clear_prompt.click(lambda: None, None, [prompt], queue=False, show_api=False)
    clear_results.click(lambda: (None, None), None, [results, image_files], queue=False, show_api=False)
    # Preset radio updates all four prefix/suffix checkbox groups at once.
    recom_prompt_preset.change(set_recom_prompt_preset, [recom_prompt_preset],
     [positive_prefix, positive_suffix, negative_prefix, negative_suffix], queue=False, show_api=False)

# Enable request queuing (required for the queued gr.on events above),
# then start the Gradio server.
demo.queue()
demo.launch()