import gradio as gr
from diffusers import StableDiffusionInpaintPipeline
import torch
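# Load the Stable Diffusion inpainting checkpoint in fp16 to reduce GPU memory use.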
pipeline = StableDiffusionInpaintPipeline.from_pretrained(
    "runwayml/stable-diffusion-inpainting",
    torch_dtype=torch.float16,
    use_safetensors=True,
    variant="fp16",
)
pipeline = pipeline.to("cuda")
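# With tool="sketch", Gradio passes a dict holding the uploaded "image" and the drawn "mask".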
def predict(mask_img):
    prompt = "a green frog, highly detailed, natural lighting"
    # get the size of the input image so the output can be resized back to it
    size = mask_img["image"].size
    # run the inpainting pipeline; the sketched (masked) region is regenerated from the prompt
    image = pipeline(
        prompt=prompt,
        num_inference_steps=35,
        image=mask_img["image"].convert("RGB"),
        mask_image=mask_img["mask"].convert("RGB"),
        guidance_scale=9,
    ).images[0]
    return image.resize(size)
# def mirror(img):
#     print(img['mask'])
#     print(img['image'])
#     return img['image'].rotate(45)
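# Note: the `tool` argument of gr.Image is Gradio 3.x API; newer releases replace it with components such as gr.ImageEditor.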
demo = gr.Interface(
    fn=predict,
    inputs=gr.Image(tool="sketch", type="pil"),
    outputs=gr.Image(),
)
demo.launch()