Spaces:
Runtime error
Runtime error
File size: 1,045 Bytes
0513aaf dd1add1 c00162e dd1add1 0513aaf c00162e 0513aaf |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 |
import gradio as gr
import utils
from PIL import Image
import torch
import math
from torchvision import transforms
# All inference runs on CPU for this demo.
device = "cpu"

# Decade labels "1880", "1890", ..., "2010" — one pre-trained generator each.
years = [str(decade) for decade in range(1880, 2020, 10)]

# Map each decade to its StyleGAN2 generator, kept in eval mode.
# NOTE(review): load_stylegan2 also returns w_avg, which this script ignores.
orig_models = {}
for year in years:
    generator, _w_avg = utils.load_stylegan2(f"pretrained_models/{year}.pkl", device)
    orig_models[year] = {"G": generator.eval()}
def run_alignment(image_path, idx=None):
    """Detect and align the face in the image at *image_path*.

    Args:
        image_path: Path to the image file on disk.
        idx: Optional face index forwarded to ``align_face`` (e.g. 0 for
            the first detected face).

    Returns:
        The aligned face as returned by ``align_face`` (a PIL image —
        ``.size`` is printed below).
    """
    # Heavy third-party/project imports kept local so importing this module
    # stays cheap, matching the original's style.
    import dlib
    from align_all_parallel import align_face

    # Loading the 68-landmark shape predictor reads a large model file from
    # disk; cache it on the function so repeated calls (one per uploaded
    # image) reuse a single instance instead of reloading every time.
    predictor = getattr(run_alignment, "_predictor", None)
    if predictor is None:
        predictor = dlib.shape_predictor(
            "pretrained_models/shape_predictor_68_face_landmarks.dat"
        )
        run_alignment._predictor = predictor

    aligned_image = align_face(filepath=image_path, predictor=predictor, idx=idx)
    print("Aligned image has shape: {}".format(aligned_image.size))
    return aligned_image
def predict(inp):
    """Gradio handler: save the uploaded PIL image, return the aligned face.

    Args:
        inp: The uploaded image as a PIL image (``gr.Image(type="pil")``).

    Returns:
        The aligned face image produced by ``run_alignment``.
    """
    import os

    # The intermediate file lives under imgs/; create the directory on first
    # use so a fresh checkout does not crash with FileNotFoundError on save.
    os.makedirs("imgs", exist_ok=True)
    inp.save("imgs/input.png")
    # idx=0: align the first detected face in the uploaded image.
    return run_alignment("imgs/input.png", idx=0)
# Minimal image-in / image-out web UI around predict().
demo = gr.Interface(
    fn=predict,
    inputs=gr.Image(type="pil"),
    outputs=gr.Image(type="pil"),
)
demo.launch()
|