import gradio as gr
import spaces
import torch
from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler
from diffusers.utils import export_to_video

# Load the text-to-video pipeline in fp16 and reduce GPU memory usage
pipe = DiffusionPipeline.from_pretrained(
    "damo-vilab/text-to-video-ms-1.7b",
    torch_dtype=torch.float16,
    variant="fp16",
)
pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
pipe.enable_model_cpu_offload()
pipe.enable_vae_slicing()


@spaces.GPU(duration=250)
def generate(prompt, num_inference_steps, num_frames):
    # Run the diffusion pipeline and export the generated frames as a video file
    video_frames = pipe(
        prompt,
        num_inference_steps=num_inference_steps,
        num_frames=num_frames,
    ).frames[0]
    video_path = export_to_video(video_frames, fps=10)
    return video_path


prompt = gr.Textbox(
    label="Enter prompt to generate a video",
    info="The AI will generate a video based on this prompt",
)

description = """
🚀 This is an **unofficial** demo of OpenAI's Sora, which has not been released yet.\n
✔ This Space was made using [ali-vilab/text-to-video-ms-1.7b](https://huggingface.co/ali-vilab/text-to-video-ms-1.7b)\n
⌚ Estimated generation time is **150 seconds**\n
🎁 The Space runs on ZeroGPU; if you want faster generation, duplicate the Space and choose a faster GPU
"""

num_inference_steps = gr.Slider(
    8, 128, step=1, value=24,
    label="Num Inference Steps",
    info="More steps give better quality but slower generation",
)
num_frames = gr.Slider(
    8, 1000, step=1, value=200,
    label="Num of Frames",
    info="The number of frames determines the video duration",
)

interface = gr.Interface(
    generate,
    inputs=[prompt],
    additional_inputs=[num_inference_steps, num_frames],
    examples=[
        ["Astronaut riding a horse", 60, 100],
        ["Darth Vader surfing in waves", 30, 200],
        ["A house in the woods by the ocean", 70, 100],
        ["A car in the forest", 70, 100],
        ["A house on fire", 60, 150],
        ["A plane on fire, falling down", 100, 20],
        ["Campfire", 50, 50],
        ["Zombie apocalypse", 100, 20],
        ["New York City", 100, 20],
        ["A man running in a beautiful forest", 100, 20],
        ["A cup of tea with fog", 100, 20],
    ],
    outputs="video",
    title="OpenAI Sora (Unofficial)",
    description=description,
    cache_examples=False,
    theme="soft",
)
interface.launch()