import gradio as gr
import spaces
import torch
from diffusers import DiffusionPipeline, DPMSolverMultistepScheduler
from diffusers.utils import export_to_video
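
# Load the ModelScope text-to-video pipeline in fp16, swap in a DPM-Solver++
# scheduler, and reduce GPU memory usage with CPU offloading and VAE slicing.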
pipe = DiffusionPipeline.from_pretrained("damo-vilab/text-to-video-ms-1.7b", torch_dtype=torch.float16, variant="fp16")
pipe.scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)
pipe.enable_model_cpu_offload()
pipe.enable_vae_slicing()
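
# spaces.GPU requests a ZeroGPU slot for up to 250 seconds per call; the
# pipeline runs 25 denoising steps and writes the frames out as a video file.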
@spaces.GPU(duration=250)
def generate(prompt):
    # Recent diffusers releases return frames with a leading batch dimension;
    # take the first video (drop the [0] on older versions that return a flat list).
    video_frames = pipe(prompt, num_inference_steps=25, num_frames=10).frames[0]
    video_path = export_to_video(video_frames)
    return video_path
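
# Build the Gradio UI: a single prompt textbox feeding generate(), with a
# video component for the output.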
prompt = gr.Textbox(label="Enter a prompt to generate a video", info="The AI will generate a video based on this prompt")
interface = gr.Interface(
    generate,
    inputs=prompt,
    outputs="video",
    examples=[["Astronaut riding a horse"], ["Darth vader surfing in waves"]],
    cache_examples=False,
    theme="soft",
)
interface.launch()
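
# Running this file (e.g. `python app.py`) serves the demo locally; on a
# Hugging Face Space, app.py is launched automatically.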