Spaces: Running on Zero
import torch
import gradio as gr
from diffusers import DiffusionPipeline
from diffusers.utils import export_to_video  # For writing the generated frames to an MP4 file
import time  # For timing the generation run
def load_video_model():
    try:
        # Ensure all necessary libraries are importable
        import sentencepiece
        # Load the model with specific error handling
        print("Loading model...")
        pipe = DiffusionPipeline.from_pretrained(
            "Lightricks/LTX-Video", torch_dtype=torch.bfloat16
        )
        # Move the pipeline to the GPU when one is available
        if torch.cuda.is_available():
            pipe = pipe.to("cuda")
        print("Model loaded successfully!")
        return pipe
    except ImportError as e:
        print(f"Dependency Error: {e}")
        print("Please install required libraries: pip install sentencepiece diffusers transformers torch gradio imageio")
        return None
    except Exception as e:
        print(f"Error loading model: {e}")
        return None
# Load the model when the script starts
pipe = load_video_model()
def generate_video(prompt):
    if pipe is None:
        return "Error: Model could not be loaded. Check your dependencies."
    try:
        print("Starting video generation...")
        start_time = time.time()
        # Run the pipeline; it returns one list of frames per prompt
        output = pipe(prompt)
        # Take the frames for the first (and only) prompt in the batch
        frames = output.frames[0]
        print(f"Generated {len(frames)} frames.")
        # Save the generated frames as an MP4 file
        output_path = "generated_video.mp4"
        export_to_video(frames, output_path, fps=24)
        end_time = time.time()
        print(f"Video saved to {output_path} in {end_time - start_time:.2f} seconds")
        return output_path
    except Exception as e:
        print(f"Error during video generation: {e}")
        return f"Error generating video: {e}"
# Create the Gradio interface
demo = gr.Interface(
    fn=generate_video,
    inputs=gr.Textbox(label="Enter Video Generation Prompt"),
    outputs=gr.Video(label="Generated Video"),
    title="LTX-Video Generation",
    description="Generate a video with the Lightricks LTX-Video diffusion model",
)
if __name__ == "__main__":
    if pipe is not None:
        demo.launch(share=True)  # Set share=True to create a public link
    else:
        print("Could not launch app due to model loading failure.")