amos1088 committed on
Commit
92fa744
·
1 Parent(s): 683afc3

test gradio

Browse files
Files changed (1) hide show
  1. app.py +10 -6
app.py CHANGED
@@ -2,13 +2,17 @@ import gradio as gr
2
  import torch
3
  from diffusers import AnimateDiffPipeline, DDIMScheduler, MotionAdapter
4
  from diffusers.utils import export_to_gif
5
- from PIL import Image
6
- import io
7
 
8
- # Load the motion adapter
9
- adapter = MotionAdapter.from_pretrained("guoyww/animatediff-motion-adapter-v1-5-2", torch_dtype=torch.float16)
 
 
 
10
  model_id = "SG161222/Realistic_Vision_V5.1_noVAE"
11
- pipe = AnimateDiffPipeline.from_pretrained(model_id, motion_adapter=adapter, torch_dtype=torch.float16)
 
 
 
12
  scheduler = DDIMScheduler.from_pretrained(
13
  model_id,
14
  subfolder="scheduler",
@@ -18,8 +22,8 @@ scheduler = DDIMScheduler.from_pretrained(
18
  steps_offset=1,
19
  )
20
  pipe.scheduler = scheduler
 
21
  pipe.enable_vae_slicing()
22
- pipe.enable_model_cpu_offload()
23
 
24
  # Define the animation function
25
  def generate_animation(prompt, negative_prompt, num_frames, guidance_scale, num_inference_steps):
 
2
  import torch
3
  from diffusers import AnimateDiffPipeline, DDIMScheduler, MotionAdapter
4
  from diffusers.utils import export_to_gif
 
 
5
 
6
+ # Set device to CPU
7
+ device = torch.device("cpu")
8
+
9
+ # Load the motion adapter on CPU
10
+ adapter = MotionAdapter.from_pretrained("guoyww/animatediff-motion-adapter-v1-5-2", torch_dtype=torch.float32).to(device)
11
  model_id = "SG161222/Realistic_Vision_V5.1_noVAE"
12
+ pipe = AnimateDiffPipeline.from_pretrained(
13
+ model_id, motion_adapter=adapter, torch_dtype=torch.float32
14
+ ).to(device)
15
+
16
  scheduler = DDIMScheduler.from_pretrained(
17
  model_id,
18
  subfolder="scheduler",
 
22
  steps_offset=1,
23
  )
24
  pipe.scheduler = scheduler
25
+
26
  pipe.enable_vae_slicing()
 
27
 
28
  # Define the animation function
29
  def generate_animation(prompt, negative_prompt, num_frames, guidance_scale, num_inference_steps):