Lawrence-cj committed on
Commit baf6f43
1 Parent(s): 8b03161

Update app.py

Files changed (1)
  1. app.py +1 -8
app.py CHANGED
@@ -8,12 +8,11 @@ import gradio as gr
 import numpy as np
 import uuid
 import spaces
-from diffusers import ConsistencyDecoderVAE, DPMSolverMultistepScheduler, Transformer2DModel, AutoencoderKL, SASolverScheduler
+from diffusers import PixArtSigmaPipeline, ConsistencyDecoderVAE, DPMSolverMultistepScheduler, Transformer2DModel, AutoencoderKL, SASolverScheduler
 import torch
 from typing import Tuple
 from datetime import datetime
 from peft import PeftModel
-from diffusers_patches import pixart_sigma_init_patched_inputs, PixArtSigmaPipeline


 DESCRIPTION = """![Logo](https://raw.githubusercontent.com/PixArt-alpha/PixArt-sigma-project/master/static/images/logo-sigma.png)
@@ -108,12 +107,6 @@ if torch.cuda.is_available():
     weight_dtype = torch.float16
     T5_token_max_length = 300

-    # tmp patches for diffusers PixArtSigmaPipeline Implementation
-    print(
-        "Changing _init_patched_inputs method of diffusers.models.Transformer2DModel "
-        "using scripts.diffusers_patches.pixart_sigma_init_patched_inputs")
-    setattr(Transformer2DModel, '_init_patched_inputs', pixart_sigma_init_patched_inputs)
-
     transformer = Transformer2DModel.from_pretrained(
         "PixArt-alpha/PixArt-Sigma-XL-2-1024-MS",
         subfolder='transformer',
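
The practical effect of this commit is that PixArtSigmaPipeline is imported straight from diffusers instead of the local diffusers_patches module, so the monkey-patch of Transformer2DModel._init_patched_inputs is no longer needed. Below is a minimal sketch of how the model could be loaded with the upstream classes after this change; the pipeline repo id, dtype, device, and prompt are assumptions for illustration and only the transformer repo/subfolder appear in the diff context.

# Sketch: loading PixArt-Sigma via upstream diffusers (no local patch needed).
# Assumed: base pipeline repo id, float16 weights, CUDA device, example prompt.
import torch
from diffusers import Transformer2DModel, PixArtSigmaPipeline

weight_dtype = torch.float16

# Transformer checkpoint as shown in the diff context above.
transformer = Transformer2DModel.from_pretrained(
    "PixArt-alpha/PixArt-Sigma-XL-2-1024-MS",
    subfolder="transformer",
    torch_dtype=weight_dtype,
)

# Assumed base repo providing the VAE, T5 text encoder, and scheduler.
pipe = PixArtSigmaPipeline.from_pretrained(
    "PixArt-alpha/pixart_sigma_sdxlvae_T5_diffusers",
    transformer=transformer,
    torch_dtype=weight_dtype,
)
pipe.to("cuda")

# Example generation call.
image = pipe(prompt="A small cactus with a happy face in the Sahara desert").images[0]
image.save("sample.png")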