Menyu committed
Commit 6a22f12
1 parent: 7fe735f

Update app.py

Files changed (1):
  1. app.py  +0 -8
app.py CHANGED
@@ -4,7 +4,6 @@ import numpy as np
 import spaces
 import torch
 from diffusers import AutoPipelineForText2Image, AutoencoderKL #,EulerDiscreteScheduler
-from transformers import CLIPTokenizer, CLIPTextModel
 
 if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>你现在运行在CPU上 但是只支持GPU.</p>"
@@ -22,13 +21,6 @@ if torch.cuda.is_available():
         add_watermarker=False
     )
     #pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing")
-    # 加载支持更长序列的 Tokenizer 和文本编码器
-    tokenizer = CLIPTokenizer.from_pretrained("John6666/noobai-xl-nai-xl-epsilonpred075version-sdxl", model_max_length=512)
-    text_encoder = CLIPTextModel.from_pretrained("John6666/noobai-xl-nai-xl-epsilonpred075version-sdxl", trust_remote_code=True)
-
-    # 更新 pipeline
-    pipe.tokenizer = tokenizer
-    pipe.text_encoder = text_encoder
     pipe.to("cuda")
 
 def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
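
The net effect of the change is that the app no longer swaps in a 512-token CLIP tokenizer and text encoder, and instead uses the components bundled with the checkpoint. A minimal sketch of the setup that remains is shown below, assuming the standard diffusers AutoPipelineForText2Image API; the checkpoint id is copied from the removed lines and may not be the exact id the app loads, and the torch_dtype argument is an assumption since it does not appear in the retained hunk context.

import torch
from diffusers import AutoPipelineForText2Image

if torch.cuda.is_available():
    pipe = AutoPipelineForText2Image.from_pretrained(
        "John6666/noobai-xl-nai-xl-epsilonpred075version-sdxl",  # checkpoint id assumed from the removed lines
        torch_dtype=torch.float16,  # assumption: dtype is not visible in the diff
        add_watermarker=False,      # matches the retained context line
    )
    # No tokenizer/text_encoder reassignment anymore: the pipeline keeps the
    # CLIP components shipped with the checkpoint (default 77-token prompt limit).
    pipe.to("cuda")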