Menyu committed
Commit 0dca0cf · verified · 1 Parent(s): 2f36b77

Update app.py

Files changed (1)
  1. app.py +7 -12
app.py CHANGED
@@ -4,6 +4,7 @@ import numpy as np
 import spaces
 import torch
 from diffusers import AutoPipelineForText2Image, AutoencoderKL #,EulerDiscreteScheduler
+from transformers import CLIPTokenizer, CLIPTextModel
 
 if not torch.cuda.is_available():
     DESCRIPTION += "\n<p>You are running on CPU, but only GPU is supported.</p>"
@@ -21,19 +22,13 @@ if torch.cuda.is_available():
         add_watermarker=False
     )
     #pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, timestep_spacing="trailing")
-    # Set the tokenizer's maximum length
-    max_token_length = 512
-    pipe.tokenizer.model_max_length = max_token_length
+    # Load a tokenizer and text encoder that support longer sequences
+    tokenizer = CLIPTokenizer.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0", model_max_length=512)
+    text_encoder = CLIPTextModel.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0", trust_remote_code=True)
 
-    # Adjust the text encoder's configuration
-    pipe.text_encoder.config.max_position_embeddings = max_token_length
-
-    # Re-initialize the position embeddings if needed
-    import torch.nn as nn
-    old_emb = pipe.text_encoder.text_model.embeddings.position_embedding.weight.data
-    new_emb = nn.Parameter(torch.zeros(max_token_length, old_emb.shape[1]))
-    new_emb.data[:old_emb.shape[0], :] = old_emb
-    pipe.text_encoder.text_model.embeddings.position_embedding.weight = new_emb
+    # Update the pipeline
+    pipe.tokenizer = tokenizer
+    pipe.text_encoder = text_encoder
     pipe.to("cuda")
 
 def randomize_seed_fn(seed: int, randomize_seed: bool) -> int:
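
The pattern this commit adopts is to load a fresh tokenizer with a larger model_max_length plus a matching text encoder, then swap them into the existing pipeline. Below is a minimal, self-contained sketch of that pattern, not the committed app.py: the repo variable, the subfolder arguments, and the float16 dtype are assumptions about the standard SDXL repository layout, and the comments flag a caveat about the encoder's 77 position embeddings.

# Minimal sketch (assumptions noted below), not the committed app.py:
# load SDXL, then swap in a tokenizer with a larger model_max_length
# and a matching text encoder.
import torch
from diffusers import AutoPipelineForText2Image
from transformers import CLIPTokenizer, CLIPTextModel

repo = "stabilityai/stable-diffusion-xl-base-1.0"  # model id used in the diff

pipe = AutoPipelineForText2Image.from_pretrained(
    repo, torch_dtype=torch.float16, add_watermarker=False
)

# subfolder="tokenizer" / "text_encoder" is an assumption about the standard
# SDXL repo layout; the commit itself loads from the repo root.
tokenizer = CLIPTokenizer.from_pretrained(repo, subfolder="tokenizer", model_max_length=512)
text_encoder = CLIPTextModel.from_pretrained(repo, subfolder="text_encoder", torch_dtype=torch.float16)

# Swap the first tokenizer/text-encoder pair; SDXL also has tokenizer_2 and
# text_encoder_2, which this pattern (like the commit) leaves untouched.
pipe.tokenizer = tokenizer
pipe.text_encoder = text_encoder

# Note: model_max_length only controls tokenizer truncation/padding; the CLIP
# text encoder still has 77 position embeddings, so longer prompts still need
# truncation or an embedding resize like the code this commit removed.
pipe.to("cuda")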