nyanko7 committed
Commit 92cf4da
1 Parent(s): 25062cb

Update app.py

Files changed (1): app.py (+3 -4)
app.py CHANGED
@@ -14,7 +14,6 @@ from tqdm import tqdm
 import bitsandbytes as bnb
 from bitsandbytes.nn.modules import Params4bit, QuantState
 
-
 import torch
 import random
 from einops import rearrange, repeat
@@ -23,7 +22,7 @@ from torch import Tensor, nn
 from transformers import CLIPTextModel, CLIPTokenizer
 from transformers import T5EncoderModel, T5Tokenizer
 from safetensors.torch import load_file
-from optimum.quanto import freeze, qfloat8, quantize
+# from optimum.quanto import freeze, qfloat8, quantize
 
 
 # ---------------- Encoders ----------------
@@ -68,8 +67,8 @@ device = "cuda"
 t5 = HFEmbedder("google/t5-v1_1-xxl", max_length=512, torch_dtype=torch.bfloat16).to(device)
 clip = HFEmbedder("openai/clip-vit-large-patch14", max_length=77, torch_dtype=torch.bfloat16).to(device)
 ae = AutoencoderKL.from_pretrained("black-forest-labs/FLUX.1-dev", subfolder="vae", torch_dtype=torch.bfloat16).to(device)
-quantize(t5, weights=qfloat8)
-freeze(t5)
+# quantize(t5, weights=qfloat8)
+# freeze(t5)
 
 
 # ---------------- NF4 ----------------
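
The edit comments out the optimum.quanto float8 path (quantize(t5, weights=qfloat8) followed by freeze(t5)). The imports that remain (bitsandbytes, Params4bit, QuantState) and the NF4 section that follows suggest the T5 encoder weights are instead handled by a bitsandbytes 4-bit NF4 path. Below is a minimal sketch of how an NF4 quantize/dequantize round trip looks with bitsandbytes, assuming a CUDA device; the tensor shape and blocksize are illustrative and not taken from app.py.

import torch
import bitsandbytes as bnb

# Illustrative weight tensor; in app.py the real candidates would be the
# linear-layer weights of the T5 encoder.
weight = torch.randn(4096, 4096, dtype=torch.bfloat16, device="cuda")

# quantize_4bit packs the tensor into 4-bit NF4 blocks; the returned QuantState
# carries the per-block absmax statistics needed to reconstruct the values.
qweight, quant_state = bnb.functional.quantize_4bit(weight, blocksize=64, quant_type="nf4")

# At inference time the packed blocks are expanded back for the matmul.
restored = bnb.functional.dequantize_4bit(qweight, quant_state)
print(restored.shape, restored.dtype)

Relative to the 8-bit float weights produced by optimum.quanto, NF4 stores roughly 4 bits per weight plus per-block statistics, so the quantized T5 weights take about half the memory.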