Update src/pipeline.py
src/pipeline.py  +6 -6
@@ -629,12 +629,12 @@ def load_pipeline() -> Pipeline:
 
     dtype, device = torch.bfloat16, "cuda"
 
-    import pdb; pdb.set_trace()
-    t5_path = os.path.join(HF_HUB_CACHE, "models--manbeast3b--t5-v1_1-xxl-encoder-q8/snapshots/59c6c9cb99dcea42067f32caac3ea0836ef4c548/t5-v1_1-xxl-encoder-Q8_0.gguf")
-    # config_path = os.path.join(HF_HUB_CACHE, "models--black-forest--labs/FLUX.1-schnell/snapshots/741f7c3ce8b383c54771c7003378a50191e9efe9/text_encoder_2/config.json")
-    config_path = os.path.join(HF_HUB_CACHE, "models--black-forest-labs--FLUX.1-schnell/snapshots/741f7c3ce8b383c54771c7003378a50191e9efe9/")
-    ckpt_t5 = load_single_file_checkpoint(t5_path,local_files_only=True)
-    print("the file is loaded")
+    # import pdb; pdb.set_trace()
+    # t5_path = os.path.join(HF_HUB_CACHE, "models--manbeast3b--t5-v1_1-xxl-encoder-q8/snapshots/59c6c9cb99dcea42067f32caac3ea0836ef4c548/t5-v1_1-xxl-encoder-Q8_0.gguf")
+    # # config_path = os.path.join(HF_HUB_CACHE, "models--black-forest--labs/FLUX.1-schnell/snapshots/741f7c3ce8b383c54771c7003378a50191e9efe9/text_encoder_2/config.json")
+    # config_path = os.path.join(HF_HUB_CACHE, "models--black-forest-labs--FLUX.1-schnell/snapshots/741f7c3ce8b383c54771c7003378a50191e9efe9/")
+    # ckpt_t5 = load_single_file_checkpoint(t5_path,local_files_only=True)
+    # print("the file is loaded")
 
     text_encoder_2 = T5EncoderModel.from_pretrained(
         "silentdriver/aadb864af9", revision = "060dabc7fa271c26dfa3fd43c16e7c5bf3ac7892", torch_dtype=torch.bfloat16
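For context, the net effect of the hunk is that the debug trace and the local GGUF single-file loading path are commented out, leaving only the Hub download of the T5 text encoder. Below is a minimal sketch of that remaining load step, assuming T5EncoderModel comes from transformers (as is typical in FLUX pipelines) and that the call, which is truncated in this hunk, ends by moving the model to the device chosen earlier; the repo id and revision are taken verbatim from the diff.

# Minimal sketch, not the full load_pipeline() implementation.
import torch
from transformers import T5EncoderModel

dtype, device = torch.bfloat16, "cuda"

text_encoder_2 = T5EncoderModel.from_pretrained(
    "silentdriver/aadb864af9",                            # repo id from the diff
    revision="060dabc7fa271c26dfa3fd43c16e7c5bf3ac7892",  # pinned revision from the diff
    torch_dtype=dtype,
).to(device)  # device placement is an assumption; the closing of the call is outside this hunk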