Commit 967d3f3
1 Parent(s): 215a9ba
Update app.py
app.py CHANGED
@@ -130,7 +130,7 @@ pipe.to(device=DEVICE)
 from transformers import AutoProcessor, PaliGemmaForConditionalGeneration, BitsAndBytesConfig
 
 quantization_config = BitsAndBytesConfig(load_in_4bit=True)
-pali = PaliGemmaForConditionalGeneration.from_pretrained('google/paligemma-3b-pt-224', torch_dtype=dtype,).eval()
+pali = PaliGemmaForConditionalGeneration.from_pretrained('google/paligemma-3b-pt-224', torch_dtype=dtype, device='cuda').eval()
 processor = AutoProcessor.from_pretrained('google/paligemma-3b-pt-224')
 
 
@@ -393,7 +393,7 @@ def start(_, calibrate_prompts, user_id, request: gr.Request):
 im = torch.nn.functional.interpolate(im, (224, 224))
 im = (im - .5) * 2
 
-im_emb, gemb = encode_space(image, im
+im_emb, gemb = encode_space(image, im)
 im_emb = im_emb.to('cpu')
 gemb = gemb.to('cpu')
 
@@ -573,7 +573,7 @@ scheduler.start()
 #thread.start()
 
 # TODO shouldn't call this before gradio launch, yeah?
-@spaces.GPU()
+@spaces.GPU(duration=20)
 def encode_space(x, im):
     with torch.no_grad():
         print('encode')
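A note on the model-loading hunk (not part of the commit): the visible lines define a 4-bit BitsAndBytesConfig but never pass it to from_pretrained, and quantized models are usually placed on the GPU via device_map rather than a device keyword. A minimal sketch of that conventional pattern, with torch.bfloat16 standing in for the Space's dtype variable:

# Sketch only, not the committed app.py: a common way to load PaliGemma in 4-bit.
# Assumes bitsandbytes is installed; torch.bfloat16 stands in for the Space's `dtype`.
import torch
from transformers import AutoProcessor, PaliGemmaForConditionalGeneration, BitsAndBytesConfig

quantization_config = BitsAndBytesConfig(load_in_4bit=True)

pali = PaliGemmaForConditionalGeneration.from_pretrained(
    'google/paligemma-3b-pt-224',
    torch_dtype=torch.bfloat16,               # compute dtype for the non-quantized modules
    quantization_config=quantization_config,  # actually apply the 4-bit config
    device_map='cuda',                        # GPU placement via device_map
).eval()
processor = AutoProcessor.from_pretrained('google/paligemma-3b-pt-224')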
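The start() hunk resizes the image tensor to PaliGemma's 224x224 input size and rescales it from [0, 1] to [-1, 1] before calling the encoder. A self-contained sketch of that preprocessing step; the [0, 1] input range is an assumption, and preprocess_for_encoder is an illustrative name rather than a function in app.py:

# Sketch only: the preprocessing pattern from the `start` hunk, isolated for clarity.
import torch

def preprocess_for_encoder(im: torch.Tensor) -> torch.Tensor:
    # im: (batch, channels, H, W), assumed to hold values in [0, 1]
    im = torch.nn.functional.interpolate(im, (224, 224))  # match the 224x224 encoder input
    im = (im - .5) * 2                                     # rescale [0, 1] -> [-1, 1]
    return im

# Example with a dummy batch of one RGB image
x = torch.rand(1, 3, 512, 512)
print(preprocess_for_encoder(x).shape)  # torch.Size([1, 3, 224, 224])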
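The last hunk swaps @spaces.GPU() for @spaces.GPU(duration=20): on a Hugging Face ZeroGPU Space this decorator allocates a GPU for each call to the wrapped function, and duration caps that allocation at roughly 20 seconds here. A minimal sketch of the decorator in isolation; gpu_probe is a hypothetical helper, not part of app.py:

# Sketch only: ZeroGPU allocation via the `spaces` package.
import spaces
import torch

@spaces.GPU(duration=20)   # mirrors the committed decorator: ~20 s of GPU time per call
def gpu_probe() -> bool:
    # Hypothetical helper: report whether CUDA is visible inside the decorated call.
    return torch.cuda.is_available()

print(gpu_probe())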