Commit f981967
Parent(s): 4dcc0c1
Update app.py
app.py CHANGED
@@ -31,10 +31,10 @@ model = Blip2ForConditionalGeneration.from_pretrained(
     "Salesforce/blip2-opt-2.7b", device_map={"": 0}, torch_dtype=torch.float16
 )
 #Run first captioning as apparently makes the other ones faster
-pil_image = Image.new('RGB', (512, 512), 'black')
-blip_inputs = processor(images=pil_image, return_tensors="pt").to(device, torch.float16)
-generated_ids = model.generate(**blip_inputs)
-generated_text = processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()
+#pil_image = Image.new('RGB', (512, 512), 'black')
+#blip_inputs = processor(images=pil_image, return_tensors="pt").to(device, torch.float16)
+#generated_ids = model.generate(**blip_inputs)
+#generated_text = processor.batch_decode(generated_ids, skip_special_tokens=True)[0].strip()
 
 def load_captioning(uploaded_images, option):
     updates = []
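
The change comments out the one-off BLIP-2 warm-up caption that previously ran at module import. If that warm-up is worth keeping, a guarded, lazy variant avoids taking the whole app down when it fails at startup. The sketch below is a hypothetical alternative, not part of this commit; it reuses the processor, model, and device objects defined earlier in app.py and passes them in explicitly.

import torch
from PIL import Image

_warmed_up = False

def warm_up_blip(processor, model, device):
    # Hypothetical helper (not in the commit): run one throwaway caption
    # on a blank image so the first real generate() call is faster.
    global _warmed_up
    if _warmed_up:
        return
    try:
        pil_image = Image.new('RGB', (512, 512), 'black')
        blip_inputs = processor(images=pil_image, return_tensors="pt").to(device, torch.float16)
        model.generate(**blip_inputs)
        _warmed_up = True
    except Exception as e:
        # If warm-up fails (e.g. OOM on a cold GPU), log and move on
        # instead of crashing the Space at import time.
        print(f"BLIP-2 warm-up skipped: {e}")

Calling warm_up_blip(processor, model, device) at the top of load_captioning() would keep the speed-up for the first real caption while leaving the module import free of side effects.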