Spaces:
Runtime error
Runtime error
Samuel Schmidt
committed on
Commit
·
cfe24db
1
Parent(s):
564548c
Trying out torch.grad for speeding up
Browse files- src/CLIP.py +5 -3
src/CLIP.py
CHANGED
@@ -1,4 +1,5 @@
|
|
1 |
from transformers import AutoProcessor, CLIPModel
|
|
|
2 |
|
3 |
|
4 |
class CLIPImageEncoder:
|
@@ -8,6 +9,7 @@ class CLIPImageEncoder:
|
|
8 |
self.processor = AutoProcessor.from_pretrained("openai/clip-vit-base-patch32")
|
9 |
|
10 |
def encode_image(self, image_pil):
|
11 |
-
|
12 |
-
|
13 |
-
|
|
|
|
1 |
from transformers import AutoProcessor, CLIPModel
|
2 |
+
import torch
|
3 |
|
4 |
|
5 |
class CLIPImageEncoder:
|
|
|
9 |
self.processor = AutoProcessor.from_pretrained("openai/clip-vit-base-patch32")
|
10 |
|
11 |
def encode_image(self, image_pil):
    """Encode a PIL image into a CLIP image-feature vector.

    Args:
        image_pil: a PIL image (or anything `self.processor` accepts via
            its `images=` argument).

    Returns:
        A 1-D numpy array — the CLIP image embedding for the single input
        image (batch dimension stripped via `[0]`).
    """
    # Inference only: disable autograd so no computation graph is built,
    # which saves memory and speeds up the forward pass.
    with torch.no_grad():
        # Renamed from `input` to avoid shadowing the `input()` builtin.
        inputs = self.processor(images=image_pil, return_tensors="pt")
        image_features = self.model.get_image_features(**inputs)
        # detach() is redundant under no_grad() but harmless; cpu() ensures
        # the tensor is host-resident before the numpy conversion.
        return image_features.cpu().detach().numpy()[0]
|