import torch
import clip


class CLIPImageEncoder:
    """Wraps OpenAI CLIP to embed PIL images as feature vectors."""

    def __init__(self, device="cpu"):
        self.device = device
        # Load the ViT-B/16 CLIP model together with its matching
        # image preprocessing pipeline.
        self.model, self.preprocess = clip.load("ViT-B/16", device=device)

    def encode_image(self, image_pil):
        print("Encoding image with CLIP")
        with torch.no_grad():
            # Preprocess the image, add a batch dimension, and move it
            # to the target device before encoding.
            image_preprocessed = self.preprocess(image_pil).unsqueeze(0).to(self.device)
            image_features = self.model.encode_image(image_preprocessed)
            # Return the single feature vector as a NumPy array on the CPU.
            return image_features.cpu().numpy()[0]
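

# A minimal usage sketch (not part of the original file): it assumes the
# openai/CLIP package is installed and uses "example.jpg" as a hypothetical
# placeholder path for an input image.
if __name__ == "__main__":
    from PIL import Image

    encoder = CLIPImageEncoder(device="cpu")
    image = Image.open("example.jpg")  # hypothetical input image
    embedding = encoder.encode_image(image)
    print(embedding.shape)  # ViT-B/16 yields a 512-dimensional vector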