from typing import List, Union
from urllib.request import Request, urlopen

import numpy as np
from fashion_clip.fashion_clip import FashionCLIP
from PIL import Image


class PreTrainedPipeline:
    def __init__(self, path=""):
        # Load the FashionCLIP model (the `path` argument is not used here).
        self.model = FashionCLIP("fashion-clip")

    def _download_image(self, url: str) -> Image.Image:
        # Use a browser-like User-Agent so servers that reject generic clients
        # still serve the image.
        user_agent = "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.9.0.7) Gecko/2009021910 Firefox/3.0.7"
        headers = {"User-Agent": user_agent}
        request = Request(url, None, headers)
        image = Image.open(urlopen(request))
        return image

    def process(self, inputs: Union[str, List[str]]) -> List[float]:
        if isinstance(inputs, str):
            inputs = [inputs]
        # Download each unique image URL.
        images = [self._download_image(url) for url in set(inputs)]
        # Encode the images to produce one embedding per image.
        embeddings = self.model.encode_images(images, batch_size=1)
        # Average the per-image embeddings into a single feature vector.
        embedding = np.divide(np.sum(embeddings, axis=0), len(embeddings)).tolist()
        return embedding

    def __call__(self, inputs: Union[str, List[str]]) -> List[float]:
        """
        Args:
            inputs (:obj:`str` or :obj:`List[str]`):
                one image URL, or a list of image URLs, to compute features from.
        Return:
            A :obj:`list` of floats: the averaged image features computed by the model.
        """
        embedding = self.process(inputs=inputs)
        return embedding
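

# Example usage (illustrative sketch, not part of the original file; the URL
# below is a hypothetical placeholder): instantiate the pipeline and pass one
# or more image URLs to obtain a single averaged embedding.
if __name__ == "__main__":
    pipeline = PreTrainedPipeline()
    features = pipeline("https://example.com/dress.jpg")  # placeholder image URL
    print(len(features))  # dimensionality of the averaged image embedding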