RobotJelly committed
Commit 1ad57bd · 1 Parent(s): 40a7087
Files changed (1):
  1. app.py +1 -3
app.py CHANGED
@@ -17,8 +17,6 @@ model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
 processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")
 tokenizer = CLIPTokenizer.from_pretrained("openai/clip-vit-base-patch32")
 
-model = model.to(device)
-
 # taking photo IDs
 photo_ids = pd.read_csv("./photo_ids.csv")
 photo_ids = list(photo_ids['photo_id'])
@@ -44,7 +42,7 @@ def encode_search_query(search_query, model):
     #inputs = tokenizer([search_query], padding=True, return_tensors="pt")
     inputs = processor(text=[search_query], images=None, return_tensors="pt", padding=True)
     text_features = model.get_text_features(**inputs).detach().numpy()
-    return np.array(text_features)
+    return text_features
 
 # Find all matched photos
 def find_matches(text_features, photo_features, photo_ids, results_count=4):
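For context, here is a minimal sketch of how the pieces touched by this commit plausibly fit together. Only encode_search_query's body and find_matches' signature appear in the diff; the cosine-similarity ranking inside find_matches and the "./features.npy" filename are assumptions, not the Space's actual code. The two changes themselves are straightforward: dropping model.to(device) leaves the model wherever from_pretrained put it (CPU by default), and the np.array() wrapper was redundant because .detach().numpy() already returns a NumPy ndarray.

import numpy as np
import pandas as pd
from transformers import CLIPModel, CLIPProcessor

model = CLIPModel.from_pretrained("openai/clip-vit-base-patch32")
processor = CLIPProcessor.from_pretrained("openai/clip-vit-base-patch32")

# Photo IDs come from the CSV shown in the diff; the features file is assumed
photo_ids = list(pd.read_csv("./photo_ids.csv")["photo_id"])
photo_features = np.load("./features.npy")  # hypothetical filename, shape (N, 512)

def encode_search_query(search_query, model):
    # Tokenize the query and project it into CLIP's shared embedding space;
    # .detach().numpy() already yields an ndarray, so no np.array() is needed
    inputs = processor(text=[search_query], images=None, return_tensors="pt", padding=True)
    return model.get_text_features(**inputs).detach().numpy()

def find_matches(text_features, photo_features, photo_ids, results_count=4):
    # Assumed ranking: L2-normalise both sides, then take the highest dot products
    text = text_features / np.linalg.norm(text_features, axis=1, keepdims=True)
    photos = photo_features / np.linalg.norm(photo_features, axis=1, keepdims=True)
    similarities = (photos @ text.T).squeeze(1)          # one score per photo
    best = np.argsort(-similarities)[:results_count]     # indices of the top matches
    return [photo_ids[i] for i in best]

With this shape, the commit's return-value change is purely cosmetic, while removing model.to(device) only matters if a GPU was previously targeted; on a CPU-only Space the two versions behave identically.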