added truncation to clip tokenizer. TODO: only use the question, not the context.
clip_for_ppts.py  CHANGED  (+1 -1)
@@ -74,7 +74,7 @@ class ClipImage:
 
         elif self.mode == 'text':
             # Preprocess the text
-            input_arr = torch.cat([clip.tokenize(f"{input_text_or_img}")]).to(self.device)
+            input_arr = torch.cat([clip.tokenize(f"{input_text_or_img}", truncate=True)]).to(self.device)
 
         # Encode the image or text
         with torch.no_grad():
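
For context, here is a minimal standalone sketch of what the added truncate=True buys, assuming OpenAI's clip package and its standard API; the model name "ViT-B/32" and the sample text below are illustrative and not taken from this Space. clip.tokenize raises a RuntimeError when the input exceeds CLIP's 77-token context window; passing truncate=True cuts the token sequence to fit instead of failing.

import torch
import clip

device = "cuda" if torch.cuda.is_available() else "cpu"
model, _preprocess = clip.load("ViT-B/32", device=device)

# A prompt long enough to exceed CLIP's 77-token context window.
long_text = "what does the slide say about attention and transformers " * 40

# Without truncate=True this call raises
#   RuntimeError: Input ... is too long for context length 77
# With truncate=True the tokens are clipped to the 77-token limit.
input_arr = torch.cat([clip.tokenize(long_text, truncate=True)]).to(device)

with torch.no_grad():
    text_features = model.encode_text(input_arr)

print(text_features.shape)  # e.g. torch.Size([1, 512]) for ViT-B/32

Note that truncation keeps only the first ~77 tokens, which is presumably why the commit's TODO suggests tokenizing only the question rather than the question plus context: a long prepended context would otherwise push the question itself past the cutoff.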