PyTorch
clip
CLIP
LLM2CLIP
custom_code
Gengzigang commited on
Commit
1b9deef
1 Parent(s): aec4766
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -37,7 +37,7 @@ import torch
37
 
38
  image_path = "CLIP.png"
39
  model_name_or_path = "LLM2CLIP-Openai-L-14-336" # or /path/to/local/LLM2CLIP-Openai-L-14-336
40
- image_size = 224
41
 
42
  processor = CLIPImageProcessor.from_pretrained("openai/clip-vit-large-patch14-336")
43
  model = AutoModel.from_pretrained(
 
37
 
38
  image_path = "CLIP.png"
39
  model_name_or_path = "LLM2CLIP-Openai-L-14-336" # or /path/to/local/LLM2CLIP-Openai-L-14-336
40
+ image_size = 336
41
 
42
  processor = CLIPImageProcessor.from_pretrained("openai/clip-vit-large-patch14-336")
43
  model = AutoModel.from_pretrained(