reduce batch size
script.py CHANGED
@@ -66,7 +66,7 @@ def generate_embeddings(metadata_file_path, root_dir):
         metadata_df, local_filepath=root_dir, transform=transforms
     )
 
-    loader = DataLoader(test_dataset, batch_size=
+    loader = DataLoader(test_dataset, batch_size=1, shuffle=False, num_workers=4)
 
     device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
     model = timm.create_model("timm/vit_large_patch14_reg4_dinov2.lvd142m", pretrained=True)
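
For context, generate_embeddings builds a DataLoader over a test dataset and runs it through a frozen DINOv2 ViT-L/14 model from timm to produce per-image embeddings; dropping batch_size to 1 trades throughput for a smaller peak GPU memory footprint. Below is a minimal, self-contained sketch of that pattern, assuming the rest of the script; the dummy dataset, the pooling via forward_head(..., pre_logits=True), and the printed shapes are illustrative assumptions, not taken from the original script.

import timm
import torch
from torch.utils.data import DataLoader, TensorDataset

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
model = timm.create_model("timm/vit_large_patch14_reg4_dinov2.lvd142m", pretrained=True)
model = model.eval().to(device)

# Hypothetical stand-in for the script's test_dataset: a few random tensors
# shaped to the model's expected input resolution.
data_config = timm.data.resolve_model_data_config(model)  # e.g. input_size=(3, 518, 518)
test_dataset = TensorDataset(torch.randn(4, *data_config["input_size"]))

# batch_size=1 keeps peak GPU memory low at the cost of throughput.
loader = DataLoader(test_dataset, batch_size=1, shuffle=False, num_workers=4)

embeddings = []
with torch.no_grad():
    for (images,) in loader:
        images = images.to(device)
        feats = model.forward_features(images)                # patch tokens
        pooled = model.forward_head(feats, pre_logits=True)   # pooled embedding per image
        embeddings.append(pooled.cpu())

embeddings = torch.cat(embeddings, dim=0)
print(embeddings.shape)  # (num_images, embed_dim), e.g. (4, 1024) for ViT-L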