idoco committed on
Commit
7d16fe6
·
1 Parent(s): de70fb3

Update scripts

Browse files
scripts/dataset_utils.py CHANGED
@@ -8,6 +8,8 @@ from urllib.parse import urlparse
8
  import requests
9
  import re
10
  from ast import literal_eval
 
 
11
  from tqdm import tqdm
12
  tqdm.pandas()
13
 
@@ -340,4 +342,32 @@ def build_prompts(base_df, triplets, templates):
340
  triplets = triplets[['type','subject','question_for_image','question','possible_answers', 'relation', 's_uri', 'r_uri','a_uri','attribute','a_type']]
341
  return triplets
342
 
343
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  import requests
9
  import re
10
  from ast import literal_eval
11
+ from PIL import Image
12
+ import math
13
  from tqdm import tqdm
14
  tqdm.pandas()
15
 
 
342
  triplets = triplets[['type','subject','question_for_image','question','possible_answers', 'relation', 's_uri', 'r_uri','a_uri','attribute','a_type']]
343
  return triplets
344
 
345
+
346
def resize_square(image, size=336, resample=Image.LANCZOS):
    """
    Scale an image to fit inside a size x size box (preserving aspect ratio),
    then center it on a black square canvas of that side length.

    image: a Pillow Image instance; it is not modified (a copy is processed).
    size: int, the side length of the square output.
    resample: Pillow resampling filter used for the scaling step.

    Returns a new RGB Pillow Image whose ``format`` attribute is copied from
    the input so callers can re-save it in the original format.
    """
    img_format = image.format
    image = image.copy()

    width, height = image.size
    ratio = min(size / width, size / height)
    # Clamp to `size`: math.ceil plus float rounding error could otherwise
    # overshoot the target by one pixel (e.g. 3000 * (336/3000) -> 336.0000...6).
    new_w = min(size, int(math.ceil(width * ratio)))
    new_h = min(size, int(math.ceil(height * ratio)))

    image = image.resize((new_w, new_h), resample)

    # Make the image square by centering it on black padding.
    # NOTE(review): pasting without a mask drops any alpha channel — presumably
    # intended, since the output is RGB; confirm for transparent inputs.
    side = max(image.size)
    canvas = Image.new("RGB", (side, side), (0, 0, 0))
    canvas.paste(image, ((side - image.size[0]) // 2, (side - image.size[1]) // 2))

    canvas.format = img_format
    return canvas
scripts/download_from_commons.py ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pandas as pd
2
+ import requests
3
+ import os
4
+ import argparse
5
+ from tqdm import tqdm
6
+ from dataset_utils import resize_square
7
+ from PIL import Image
8
+ import cairosvg
9
+ from io import BytesIO
10
+ from dotenv import load_dotenv
11
+ import time
12
+ import random
13
+ load_dotenv()
14
+
15
+ DATA_DIR = os.environ['DATA_DIR']
16
+ original_dir = os.path.join(DATA_DIR, 'original_path')
17
+ resized_dir = os.path.join(DATA_DIR, 'resized_path')
18
+
19
def fetch_image(url, max_retries=5):
    """
    Download *url* and return the raw response body as bytes, or None on failure.

    Retries up to *max_retries* times: honours the Retry-After header on
    HTTP 429, and uses exponential backoff with jitter on other request
    errors. A browser-like User-Agent is rotated per call to avoid trivial
    bot blocking by Wikimedia Commons.
    """
    headers = {
        'User-Agent': random.choice([
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
            'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
            'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36',
        ])
    }

    for attempt in range(max_retries):
        try:
            # timeout= prevents one hung connection from stalling the whole crawl
            response = requests.get(url, headers=headers, stream=True, timeout=30)
            if response.status_code == 429:  # Too Many Requests
                retry_after = int(response.headers.get("Retry-After", random.uniform(1, 5)))
                time.sleep(retry_after)
                continue
            response.raise_for_status()
            return response.content
        except requests.exceptions.RequestException as e:
            wait_time = 2 ** attempt + random.uniform(0, 1)  # Exponential backoff
            print(f"Error fetching {url}: {e}, retrying in {wait_time:.2f}s")
            time.sleep(wait_time)

    print(f"Failed to fetch {url} after {max_retries} attempts.")
    return None
44
+
45
def main(args):
    """
    Download every image listed in the input CSV, save an original JPEG plus a
    resized square copy under DATA_DIR, and write the surviving rows (with
    original_path / resized_path columns filled in) to args.target_df.

    args: parsed CLI namespace from get_exp_parser()
          (base_df: input CSV path, target_df: output CSV path,
           width: target side length of the resized square image).
    """
    # Fixed: the parser defines --base_df; the old `args.csv` raised AttributeError.
    df = pd.read_csv(args.base_df)
    df['original_path'] = ''
    df['resized_path'] = ''

    errors = 0
    count = 0

    for index, row in tqdm(df.iterrows(), total=len(df)):
        image_url = row['image']
        subject = row['subject'].replace(' ', '_').replace('/', '_')
        type_original_dir = os.path.join(original_dir, row['type'])
        type_resized_dir = os.path.join(resized_dir, row['type'])
        os.makedirs(type_original_dir, exist_ok=True)
        os.makedirs(type_resized_dir, exist_ok=True)

        image_data = fetch_image(image_url)
        if not image_data:
            errors += 1
            continue

        try:
            if image_url.endswith('.svg'):
                # Rasterize SVGs before handing them to Pillow.
                png_bytes = cairosvg.svg2png(bytestring=image_data)
                image = Image.open(BytesIO(png_bytes)).convert("RGBA")
            else:
                image = Image.open(BytesIO(image_data))

            if row['type'] == 'brands':
                # Brand logos are often transparent; composite onto white.
                image = image.convert("RGBA")
                white_bg = Image.new("RGB", image.size, (255, 255, 255))
                white_bg.paste(image, mask=image.split()[3])
                image = white_bg

            # JPEG cannot store alpha or palette modes; convert before saving
            # (previously RGBA/P inputs crashed the save and were counted as errors).
            if image.mode != "RGB":
                image = image.convert("RGB")

            # Shard files by the subject's first letter to keep directories small.
            first_letter = subject[0].lower()
            os.makedirs(os.path.join(type_original_dir, first_letter), exist_ok=True)
            original_filepath = os.path.join(type_original_dir, first_letter, f'{subject}.jpg')
            image.save(original_filepath, "JPEG", quality=95)

            im = Image.open(original_filepath)
            im = resize_square(im, size=args.width)  # honour the --width argument

            os.makedirs(os.path.join(type_resized_dir, first_letter), exist_ok=True)
            resized_filepath = os.path.join(type_resized_dir, first_letter, f'{subject}.jpg')
            im.save(resized_filepath, im.format)

            df.at[index, 'original_path'] = original_filepath if os.path.exists(original_filepath) else ''
            df.at[index, 'resized_path'] = resized_filepath if os.path.exists(resized_filepath) else ''

        except Exception as e:
            errors += 1
            print(f"Failed to download image {subject}: {e}")
            continue
        count += 1

    # Drop rows whose files never materialised, then persist the manifest.
    df = df[df['original_path'] != '']
    df = df[df['resized_path'] != '']
    df.to_csv(args.target_df, index=False)

    print(f'Finished downloading {count} images with {errors} errors')
105
+
106
+
107
def get_exp_parser():
    """Build the CLI argument parser for the Commons download script.

    Flags: --base_df (input CSV), --target_df (output CSV),
    --width (target square size in pixels, default 336).
    """
    parser = argparse.ArgumentParser(add_help=False)
    for csv_flag in ('--base_df', '--target_df'):
        parser.add_argument(csv_flag, type=str)
    parser.add_argument('--width', type=int, default=336)
    return parser
113
+
114
+
115
# Script entry point: parse CLI arguments and run the download pipeline.
if __name__ == "__main__":
    parser = get_exp_parser()
    args = parser.parse_args()
    main(args)