ahmedheakl committed
Commit df1db28 · verified · 1 Parent(s): 9be0b9d

Create down_r1onevision.py

Files changed (1)
  1. down_r1onevision.py +64 -0
down_r1onevision.py ADDED
@@ -0,0 +1,64 @@
+ import datasets
+ from tqdm import tqdm
+ import os
+ import json
+ import multiprocessing as mp
+ from functools import partial
+
+ def process_item(idx_data, dataset_name):
+     idx, d = idx_data
+     conversations = d['conversations']
+     image = d['image']
+     image_path = f"{dataset_name}/{idx}.png"
+     os.makedirs(os.path.dirname(f"data/{image_path}"), exist_ok=True)
+     image.save(f"data/{image_path}")
+     for i, c in enumerate(conversations):
+         conversations[i]['content'] = c['content'].replace("<image>", "")
+     if len(conversations) > 1:
+         conversations[1]['content'] = "<image>" + conversations[1]['content']
+     return {
+         "images": [image_path],
+         "system": SYSTEM_PROMPT,
+         "conversations": conversations
+     }
+
+ ds_id = "ahmedheakl/r1_90k_instruct"
+ dataset_name = "r1_onevision_90k"
+ out_root = "data"
+ SYSTEM_PROMPT = (
+     "A conversation between User and Assistant. The user asks a question, and the Assistant solves it. The assistant "
+     "first thinks about the reasoning process in the mind and then provides the user with the answer. The reasoning "
+     "process and answer are enclosed within <think> </think> and <answer> </answer> tags, respectively, i.e., "
+     "<think> reasoning process here </think><answer> answer here </answer>"
+ )
+ os.makedirs(f"{out_root}/{dataset_name}", exist_ok=True)
+ print("Loading dataset...")
+ ds = datasets.load_dataset(ds_id, split="train", trust_remote_code=True)
+ num_processes = mp.cpu_count() - 6
+ print(f"Using {num_processes} processes")
+ process_func = partial(process_item, dataset_name=dataset_name)
+ with mp.Pool(processes=num_processes) as pool:
+     data = list(tqdm(pool.imap(process_func, enumerate(ds)), total=len(ds), desc="Processing items"))
+ output_path = f"{out_root}/{dataset_name}.json"
+ print(f"Saving dataset to {output_path}")
+ with open(output_path, "w") as f:
+     json.dump(data, f, indent=4, ensure_ascii=False)
+ with open(f"{out_root}/dataset_info.json", "r") as f:
+     dataset_info = json.load(f)
+ dataset_info[dataset_name] = {
+     "file_name": f"{dataset_name}.json",
+     "formatting": "sharegpt",
+     "columns": {
+         "messages": "conversations",
+         "images": "images",
+         "system": "system"
+     },
+     "tags": {
+         "role_tag": "role",
+         "content_tag": "content",
+         "user_tag": "user",
+         "assistant_tag": "assistant"
+     }
+ }
+ with open(f"{out_root}/dataset_info.json", "w") as f:
+     json.dump(dataset_info, f, indent=4, ensure_ascii=False)
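
For reference, a minimal sketch of how the script's output can be inspected once it has run. It assumes data/r1_onevision_90k.json already exists and uses only the field names that process_item returns (images, system, conversations); the sample values in the comments are illustrative, not taken from the real dataset.

import json

# Load the converted dataset written by the script above (assumed to exist).
with open("data/r1_onevision_90k.json") as f:
    data = json.load(f)

entry = data[0]
print(entry["images"])        # e.g. ["r1_onevision_90k/0.png"]
print(entry["system"][:40])   # the shared <think>/<answer> system prompt
for turn in entry["conversations"]:
    # Each turn keeps the original "role"/"content" keys; process_item strips
    # "<image>" from every turn and re-inserts it at the start of the second one.
    print(turn["role"], turn["content"][:60])

The dataset_info.json entry registers the new file under the sharegpt formatting convention with the conversations/images/system column mapping; whichever trainer reads that registry (the script itself does not name one) can then load the dataset by its key, r1_onevision_90k.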