import os
import random
import json
from glob import glob

import numpy as np
from tqdm import tqdm

from astropy.io import fits
from astropy.wcs import WCS


def get_fits_footprint(fits_path):
    """
    Process a FITS file to extract WCS information and calculate the footprint.

    Parameters:
    fits_path (str): Path to the FITS file.

    Returns:
    list: The flattened RA/Dec footprint corner coordinates.
    """
    with fits.open(fits_path) as hdul:
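        # Collapse the data cube (assumed to be integrations x groups x ny x nx)
        # to its first 2D plane, then evaluate the footprint over the two
        # smallest pixel axes reported by the WCS.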
        hdul[1].data = hdul[1].data[0, 0]
        wcs = WCS(hdul[1].header)
        shape = sorted(tuple(wcs.pixel_shape))[:2]
        footprint = wcs.calc_footprint(axes=shape)
        coords = list(footprint.flatten())
    return coords


def calculate_pixel_scale(header):
    """
    Calculate the pixel scale in arcseconds per pixel from a FITS header.

    Parameters:
    header (astropy.io.fits.header.Header): The FITS header containing WCS information.

    Returns:
    Mean of the pixel scales in x and y.
    """
    
    # Calculate the pixel scales in arcseconds per pixel
    pixscale_x = header.get('CDELT1', np.nan)
    pixscale_y = header.get('CDELT2', np.nan)
    
    return np.mean([pixscale_x, pixscale_y])                

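# Usage sketch (illustrative, not executed by this script): convert the pixel
# scale to arcseconds, assuming CUNIT1/CUNIT2 are degrees:
#
#     with fits.open("example_uncal.fits") as hdul:  # hypothetical file name
#         scale_arcsec = calculate_pixel_scale(hdul["SCI"].header) * 3600.0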

def make_split_jsonl_files(config_type="tiny", data_dir="./data",
                           outdir="./splits", seed=42):
    """
    Create jsonl files for the SBI-16-3D dataset.

    config_type: str, default="tiny"
        The type of split to create. Options are "tiny" and "full".
    data_dir: str, default="./data"
        The directory where the FITS files are located.
    outdir: str, default="./splits"
        The directory where the jsonl files will be created.
    seed: int, default=42
        The seed for the random split.
    """
    random.seed(seed)
    os.makedirs(outdir, exist_ok=True)

    fits_files = glob(os.path.join(data_dir, "*.fits"))
    random.shuffle(fits_files)
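    # "tiny" keeps just 2 training files and 1 test file; "full" uses a random
    # 80/20 train/test split over every FITS file found in data_dir.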
    if config_type == "tiny":
        train_files = fits_files[:2]
        test_files = fits_files[2:3]
    elif config_type == "full":
        split_idx = int(0.8 * len(fits_files))
        train_files = fits_files[:split_idx]
        test_files = fits_files[split_idx:]
    else:
        raise ValueError("Unsupported config_type. Use 'tiny' or 'full'.")

    def create_jsonl(files, split_name):
        output_file = os.path.join(outdir, f"{config_type}_{split_name}.jsonl")
        with open(output_file, "w") as out_f:
            for file in tqdm(files):
                #print(file, flush=True, end="...")
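                # memmap=False loads the arrays into memory up front (presumably
                # to avoid keeping file handles open while looping over many files).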
                with fits.open(file, memmap=False) as hdul:
                    image_id = os.path.basename(file).split(".fits")[0]
                    ra = hdul["SCI"].header.get('CRVAL1', 0)
                    dec = hdul["SCI"].header.get('CRVAL2', 0)
                    pixscale = calculate_pixel_scale(hdul["SCI"].header)
                    footprint = get_fits_footprint(file)
                    read_pattern = hdul[0].header.get('READPATT', 0)
                    # number of groups per integration (second axis of the SCI cube)
                    ntimes = hdul["SCI"].data.shape[1]
                    item = {"image_id": image_id, "image": file, "ra": ra, "dec": dec, 
                            "pixscale": pixscale, "ntimes": ntimes, "read_pattern": read_pattern, "footprint": footprint}
                    out_f.write(json.dumps(item) + "\n")

    create_jsonl(train_files, "train")
    create_jsonl(test_files, "test")

    
if __name__ == "__main__":
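    # Writes tiny_train.jsonl, tiny_test.jsonl, full_train.jsonl and
    # full_test.jsonl under ./splits.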
    make_split_jsonl_files("tiny")
    make_split_jsonl_files("full")