GBI-16-2D / GBI-16-2D.py
anonuser7251's picture
Upload folder using huggingface_hub
f8c5348 verified
raw
history blame contribute delete
No virus
5.92 kB
import os
import random
from glob import glob
import json
import numpy as np
from astropy.io import fits
from astropy.coordinates import Angle
from astropy import units as u
from fsspec.core import url_to_fs
from huggingface_hub import hf_hub_download
import datasets
from datasets import DownloadManager
from utils import read_lris
# Dataset card text. NOTE: this must name GBI-16-2D (ground-based imaging, Keck);
# the previous text said "SBI-16-2D", a copy-paste from the space-based sibling dataset.
_DESCRIPTION = (
    """GBI-16-2D is a dataset which is part of the AstroCompress project. """
    """It contains data assembled from the Keck Telescope. """
    """<TODO>Describe data format</TODO>"""
)

_HOMEPAGE = "https://google.github.io/AstroCompress"

_LICENSE = "CC BY 4.0"

# Base URL of the dataset repository on the Hugging Face Hub.
_URL = "https://huggingface.co/datasets/AstroCompress/GBI-16-2D/resolve/main/"

# Repo-relative paths of the jsonl index files, per config name and split.
_URLS = {
    "tiny": {
        "train": "./splits/tiny_train.jsonl",
        "test": "./splits/tiny_test.jsonl",
    },
    "full": {
        "train": "./splits/full_train.jsonl",
        "test": "./splits/full_test.jsonl",
    },
}

_REPO_ID = "AstroCompress/GBI-16-2D"
class GBI_16_2D(datasets.GeneratorBasedBuilder):
    """GBI-16-2D dataset builder (AstroCompress, 2D images from the Keck Telescope).

    Two configs are available: ``tiny`` (small smoke-test subset) and ``full``.
    Each example is a 16-bit 2D image plus per-image metadata read from the
    split's jsonl index file.
    """

    VERSION = datasets.Version("1.0.1")

    BUILDER_CONFIGS = [
        datasets.BuilderConfig(
            name="tiny",
            version=VERSION,
            description="A small subset of the data, to test downstream workflows.",
        ),
        datasets.BuilderConfig(
            name="full",
            version=VERSION,
            description="The full dataset",
        ),
    ]

    DEFAULT_CONFIG_NAME = "tiny"

    # Per-image metadata fields copied from each jsonl record into the example.
    # Must stay in sync with the features declared in _info().
    _METADATA_KEYS = (
        "ra",
        "dec",
        "pixscale",
        "image_id",
        "rotation_angle",
        "dim_1",
        "dim_2",
        "exposure_time",
    )

    def __init__(self, **kwargs):
        # Pin the builder version explicitly so every config shares VERSION.
        super().__init__(version=self.VERSION, **kwargs)

    def _info(self):
        """Return the dataset schema, homepage, license and citation."""
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "image": datasets.Image(decode=True, mode="I;16"),
                    "ra": datasets.Value("float64"),
                    "dec": datasets.Value("float64"),
                    "pixscale": datasets.Value("float64"),
                    "image_id": datasets.Value("string"),
                    "rotation_angle": datasets.Value("float64"),
                    "dim_1": datasets.Value("int64"),
                    "dim_2": datasets.Value("int64"),
                    "exposure_time": datasets.Value("float64"),
                }
            ),
            supervised_keys=None,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation="TBD",
        )

    def _read_split_jsonl(self, split_file):
        """Parse a split jsonl index file.

        Returns a pair ``(data_filenames, data_metadata)`` where
        ``data_filenames`` holds the repo-relative image paths and
        ``data_metadata`` the per-image metadata dicts (keys: _METADATA_KEYS).

        Blank lines (e.g. a trailing newline at end of file) are skipped
        instead of raising ``json.JSONDecodeError``.
        """
        data_filenames = []
        data_metadata = []
        with open(split_file, encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue  # tolerate empty / trailing lines in the index
                item = json.loads(line)
                data_filenames.append(item["image"])
                data_metadata.append({key: item[key] for key in self._METADATA_KEYS})
        return data_filenames, data_metadata

    def _split_generators(self, dl_manager: DownloadManager):
        """Resolve the split index files and image files for train/test.

        When the script runs against a local checkout, paths are resolved
        relative to the repository directory; otherwise every file is fetched
        from the Hub via ``hf_hub_download``.
        """
        ret = []
        # NOTE(review): ``_base_path`` is a private attribute of the datasets
        # DownloadManager — may break on a library upgrade; confirm on bump.
        base_path = dl_manager._base_path
        locally_run = not base_path.startswith(datasets.config.HF_ENDPOINT)
        _, path = url_to_fs(base_path)

        for split in ["train", "test"]:
            if locally_run:
                split_file_location = os.path.normpath(
                    os.path.join(path, _URLS[self.config.name][split])
                )
                split_file = dl_manager.download_and_extract(split_file_location)
            else:
                split_file = hf_hub_download(
                    repo_id=_REPO_ID,
                    filename=_URLS[self.config.name][split],
                    repo_type="dataset",
                )

            data_filenames, data_metadata = self._read_split_jsonl(split_file)

            if locally_run:
                data_urls = [
                    os.path.normpath(os.path.join(path, data_filename))
                    for data_filename in data_filenames
                ]
                data_files = [
                    dl_manager.download(data_url) for data_url in data_urls
                ]
            else:
                data_files = [
                    hf_hub_download(
                        repo_id=_REPO_ID, filename=data_filename, repo_type="dataset"
                    )
                    for data_filename in data_filenames
                ]

            ret.append(
                datasets.SplitGenerator(
                    name=(
                        datasets.Split.TRAIN
                        if split == "train"
                        else datasets.Split.TEST
                    ),
                    gen_kwargs={
                        "filepaths": data_files,
                        "split_file": split_file,
                        "split": split,
                        "data_metadata": data_metadata,
                    },
                ),
            )
        return ret

    def _generate_examples(self, filepaths, split_file, split, data_metadata):
        """Generate GBI-16-2D examples.

        Yields ``(key, example)`` pairs where the example merges the decoded
        image array with the metadata dict parsed from the split jsonl.
        """
        for idx, (filepath, item) in enumerate(zip(filepaths, data_metadata)):
            task_instance_key = f"{self.config.name}-{split}-{idx}"
            with fits.open(filepath, memmap=False) as hdul:
                if len(hdul) > 1:
                    # multiextension ... paste together the amplifiers
                    data, _ = read_lris(filepath)
                else:
                    data = hdul[0].data
                image_data = data[:, :]
            yield task_instance_key, {**{"image": image_data}, **item}