|
""" |
|
Run several baseline compression algorithms over a set of FITS files and store
the per-file results in a CSV.

TODO: this code was written for functionality only and still needs cleanup.
|
""" |
|
|
|
|
|
import os |
|
import re |
|
from pathlib import Path |
|
import argparse |
|
import os.path |
|
from astropy.io import fits |
|
import numpy as np |
|
from time import time |
|
import pandas as pd |
|
from tqdm import tqdm |
|
|
|
from astropy.io.fits import CompImageHDU |
|
from imagecodecs import ( |
|
jpeg2k_encode, |
|
jpeg2k_decode, |
|
jpegls_encode, |
|
jpegls_decode, |
|
jpegxl_encode, |
|
jpegxl_decode, |
|
rcomp_encode, |
|
rcomp_decode, |
|
) |
|
|
|
|
|
|
|
def jpegxl_encode_max_effort_preset(x):
    """Losslessly encode ``x`` as JPEG-XL at maximum effort (slowest, best ratio)."""
    return jpegxl_encode(x, lossless=True, effort=9)


def jpegxl_encode_preset(x):
    """Losslessly encode ``x`` as JPEG-XL at the library's default effort."""
    return jpegxl_encode(x, lossless=True)
|
|
|
def find_matching_files():
    """Load the evaluation split and return the FITS file paths it lists."""
    # One record per line; the 'image' field holds the file path.
    split = pd.read_json("./splits/full_test.jsonl", lines=True)
    return split['image'].tolist()
|
|
|
def benchmark_imagecodecs_compression_algos(arr, compression_type):
    """
    Round-trip ``arr`` through the codec registered under ``compression_type``
    in the module-level ``ALL_CODECS`` table and time both directions.

    Parameters
    ----------
    arr : numpy.ndarray
        Image data to compress.
    compression_type : str
        Key into ``ALL_CODECS`` mapping to an ``[encoder, decoder]`` pair.

    Returns
    -------
    dict
        ``{<type>_BPD, <type>_WRITE_RUNTIME, <type>_READ_RUNTIME}`` where BPD
        is encoded bytes per array element and the runtimes are wall-clock
        seconds for encode and decode respectively.

    Raises
    ------
    ValueError
        If the decoded array does not exactly match the input (lossy round trip).
    """
    encoder, decoder = ALL_CODECS[compression_type]

    write_start_time = time()
    encoded = encoder(arr)
    write_time = time() - write_start_time

    read_start_time = time()
    if compression_type == "RICE":
        # rcomp_decode needs the target shape/dtype; use the input's own dtype
        # rather than hard-coding uint16 so non-uint16 arrays round-trip too.
        decoded = decoder(encoded, shape=arr.shape, dtype=arr.dtype)
    else:
        decoded = decoder(encoded)
    read_time = time() - read_start_time

    # Explicit check instead of `assert`, which is stripped under `python -O`.
    if not np.array_equal(arr, decoded):
        raise ValueError(f"{compression_type} round trip did not reproduce the input array.")

    buflength = len(encoded)

    return {compression_type + "_BPD": buflength / arr.size,
            compression_type + "_WRITE_RUNTIME": write_time,
            compression_type + "_READ_RUNTIME": read_time,
            }
|
|
|
def main(dim):
    """
    Benchmark every codec in ``ALL_CODECS`` over the test-split FITS files and
    checkpoint the results to ``baseline_results_<dim>.csv``.

    Parameters
    ----------
    dim : str
        Data dimensionality selector; only ``'2d'`` is supported here.

    Raises
    ------
    RuntimeError
        If ``dim`` is anything other than ``'2d'``.
    """
    save_path = f"baseline_results_{dim}.csv"

    file_paths = find_matching_files()

    # Resume from an existing results file if present. The first CSV column is
    # the row label (path + HDU suffix) written by to_csv below, so read it
    # back as the index. BUG FIX: the original left `df` undefined when the
    # file did not exist, crashing on first use.
    if os.path.isfile(save_path):
        df = pd.read_csv(save_path, index_col=0)
    else:
        df = pd.DataFrame()

    print(f"Number of files to be tested: {len(file_paths)}")

    ct = 0

    for path in tqdm(file_paths):
        # HDU indices 1 and 4 are assumed to hold the image data — TODO
        # confirm against the dataset's FITS layout.
        for hdu_idx in [1, 4]:
            df_idx = path + f"_hdu{hdu_idx}"
            # Skip rows already benchmarked in a previous (resumed) run.
            if df_idx in df.index:
                continue
            with fits.open(path) as hdul:
                if dim == '2d':
                    arr = hdul[hdu_idx].data
                else:
                    raise RuntimeError(f"{dim} not applicable.")

            ct += 1
            if ct % 10 == 0:
                # Periodic checkpoint so a crash doesn't lose all progress.
                df.to_csv(save_path)

            for algo in ALL_CODECS.keys():
                try:
                    if algo == "JPEG_2K" and dim != '2d':
                        test_results = benchmark_imagecodecs_compression_algos(arr.transpose(1, 2, 0), algo)
                    else:
                        test_results = benchmark_imagecodecs_compression_algos(arr, algo)

                    # BUG FIX: the original only wrote values whose column
                    # already existed, so a fresh run recorded nothing.
                    # `df.at` creates missing rows and columns as needed.
                    for column, value in test_results.items():
                        df.at[df_idx, column] = value

                except Exception as e:
                    print(f"Failed at {path} under exception {e}.")

    df.to_csv(save_path)
|
|
|
if __name__ == "__main__":
    # Parse the single positional argument selecting data dimensionality.
    parser = argparse.ArgumentParser(description="Process some 2D or 3D data.")
    parser.add_argument(
        "dimension",
        choices=['2d'],
        help="Specify whether the data is 2d, or; not applicable here: 3dt (3d time dimension), or 3dw (3d wavelength dimension)."
    )
    args = parser.parse_args()
    dim = args.dimension.lower()

    # Codec registry used by the benchmark: name -> [encoder, decoder].
    ALL_CODECS = {
        "JPEG_XL_MAX_EFFORT": [jpegxl_encode_max_effort_preset, jpegxl_decode],
        "JPEG_XL": [jpegxl_encode_preset, jpegxl_decode],
        "JPEG_2K": [jpeg2k_encode, jpeg2k_decode],
        "JPEG_LS": [jpegls_encode, jpegls_decode],
        "RICE": [rcomp_encode, rcomp_decode],
    }

    # One BPD / write-runtime / read-runtime column per codec, in codec order.
    columns = [
        algo + suffix
        for algo in ALL_CODECS
        for suffix in ("_BPD", "_WRITE_RUNTIME", "_READ_RUNTIME")
    ]

    main(dim)