# EN-SLAM-Dataset/scripts/compress_dataset.py
import os
# import tarfile
from pathlib import Path
import argparse

arg_parser = argparse.ArgumentParser()
arg_parser.add_argument("--input", type=str, default=".", help="Folder whose entries will be compressed")
arg_parser.add_argument("--output", type=str, default="EN-SLAM-HF", help="Output folder for the generated archives")
args = arg_parser.parse_args()

def compress_and_split(path: Path, max_size_gb=2):
    """Compress `path` into a .tar.gz archive and split it when it exceeds max_size_gb."""
    max_size_bytes = max_size_gb * 1024 * 1024 * 1024
    tar_file = f"{args.output}/{path.parent.parent.name}_{path.parent.name}_{path.stem}.tar.gz"
    print(f"compressing ... {path}")
    # with tarfile.open(tar_file, "w:gz") as tar: tar.add(path)
    os.system(f"tar -czvf {tar_file} {path}")
    print("DONE!")
    # Archives above the size limit are split into parts and the oversized original is removed.
    if os.path.getsize(tar_file) > max_size_bytes:
        print(f"{tar_file} > {max_size_gb}GB, splitting ...")
        split_tar(tar_file, max_size_bytes)
        os.remove(tar_file)
    print("FINISHED!")

def split_tar(tar_file, max_size_bytes):
    """Split `tar_file` into sequentially numbered parts of at most max_size_bytes each."""
    part_num = 1
    with open(tar_file, "rb") as f:
        # Read fixed-size chunks until EOF (the walrus operator requires Python 3.8+).
        while chunk := f.read(max_size_bytes):
            part_file = f"{tar_file}.part{part_num:03d}"
            with open(part_file, "wb") as part:
                part.write(chunk)
            print(f"Generated chunk: {part_file}")
            part_num += 1

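
# Sketch of the reverse operation (a hypothetical helper, not used by the script):
# the .partNNN chunks are plain byte slices, so concatenating them in lexicographic
# order restores the original archive. The shell equivalent is:
#   cat foo.tar.gz.part* > foo.tar.gz
def reassemble_parts(tar_file: str):
    import glob
    import shutil
    with open(tar_file, "wb") as out:
        for part_file in sorted(glob.glob(f"{tar_file}.part*")):
            with open(part_file, "rb") as part:
                shutil.copyfileobj(part, out)  # stream each chunk without loading it whole
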
def main():
    base_dir = Path(args.input)
    os.makedirs(args.output, exist_ok=True)  # tar fails if the output folder does not exist
    for path in base_dir.iterdir():
        print(f"Processing ... {path}")
        # if path.is_dir() and path.name == "depth":
        compress_and_split(path)


if __name__ == "__main__":
    main()
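
# Example invocation (paths illustrative): each entry under --input is packed as
# <grandparent>_<parent>_<stem>.tar.gz inside --output, and split into
# .part001, .part002, ... whenever the archive exceeds 2 GB.
#   python compress_dataset.py --input datasets/seq01 --output EN-SLAM-HF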