|
from datatrove.executor import LocalPipelineExecutor |
|
from datatrove.pipeline.readers import ParquetReader |
|
from datatrove.utils.hashing import HashConfig |
|
from custom_minhash import ( |
|
CustomMinhashConfig, |
|
CustomMinhashDedupSignature, |
|
CustomMinhashDedupBuckets, |
|
CustomMinhashDedupCluster, |
|
CustomMinhashDedupFilter, |
|
) |
|
from datatrove.pipeline.readers import JsonlReader |
|
from datatrove.pipeline.tokens import TokensCounter |
|
from datatrove.pipeline.writers.jsonl import JsonlWriter |
|
from datatrove.utils.hashing import HashConfig |
|
|
|
import argparse |
|
from glob import glob |
|
|
|
custom_minhash_config = CustomMinhashConfig( |
|
hash_config=HashConfig(precision=32, hash_fc='sha1'), |
|
) |
|
|
|
if __name__ == '__main__': |
|
parser = argparse.ArgumentParser(description="Demo script with argparse") |
|
parser.add_argument("--sub-folder", type=str, required=True, help="Subfolder path") |
|
parser.add_argument("--offset", type=int, default=0, required=False, help="task offset") |
|
|
|
|
|
args = parser.parse_args() |
|
print(args) |
|
|
|
MINHASH_BASE_PATH = "minhash" |
|
|
|
LOGS_FOLDER = "minhash_logs" |
|
|
|
n_file = len(glob(f"/gpfs/public/research/liyizhi/huggingface/datasets/fineweb-edu-score-2/data/{args.sub_folder}/*.parquet")) |
|
TOTAL_TASKS = n_file |
|
print(f"Total files in {args.sub_folder}: {n_file}") |
|
|
|
INPUT_READER = ParquetReader( |
|
"/gpfs/public/research/liyizhi/huggingface/datasets/fineweb-edu-score-2", |
|
glob_pattern=f"data/{args.sub_folder}/*.parquet", |
|
batch_size=100_000) |
|
|
|
|
|
|
|
|
|
stage1 = LocalPipelineExecutor( |
|
|
|
pipeline=[ |
|
INPUT_READER, |
|
CustomMinhashDedupSignature( |
|
output_folder=f"{MINHASH_BASE_PATH}/signatures", |
|
config=custom_minhash_config, |
|
naming_prefix=args.sub_folder, |
|
), |
|
], |
|
tasks=TOTAL_TASKS, |
|
|
|
|
|
logging_dir=f"{LOGS_FOLDER}/signatures", |
|
|
|
) |
|
|
|
|
|
stage2 = LocalPipelineExecutor( |
|
|
|
pipeline=[ |
|
CustomMinhashDedupBuckets( |
|
input_folder=f"{MINHASH_BASE_PATH}/signatures", |
|
output_folder=f"{MINHASH_BASE_PATH}/buckets", |
|
config=custom_minhash_config, |
|
), |
|
], |
|
tasks=custom_minhash_config.num_buckets, |
|
local_tasks=1, |
|
local_rank_offset=args.offset, |
|
|
|
|
|
logging_dir=f"{LOGS_FOLDER}/buckets", |
|
|
|
) |
|
|
|
|
|
stage3 = LocalPipelineExecutor( |
|
|
|
pipeline=[ |
|
CustomMinhashDedupCluster( |
|
input_folder=f"{MINHASH_BASE_PATH}/buckets", |
|
output_folder=f"{MINHASH_BASE_PATH}/remove_ids", |
|
config=custom_minhash_config, |
|
), |
|
], |
|
|
|
|
|
|
|
logging_dir=f"{LOGS_FOLDER}/clusters", |
|
|
|
|
|
|
|
|
|
) |
|
|
|
|
|
|
|
stage4 = LocalPipelineExecutor( |
|
|
|
pipeline=[ |
|
INPUT_READER, |
|
TokensCounter(), |
|
CustomMinhashDedupFilter( |
|
remove_id_input_folder=f"{MINHASH_BASE_PATH}/remove_ids", |
|
sig_input_folder=f"{MINHASH_BASE_PATH}/signatures", |
|
exclusion_writer=JsonlWriter(f"{MINHASH_BASE_PATH}/removed"), |
|
config=custom_minhash_config, |
|
naming_prefix=args.sub_folder, |
|
), |
|
], |
|
tasks=TOTAL_TASKS, |
|
|
|
|
|
logging_dir=f"{LOGS_FOLDER}/filter", |
|
depends=stage3, |
|
|
|
) |
|
|
|
stage3.run() |
|
|