"""
Filters a question-answering dataset into three JSON lines formats, one record
per sample, following the schemas below.

long:

{
    "document": "",
    "question": "",
    "long_answer_candidates": ["", "", ""],
    "long_answer_candidate_index": 0
}

short:

{
    "document": "",
    "question": "",
    "short_answer": ""
}

either:

{
    "document": "",
    "question": "",
    "answer": ""
}
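
Usage sketch (assuming this script is saved as filter.py; the positional
argument is the Hugging Face dataset repository that is both read from and
written back to):

    python filter.py <repo-id>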
"""

import os
import sys

import jsonlines

from datasets import load_dataset
from huggingface_hub import HfApi


def filter_dataset(raw, short_path, long_path, either_path):
    """Streams samples from raw and appends filtered records to JSON lines files."""
    # Make sure the output directories exist before opening files in append mode.
    for path in (short_path, long_path, either_path):
        directory = os.path.dirname(path)
        if directory:
            os.makedirs(directory, exist_ok=True)

    fps = open(short_path, "a")
    writers = jsonlines.Writer(fps)

    fpl = open(long_path, "a")
    writerl = jsonlines.Writer(fpl)

    fpe = open(either_path, "a")
    writere = jsonlines.Writer(fpe)

    count = 0
    long = []
    short = []
    either = []

    for sample in raw:
        try:
            answer = ""

            # Keep the first short answer when one is present.
            if sample["short_answers"][0]:
                answer = sample["short_answers"][0]

                short.append({
                    "document": sample["document"],
                    "question": sample["question"],
                    "short_answer": answer
                })

            # Keep the annotated long answer candidate when one exists.
            if sample["long_answer_candidate_index"] != -1:
                answer = sample["long_answer_candidates"][sample["long_answer_candidate_index"]]

                long.append({
                    "document": sample["document"],
                    "question": sample["question"],
                    "long_answer_candidates": sample["long_answer_candidates"],
                    "long_answer_candidate_index": sample["long_answer_candidate_index"]
                })

            # A sample lands in "either" if it produced any answer at all.
            if answer:
                count += 1

                either.append({
                    "document": sample["document"],
                    "question": sample["question"],
                    "answer": answer
                })

        except Exception as ex:
            print("Exception: " + str(ex))

        # Flush buffers to disk roughly every 1,000 answered samples.
        if count and count % 1000 == 0:
            writere.write_all(either)
            either = []

            if short:
                writers.write_all(short)
                short = []

            if long:
                writerl.write_all(long)
                long = []

            print("Done: " + str(count), end="\r")

    # Write out anything still buffered, then close all writers and files.
    if either:
        writere.write_all(either)

    if short:
        writers.write_all(short)

    if long:
        writerl.write_all(long)

    writere.close()
    fpe.close()

    writers.close()
    fps.close()

    writerl.close()
    fpl.close()
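

# A minimal smoke test for filter_dataset, using a hand-built sample with
# hypothetical values that follow the field names consumed above:
#
#   sample = {
#       "document": "some article text",
#       "question": "a question about the article",
#       "short_answers": ["a short answer span"],
#       "long_answer_candidates": ["a candidate paragraph"],
#       "long_answer_candidate_index": 0,
#   }
#   filter_dataset(raw=[sample], short_path="data/short/test.jsonl",
#                  long_path="data/long/test.jsonl", either_path="data/either/test.jsonl")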


if __name__ == "__main__":
    # sys.argv[0] is the script name, so the repository id requires at least two entries.
    if len(sys.argv) < 2:
        raise AttributeError("Missing required argument: repository id")

    repo = sys.argv[1]

    api = HfApi()

    # Stream each split so the full dataset never has to be held in memory.
    train_data = load_dataset(repo, split="train", streaming=True)
    filter_dataset(raw=train_data, short_path="data/short/train.jsonl", long_path="data/long/train.jsonl", either_path="data/either/train.jsonl")

    val_data = load_dataset(repo, split="validation", streaming=True)
    filter_dataset(raw=val_data, short_path="data/short/validation.jsonl", long_path="data/long/validation.jsonl", either_path="data/either/validation.jsonl")

    # Upload the filtered splits back to the same repository.
    api.upload_folder(
        folder_path="data/",
        repo_id=repo,
        repo_type="dataset",
        multi_commits=True,
        multi_commits_verbose=True
    )