diff --git a/.gitattributes b/.gitattributes
index b9d7a63d6cd7246188e0c08efbc7383f495ffd85..b8b88912127cef44ca0e814eb2e231395e141762 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -22450,3 +22450,38 @@ train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-n
 train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00012.mds filter=lfs diff=lfs merge=lfs -text
 train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00011.mds filter=lfs diff=lfs merge=lfs -text
 train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00025.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00017.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00019.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00024.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00013.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00010.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00021.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00014.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00022.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00014.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00002.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00011.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00000.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00021.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00005.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00016.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00020.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00018.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00001.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00015.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00017.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00012.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0022-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds filter=lfs diff=lfs merge=lfs -text
+train/stackexchange/stackexchange_0022-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds filter=lfs diff=lfs merge=lfs -text
diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_13955-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_13955-tokenized-chunked-1024-512-128-backfill-nodups/index.json
new file mode 100644
index 0000000000000000000000000000000000000000..9b91f9c5bc65754c93d47af72b1b95a8dc1bf9c9
--- /dev/null
+++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_13955-tokenized-chunked-1024-512-128-backfill-nodups/index.json
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107826, "hashes": {}}, "samples": 44432, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 48013811, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 10287912, "hashes": {}}, "samples": 6730, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 7273532, "hashes": {}}}], "version": 2}
\ No newline at end of file
diff --git
a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_13955-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_13955-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..fb0eaa7497b8db60c73795f01cdf8b15eb36264d --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_13955-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 37114503, + "num_truncated_tokens": 37085272 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_19094-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_19094-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..2124d62f462f4023ba079544e6c52dfedf0789ca --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_19094-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108449, "hashes": {}}, "samples": 42888, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47700736, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 21845070, "hashes": {}}, "samples": 13592, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 15415752, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_19094-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_19094-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..e155ae98f2c937c809a5def9eea29f35415d2ccc --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_19094-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 42728432, + "num_truncated_tokens": 42691465 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_20505-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_20505-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..cf1b52a2affb0aa25b75f011a92590a2b6cd3dee --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_20505-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108539, "hashes": {}}, "samples": 42730, "size_limit": 67108864, "version": 2, "zip_data": {"basename": 
"shard.00000.mds.zstd", "bytes": 47552382, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 20543933, "hashes": {}}, "samples": 13213, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 14598678, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_20505-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_20505-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..0f1d7c7069dd22814ff42460d6b1296115ae04bb --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_20505-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 42094642, + "num_truncated_tokens": 42058495 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21711-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21711-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..057ee4479f879eda408db6bc3702ff1b80e2581c --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21711-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107951, "hashes": {}}, "samples": 43542, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47494851, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 15733380, "hashes": {}}, "samples": 10144, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 11081712, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21711-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21711-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..fd7147ac8808b6b0e3e2f3806b81cdc77ba5ebad --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21711-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 39759138, + "num_truncated_tokens": 39726418 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21740-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21740-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..8f4e29534a123f9a51b78531417018dc3c110ec0 --- 
/dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21740-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106891, "hashes": {}}, "samples": 43205, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 48040843, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 17028305, "hashes": {}}, "samples": 10898, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 12212078, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21740-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21740-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..34b671ded2e0e5cc8241044bb1e750733466f776 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21740-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 40392857, + "num_truncated_tokens": 40359591 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_22400-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_22400-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..f944e931a1e4bc5b5ffccf33406d3b045e55f68c --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_22400-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108176, "hashes": {}}, "samples": 44472, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47776114, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 8944091, "hashes": {}}, "samples": 5816, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6334839, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_22400-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_22400-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..7849097867eff80986bfae68454b9f1848e6d65e --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_22400-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 
+1,4 @@ +{ + "num_tokens": 36469791, + "num_truncated_tokens": 36442704 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_23872-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_23872-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..d193b9e5a22e1b120a1edc0bf3be22cd3d97f5b5 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_23872-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108244, "hashes": {}}, "samples": 45524, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 48095655, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 4618387, "hashes": {}}, "samples": 3110, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 3302576, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_23872-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_23872-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..3b30883f3039941009f8365cf4407ddb9a5e3996 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_23872-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 34358533, + "num_truncated_tokens": 34333243 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_25761-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_25761-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..b83e198588c8ee030dad2635d60c90dce80dc867 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_25761-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108415, "hashes": {}}, "samples": 44597, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47887843, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 8663088, "hashes": {}}, "samples": 5888, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6224893, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git 
a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_25761-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_25761-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..79e6faf99c41fa9fc4c9df8c354c5413a377c2aa --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_25761-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 36323449, + "num_truncated_tokens": 36295511 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30044-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30044-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..bd43a6d439efc719a5744e68189cb9b08eeca80c --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30044-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107864, "hashes": {}}, "samples": 43837, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47542226, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 11574850, "hashes": {}}, "samples": 7564, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8177377, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30044-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30044-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..917cc476451a5c131942578e4a608d9e64eab280 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30044-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 37750349, + "num_truncated_tokens": 37722060 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30636-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30636-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..7d4ab7dfd00a81decdf2ec956446ca328d9ad923 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30636-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107460, "hashes": {}}, "samples": 43064, "size_limit": 67108864, "version": 2, "zip_data": {"basename": 
"shard.00000.mds.zstd", "bytes": 47999864, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 17653886, "hashes": {}}, "samples": 11375, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 12646415, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30636-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30636-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..ea9235e82c7beec394e3582531c044e6eb58af87 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30636-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 40695512, + "num_truncated_tokens": 40661533 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_32635-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_32635-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..8adec6361ad5ab14bf4835895eca2bb0309a51a2 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_32635-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107123, "hashes": {}}, "samples": 42472, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47589658, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 22373004, "hashes": {}}, "samples": 14211, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 15901034, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_32635-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_32635-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..d140ab05ac1617462d9d29fe2bb293cc4410d0b3 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_32635-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 42985436, + "num_truncated_tokens": 42947973 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_34553-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_34553-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..ebaa600a985ddc718f90898c11c80a0f060d2b0c --- 
/dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_34553-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108137, "hashes": {}}, "samples": 43723, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47531787, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 12472741, "hashes": {}}, "samples": 8192, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8870637, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_34553-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_34553-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..fe7d3e89e80380ce15701e603c1b5ea50e7793d5 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_34553-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 38183620, + "num_truncated_tokens": 38153875 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_35318-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_35318-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..2c55744c7d2893a386964a65578d8032e8ae2b7d --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_35318-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108620, "hashes": {}}, "samples": 43604, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47886260, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 14890282, "hashes": {}}, "samples": 9598, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10658421, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_35318-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_35318-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..8b0f9ab2412602f4234c77ba095579a267af1e24 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_35318-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 
+1,4 @@ +{ + "num_tokens": 39352705, + "num_truncated_tokens": 39320352 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_37013-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_37013-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..250c4c5d331f3fd47d7ca15a0da07a6a352dc2eb --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_37013-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108154, "hashes": {}}, "samples": 44232, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47798229, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 11423111, "hashes": {}}, "samples": 7479, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8083624, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_37013-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_37013-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..630d99b2e8ecb07fb94a374a32117986e493fe90 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_37013-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 37665338, + "num_truncated_tokens": 37636205 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_37675-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_37675-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..b0c8528d1252c89017fa9542a57b88eea3c24a4d --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_37675-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108015, "hashes": {}}, "samples": 43441, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47833394, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 15995222, "hashes": {}}, "samples": 10279, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 11457461, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git 
a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_38934-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_38934-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..52b2aac5d5d732b966df9ceb52ff595fa7a1fcb0 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_38934-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107649, "hashes": {}}, "samples": 43349, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47587621, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 16190896, "hashes": {}}, "samples": 10505, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 11520773, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_38934-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_38934-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..1c21ee28cb3f5fc9c9d4d627d77de1ec8b09fcfc --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_38934-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 39982436, + "num_truncated_tokens": 39949472 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_42691-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_42691-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..3b1c0ca6b7f13a846d412f9e517e3e6adcf46797 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_42691-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108704, "hashes": {}}, "samples": 42734, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47392247, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 20510727, "hashes": {}}, "samples": 13073, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 14492043, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_42691-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json 
b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_42691-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..726d7e7400734bbee3f4b03bf84d16b0d918311c --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_42691-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 42082248, + "num_truncated_tokens": 42045923 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_44875-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_44875-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..43007ff1f0204a6a130e665ae3bcc5001bfa17fd --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_44875-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107848, "hashes": {}}, "samples": 44289, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47831537, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 9945744, "hashes": {}}, "samples": 6674, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 7103988, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_44875-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_44875-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..a629e807c29ca0f0fce918f737af733a12b974bd --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_44875-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 36949685, + "num_truncated_tokens": 36920899 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48510-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48510-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..63bae2817ebfd40a7acd44a1e9dfd939213ed673 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48510-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107870, "hashes": {}}, "samples": 43778, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47829266, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], 
"column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 12503510, "hashes": {}}, "samples": 8373, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8950602, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48510-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48510-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..b9ee7a1d4d048ae00f2fe3f9990844e83991042b --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48510-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 38191618, + "num_truncated_tokens": 38161388 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48688-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48688-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..3ea38ba7ddf0aaa95944ce3950f0328c8544652c --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48688-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108653, "hashes": {}}, "samples": 42983, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47783057, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 18127516, "hashes": {}}, "samples": 11786, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 12961290, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48688-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48688-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..51452e23dd79a587f548dd4b9c2577f2b4bcf072 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_48688-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 40922826, + "num_truncated_tokens": 40887378 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_49476-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_49476-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..0783acda22276b5df423e21c0860664ae8d47370 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_49476-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 
@@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106787, "hashes": {}}, "samples": 44082, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47504419, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 11398824, "hashes": {}}, "samples": 7439, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8068072, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_49476-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_49476-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..64c44b6dad7550f4d2d8ce75210c5d55db6ac142 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_49476-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 37658200, + "num_truncated_tokens": 37629174 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_52470-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_52470-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..b1f65337bc44a3ded41dcb9da8f0d9c6e04c04f7 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_52470-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107452, "hashes": {}}, "samples": 42924, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47762196, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 18735382, "hashes": {}}, "samples": 12152, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 13323050, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_52470-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_52470-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..5b476406281112624b50223db6a18ca051aef456 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_52470-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 41216669, + "num_truncated_tokens": 41181382 +} \ No newline at end of file diff --git 
a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_56355-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_56355-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..b4ba1d22c1f21614d822a0ae3075c627cabaf4dc --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_56355-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108507, "hashes": {}}, "samples": 43764, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47766045, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 14170916, "hashes": {}}, "samples": 9224, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10063007, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_56355-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_56355-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..66d164086b2e53ee1d247dfdd33675af8d3e7fca --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_56355-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 38999836, + "num_truncated_tokens": 38967882 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_66323-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_66323-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..f986bc3fe1f249eaa8857fd74a0657925a56d16d --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_66323-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107898, "hashes": {}}, "samples": 44834, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47828807, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 8342534, "hashes": {}}, "samples": 5436, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5919651, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_66323-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json 
b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_66323-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..5b45157a66d3f5204333dab05f462d7e016f5c46 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_66323-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 36169569, + "num_truncated_tokens": 36142997 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67473-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67473-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..6927f9dfe734d01309f3b7a19b0c71a01ca3cdc2 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67473-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108312, "hashes": {}}, "samples": 43136, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47895352, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 16468955, "hashes": {}}, "samples": 10768, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 11814741, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67473-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67473-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..0609c375c91e65cd53c46763443635e286895219 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67473-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 40120008, + "num_truncated_tokens": 40086534 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67675-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67675-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..9937506bb647ac14f3f9540acdaf5a14a114371b --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67675-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108037, "hashes": {}}, "samples": 44069, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47522085, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], 
"column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 10933773, "hashes": {}}, "samples": 7070, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 7757421, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67675-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67675-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..f76c46d349817df6c94a6f8d61257c4541b9d37c --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_67675-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 37438075, + "num_truncated_tokens": 37410034 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_68746-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_68746-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..366e090cfbabc30835f7d23ca3889a93795c3e5d --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_68746-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108232, "hashes": {}}, "samples": 44502, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47830029, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 8150076, "hashes": {}}, "samples": 5584, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5826706, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_68746-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_68746-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..8619c399f13c5d32534e92114c75b24da8235dc2 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_68746-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 36079204, + "num_truncated_tokens": 36052310 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_76120-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_76120-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..40178b1278ba6a29d9bc2c52187902201d00aef7 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_76120-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ 
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108725, "hashes": {}}, "samples": 44173, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47833141, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 11900296, "hashes": {}}, "samples": 7782, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8483128, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_76120-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_76120-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..635ec77bb4414e12d43528ff55c1b83cba1ae3aa --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_76120-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 37896658, + "num_truncated_tokens": 37866466 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83185-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83185-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..ce22bde82fa890bc9eb641624ad9c5975cbc4916 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83185-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107935, "hashes": {}}, "samples": 43973, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47931090, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 12665081, "hashes": {}}, "samples": 8347, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 9042081, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83185-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83185-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..41260b1c4db19804129a02bc2f61fe5b3befe1c1 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83185-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 38267329, + "num_truncated_tokens": 38236901 +} \ No newline at end of file diff --git 
a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83544-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83544-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..f66cb7a97c18f76dd93e7d0a369e06bbaac8ac25 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83544-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107985, "hashes": {}}, "samples": 43463, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 48003813, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 15836650, "hashes": {}}, "samples": 10185, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 11296926, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83544-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83544-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..eb99e34917922b3a6b2df8053f4b797da4704e3e --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_83544-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 39811696, + "num_truncated_tokens": 39778363 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_85514-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_85514-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..39fc05820386f5d58323d0b1e9fb0b42dcf905c1 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_85514-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107535, "hashes": {}}, "samples": 43559, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47764742, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 17261987, "hashes": {}}, "samples": 10906, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 12249864, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_85514-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json 
b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_85514-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..d1bc51a1a5a775b0852834ddad128390790dc05e --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_85514-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 40499000, + "num_truncated_tokens": 40464820 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_87539-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_87539-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..0324c6b52c04f3c9c050e80c8e71c5dd2591ae49 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_87539-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108186, "hashes": {}}, "samples": 43089, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47933616, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 16812706, "hashes": {}}, "samples": 10997, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 12038743, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_87539-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_87539-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..6b4dc8d5c05a77981f79a9c900e6ad8e92518479 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_87539-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 40286258, + "num_truncated_tokens": 40252773 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_88010-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_88010-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..bce9d96f9931691c96abcfe8cf12cc976a3cc427 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_88010-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108290, "hashes": {}}, "samples": 44503, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47726144, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], 
"column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 8806681, "hashes": {}}, "samples": 5878, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6274866, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_88010-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_88010-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..405df1eca4a1b1e6a25f0a9e57b5ed3321e9bf6e --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_88010-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 36398287, + "num_truncated_tokens": 36370903 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_92459-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_92459-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..2fbeaa4ee0ce317ce91b8511fa37c4b1a67e6d11 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_92459-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108370, "hashes": {}}, "samples": 44498, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 48038022, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 9127992, "hashes": {}}, "samples": 6140, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6522753, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_92459-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_92459-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..28fa46c89f5263c97b9a14509325c473cfb49542 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_92459-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 36551145, + "num_truncated_tokens": 36522609 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_94001-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_94001-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..46bf4d401c8d281779d0409f4737773b3b01e253 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_94001-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ 
+{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108790, "hashes": {}}, "samples": 43287, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47753969, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 18539407, "hashes": {}}, "samples": 11751, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 13095474, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_94001-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_94001-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..2f7106c9e52093611953e67f7dd42c1ea8a6da82 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_94001-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 41120493, + "num_truncated_tokens": 41085515 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95226-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95226-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..926484458c5cca591382dc04bb9a392e84bfab1d --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95226-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108235, "hashes": {}}, "samples": 43828, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47653140, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 13014724, "hashes": {}}, "samples": 8725, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 9283809, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95226-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95226-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..c8c8ec8b66b08644ae86a657815d9667ae37e7ef --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95226-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 38435053, + "num_truncated_tokens": 38403824 +} \ No newline at end of file diff --git 
a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95382-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95382-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..6b8e4a6afd48dd89d207ac8ffe364a9bbba634fe --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95382-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108445, "hashes": {}}, "samples": 43158, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47611354, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 17004993, "hashes": {}}, "samples": 11132, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 12079749, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95382-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95382-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..4a87f4eb740d190f0a6da2c0089e5bc966159bc6 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_95382-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 40376350, + "num_truncated_tokens": 40342937 +} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_97253-tokenized-chunked-1024-512-128-backfill-nodups/index.json b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_97253-tokenized-chunked-1024-512-128-backfill-nodups/index.json new file mode 100644 index 0000000000000000000000000000000000000000..df04d6b4378a2c3aa01e9507718ce8c2e6f30d21 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_97253-tokenized-chunked-1024-512-128-backfill-nodups/index.json @@ -0,0 +1 @@ +{"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108099, "hashes": {}}, "samples": 44411, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47560860, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 9237458, "hashes": {}}, "samples": 6046, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6532773, "hashes": {}}}], "version": 2} \ No newline at end of file diff --git a/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_97253-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json 
b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_97253-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..850b5796d1188619c14844f46647f78c9ebc5335 --- /dev/null +++ b/train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_97253-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json @@ -0,0 +1,4 @@ +{ + "num_tokens": 36611138, + "num_truncated_tokens": 36584622 +} \ No newline at end of file diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00000.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00000.mds new file mode 100644 index 0000000000000000000000000000000000000000..febc21d8fe195d5224af0b428c71ec9c6b36014a --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00000.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:eda5c4b047956001edf011f761aec420c73d7c986ddade3e2a1986818b271af4 +size 67108535 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00001.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00001.mds new file mode 100644 index 0000000000000000000000000000000000000000..6686c60dd46e6257ca19bb0e5d330957311e80e5 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00001.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:89089d6ae07dc3fac3a424e1ad0d851336c5dce72fccc81c18174ae9ea2457a0 +size 67108425 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00002.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00002.mds new file mode 100644 index 0000000000000000000000000000000000000000..9a1ccb629e0a76fd0671b5a077419e89a73e99ae --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00002.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:42a328e60b517ddc32759bf32310dcb55f6749b13cae7162175be2f002565663 +size 67107937 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds new file mode 100644 index 0000000000000000000000000000000000000000..5c939f85be58ef9e8cdc087ddbefda4452c2583b --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:99e10c26bc63b2261c5218a28d444521f73996f34bfba807b116d614f4018a84 +size 67108347 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds new file mode 100644 index 0000000000000000000000000000000000000000..faa39299d5b47790bc580224170f3a8d38ba1748 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3bf3c0e856ae30a309e72239d72d167dc7106764ea4ea2f873d061d51265d044 +size 67107759 diff --git 
a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00005.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00005.mds new file mode 100644 index 0000000000000000000000000000000000000000..1c5af15f77bee6291edf998c81b726caacb05d2a --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00005.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f1c1b9d92c97b3a8df37e31ff70650529b47d53fcf9cf5be388020fd955cd25f +size 67107676 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds new file mode 100644 index 0000000000000000000000000000000000000000..7c404beb4bc4f41595e906d383a5d68ce4a43279 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0cf6b3d279d281f963f1634f992e16b084219e0c75459004ac591b812e108629 +size 67108157 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds new file mode 100644 index 0000000000000000000000000000000000000000..565336689d684465ad2c5968db60545135deb075 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bef5d84d34bd7b28401293e1dffbab6cc303a09234bdee5ec2fadf6b8217d8ae +size 67107465 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds new file mode 100644 index 0000000000000000000000000000000000000000..405ec945679e728723e5416b0745208c01369ea0 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:da2d0f06f4e78cc71cbf4d7cc16b404b3d89214e8b876ec6e12a2badedae0a96 +size 67108573 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds new file mode 100644 index 0000000000000000000000000000000000000000..a25f354bf73dba4e9c8ea93fd1efd589acfc6e83 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f5a03645acd46e1f7a30543a10cf6347f447dc7fa68eaf5bc255a5381b0b75e1 +size 67107588 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00010.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00010.mds new file mode 100644 index 0000000000000000000000000000000000000000..d5c7f218046154978a63c7102bd78839802e50d1 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00010.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:08168c52259590c6ba800bbae873e8f6dff229ab2a254edd0a24367c17b89537 +size 67108678 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00011.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00011.mds new file mode 100644 index 0000000000000000000000000000000000000000..043f7e468904854dba659525e41db29c782dae57 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00011.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:95f6099b18adab88763de467f1d1290860dca9978d492320fb5a9b14f47aced1 +size 67107468 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00012.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00012.mds new file mode 100644 index 0000000000000000000000000000000000000000..893d53bec7a309a2b25c867640d14c4ed135d868 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00012.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d4f43fc9a2fa0bc882180c6ebca9671943908ec323acbb16b62e888de4eea77 +size 67107110 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00014.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00014.mds new file mode 100644 index 0000000000000000000000000000000000000000..a2b79adccca6ce4709757e13290e1eedd90a351b --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00014.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d7974af769cd388afa605f6b02703cd11acfc100f61db714ad7db2e1615ad031 +size 67108621 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00015.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00015.mds new file mode 100644 index 0000000000000000000000000000000000000000..71c7439bc3ab3bf4adb1503bf6b3b41116734b44 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00015.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:271da9ccd6305f608b08ae3961c69ed2c162c7abaf7178fe7281bb08f756f957 +size 67107688 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00016.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00016.mds new file mode 100644 index 0000000000000000000000000000000000000000..7263543e002a3f265d5c0a39f3bcd8647b03a715 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00016.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ddd5ddbcfaca338a4ba5de27d92a9adfbfff8fa57185552c8b834f24816bc27a +size 67108660 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00017.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00017.mds new file mode 100644 index 0000000000000000000000000000000000000000..45020cb761dd9eee16494ae4df93cf291f6c065d --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00017.mds 
@@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9048dab15592fd289a28ca2eccefcc21f44364ce32ce5c9fc7e9163b1136f6a3 +size 67108580 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00018.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00018.mds new file mode 100644 index 0000000000000000000000000000000000000000..fa3619a48f63ab849e6560052653bf4c3fcba1ba --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00018.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:767a24a94b56a2f79beee755c3330e25de75bd6e03ca7b53ebc353b95546b302 +size 67107859 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00020.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00020.mds new file mode 100644 index 0000000000000000000000000000000000000000..10e1675fb1519c951615f2c9fc53a6dd62d7b7c7 --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00020.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d1d071b168be380d1db67cde55cb95bc73a9b817ec0fdddac3e0d174d5f202d4 +size 67107154 diff --git a/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00021.mds b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00021.mds new file mode 100644 index 0000000000000000000000000000000000000000..e8e5d2c27400fc502109bb88ccc818d70690784a --- /dev/null +++ b/train/stackexchange/stackexchange_0004-tokenized-chunked-1024-512-128-backfill-nodups/shard.00021.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6319cb4de49fb492ccc7eac9762873802bea5c0283064e7c12c9ee86773408cc +size 10912902 diff --git a/train/stackexchange/stackexchange_0022-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds b/train/stackexchange/stackexchange_0022-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds new file mode 100644 index 0000000000000000000000000000000000000000..706d442a6f72cf5fd559ecba1d90890d90090d92 --- /dev/null +++ b/train/stackexchange/stackexchange_0022-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e00739d48148d099cead5a5843d7e2c01babe8b1de9fea266789d34d0c2d80af +size 67108640 diff --git a/train/stackexchange/stackexchange_0022-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds b/train/stackexchange/stackexchange_0022-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds new file mode 100644 index 0000000000000000000000000000000000000000..0852f99767f5166e720a8398ac1baffa41457717 --- /dev/null +++ b/train/stackexchange/stackexchange_0022-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ed0fa20228df460f5a2ed01f06bd241a99fc4d570b6ccc99bdbda04fa990447d +size 67108042 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds new file mode 100644 index 0000000000000000000000000000000000000000..0970bc48cddefa58a8d3bf6f513429331673628a --- /dev/null +++ 
b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a124db6fdeef34bac915eff24ccbccbd23acb2851e1f4ded769b4f9fe79d147 +size 67107630 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds new file mode 100644 index 0000000000000000000000000000000000000000..1ed00438fadc9bd5a022caa222f87ab8fdb0b5ce --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:09f51e1b1f4bd2b040689b959bd333443803ae136779b9ec18852e1d6ba9fa83 +size 67108447 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds new file mode 100644 index 0000000000000000000000000000000000000000..6e1733b1e39ac29fec0e770c32148b32e5a85f09 --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e89eebbc206d95ebe409a64ff3ce74f52d9d139a82a53ab92e2dd332ecae423 +size 67108626 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds new file mode 100644 index 0000000000000000000000000000000000000000..2bf5eb6b3acc31b4253811d1334731ac13019163 --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a993c3efb9d9f40b93ae59eac4377e2a780715b3eb22b5274dc9a52c0329fcb3 +size 67108861 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds new file mode 100644 index 0000000000000000000000000000000000000000..c9e925848673600af7840ed4f2cd4c43b294e63e --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fc999f786f0530d0c1725d2871483e7f8cfbeb89a01fba130b5ad098393fcc11 +size 67108693 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00013.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00013.mds new file mode 100644 index 0000000000000000000000000000000000000000..2c5eee08cfbf262a97c242385e8d628d00850056 --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00013.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:26191446ddfd266b5b94d8c593c332f4bdc888247e2677e0a50d23234326dc58 +size 67108823 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00014.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00014.mds new file mode 100644 index 
0000000000000000000000000000000000000000..e0ccc813fc60c03a59fcf3d121a63cf59cddf9cd --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00014.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6aee3875ff32d8a07a62a025b8f779a35bf9161c393e242754383c6969a2bf42 +size 67108429 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00017.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00017.mds new file mode 100644 index 0000000000000000000000000000000000000000..41e97a44a75fc2fd0f42ed106249525a139a0516 --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00017.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c18811f91bbaded3e70c34e11e357c1c36b919033877ff718572b35ad3c8ddbe +size 67107985 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00019.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00019.mds new file mode 100644 index 0000000000000000000000000000000000000000..6ffb8192998a0fa1f4bc565a4ac23d07b9f7d65b --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00019.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7bf042f38dc0ca92cb1eaac7183b55dd13eff693e454f41e5d76dcee038b061 +size 67108539 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00021.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00021.mds new file mode 100644 index 0000000000000000000000000000000000000000..0a1c6dab596d2a081ede962c65e17492ed5dac17 --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00021.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19f69fe9213463d82cef65413a73580d201496c001162095a6a82300b12dd5fa +size 67108399 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00022.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00022.mds new file mode 100644 index 0000000000000000000000000000000000000000..a04d97f6e3b04ae5b2e1aa6b2fef61ad660cad39 --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00022.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bb8e781e83f21f99dd385f95d1c3f3f9a29f0ba5d1b30954971ed62504017ee2 +size 67107216 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00024.mds b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00024.mds new file mode 100644 index 0000000000000000000000000000000000000000..dc4729a4a6d4e10f2b9e91bd8cedc267456a2307 --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00024.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a541df6241f2127e0d84507df7a5fd0e255fb39fa3b608b496c2b9243832a1f6 +size 67107969 diff --git a/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00025.mds 
b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00025.mds new file mode 100644 index 0000000000000000000000000000000000000000..03ddedffbef20f719e34281a9ec06dd9e624cddc --- /dev/null +++ b/train/stackexchange/stackexchange_0023-tokenized-chunked-1024-512-128-backfill-nodups/shard.00025.mds @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59db4c10be921c9ef6dfd05b2d43ada3d4c1c6ea5a952d3c89cfcafa850d646a +size 56250586