parquet-converter committed on
Commit
7e92f83
1 Parent(s): eb22643

Update parquet files

Browse files
kanji.tar.gz → default/kanjis2radicals-train.parquet RENAMED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:acfae37c5f4473e4a8b4f45c73960457eaea4c7272e835b80b4fa8578ad78097
3
- size 4624662
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a858c504fcd4cb98f78916069fc805d134b7c3cead6b3f45c8ed49b402fbefd8
3
+ size 5028071
kanji_metadata.jsonl DELETED
The diff for this file is too large to render. See raw diff
 
kanjis2radicals.py DELETED
@@ -1,66 +0,0 @@
1
- import datasets
2
- import json
3
-
4
- _DESCRIPTION = """\
5
- Contains Kanji images with corresponding radicals ids from WaniKani or https://api.robanohashi.org/docs/index.html
6
- """
7
-
8
- _METADATA_URL = "https://huggingface.co/datasets/martingrzzler/kanjis2radicals/raw/main/kanji_metadata.jsonl"
9
- _IMAGES_URL = "https://huggingface.co/datasets/martingrzzler/kanjis2radicals/resolve/main/kanji.tar.gz"
10
-
11
-
12
class Kanji2Radicals(datasets.GeneratorBasedBuilder):
    """Kanji-to-radicals dataset: kanji images paired with radical metadata."""

    def _info(self):
        """Return the dataset schema: a kanji image plus its metadata record."""
        radical_features = datasets.Sequence(
            {
                "characters": datasets.Value("string"),
                "id": datasets.Value("int32"),
                "slug": datasets.Value("string"),
            }
        )
        meta_features = {
            "id": datasets.Value("int32"),
            "characters": datasets.Value("string"),
            "meanings": datasets.Value("string"),
            "radicals": radical_features,
        }
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "kanji_image": datasets.Image(),
                    "meta": meta_features,
                }
            ),
            supervised_keys=None,
            homepage="https://robanohashi.org/",
        )

    def _split_generators(self, dl_manager):
        """Download the metadata file and image archive; expose one train split."""
        meta_file = dl_manager.download(_METADATA_URL)
        archive_file = dl_manager.download(_IMAGES_URL)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={
                    "metadata_path": meta_file,
                    "images_iter": dl_manager.iter_archive(archive_file),
                },
            ),
        ]

    def _generate_examples(self, metadata_path, images_iter):
        """Yield (key, example) pairs, one per image in the archive.

        NOTE(review): images are joined to JSONL records purely by
        iteration order — this assumes the tar archive and the metadata
        file are sorted identically; verify against the data files.
        """
        # Load every JSONL metadata record up front, in file order.
        with open(metadata_path, encoding="utf-8") as meta_file:
            records = [json.loads(line) for line in meta_file]

        for position, (image_path, image_file) in enumerate(images_iter):
            yield image_path, {
                "meta": records[position],
                "kanji_image": image_file.read(),
            }
66
-