|
|
import xml.etree.ElementTree as ET |
|
|
from pathlib import Path |
|
|
from PIL import Image |
|
|
import pandas as pd |
|
|
from datasets import Dataset, Features, Image as HFImage, Value |
|
|
from huggingface_hub import HfApi |
|
|
|
|
|
|
|
|
def parse_kanjidic(xml_file_path):
    """
    Parse a kanjidic2.xml file and extract kanji literals with their
    English meanings.

    Args:
        xml_file_path (str or Path): Path to the kanjidic2.xml file.

    Returns:
        dict: A dictionary mapping each kanji character to a list of its
        English meanings. Empty dict if the file cannot be read or parsed.
    """
    print(f"Parsing {xml_file_path}...")
    try:
        tree = ET.parse(xml_file_path)
        root = tree.getroot()
    except (ET.ParseError, FileNotFoundError) as e:
        print(f"Error reading or parsing XML file: {e}")
        return {}

    kanji_meanings = {}

    for char_element in root.findall('character'):
        # Guard against malformed entries that lack a <literal> child;
        # the original .find('literal').text would raise AttributeError.
        literal_element = char_element.find('literal')
        if literal_element is None:
            continue
        literal = literal_element.text

        meanings = []
        # Meanings live under the first reading_meaning/rmgroup element.
        rmgroup = char_element.find('reading_meaning/rmgroup')
        if rmgroup is not None:
            for meaning_element in rmgroup.findall('meaning'):
                # <meaning> elements without an m_lang attribute are English.
                # Skip empty elements so None never lands in the list
                # (it would break ", ".join(...) downstream).
                if 'm_lang' not in meaning_element.attrib and meaning_element.text:
                    meanings.append(meaning_element.text)

        if literal and meanings:
            kanji_meanings[literal] = meanings

    print(f"Successfully parsed {len(kanji_meanings)} kanji characters.")
    return kanji_meanings
|
|
|
|
|
def create_kanji_dataset(image_dir, kanji_data):
    """
    Build a Hugging Face Dataset pairing kanji images with their meanings.

    Args:
        image_dir (str or Path): Directory containing rendered kanji PNGs
            whose filename stems end with a U+XXXX codepoint.
        kanji_data (dict): Mapping of kanji character to list of English
            meanings, as produced by parse_kanjidic.

    Returns:
        datasets.Dataset: Dataset with 'image' and 'text' columns, or
        None when no PNGs or no matching records were found.
    """
    image_dir = Path(image_dir)
    print(f"Processing images from {image_dir.resolve()}...")

    image_paths = list(image_dir.glob('*.png'))
    if not image_paths:
        print("No PNG images found in the specified directory.")
        return None

    records = []
    for img_path in image_paths:
        # The stem is expected to end in a hex codepoint, e.g. "U+4E9C".
        codepoint_hex = img_path.stem.split('U+')[-1]
        try:
            character = chr(int(codepoint_hex, 16))
        except ValueError:
            print(f"Skipping invalid filename: {img_path.name}")
            continue

        # Only keep images whose character appears in the parsed XML data.
        meanings = kanji_data.get(character)
        if meanings is not None:
            records.append({
                "image": str(img_path),
                "text": ", ".join(meanings),
            })

    if not records:
        print("No matching data found. Ensure image filenames correspond to kanji in the XML.")
        return None

    print(f"Created {len(records)} records for the dataset.")

    # Declare the schema explicitly so the string paths in the 'image'
    # column are decoded into actual images by the datasets library.
    frame = pd.DataFrame(records)
    schema = Features({
        'image': HFImage(decode=True),
        'text': Value(dtype='string'),
    })
    return Dataset.from_pandas(frame, features=schema)
|
|
|
|
|
def main():
    """Parse KANJIDIC, build the kanji image dataset, and push it to the Hub."""
    image_directory = "kanji_images"
    kanjidic_xml_path = "kanjidic2.xml"
    repo_id = "LLParallax/kanji-dataset"

    # Step 1: extract kanji -> meanings from the XML; bail out if empty.
    meanings_by_kanji = parse_kanjidic(kanjidic_xml_path)
    if not meanings_by_kanji:
        return

    # Step 2: pair rendered images with their meanings.
    dataset = create_kanji_dataset(image_directory, meanings_by_kanji)
    if dataset is None:
        return

    print("\nDataset created successfully. Sample record:")
    print(dataset[0])

    # Step 3: upload; a broad catch here is deliberate so a failed push
    # (auth, network) is reported instead of crashing the script.
    print(f"\nPushing dataset to Hugging Face Hub at {repo_id}...")
    try:
        dataset.push_to_hub(repo_id, private=False)
        print("Dataset pushed successfully!")
        print(f"View your dataset at: https://huggingface.co/datasets/{repo_id}")
    except Exception as e:
        print(f"Failed to push dataset to Hub: {e}")
|
|
|
|
|
# Run the full build-and-push pipeline only when executed as a script.
if __name__ == "__main__":
    main()
|
|
|
|
|
|