wattpad2 / fix_languages.py
Fizzarolli's picture
Upload 2 files
aa02067 verified
raw
history blame contribute delete
738 Bytes
# Imports grouped per PEP 8: stdlib first, then third-party.
import unicodedata

import numpy as np
from datasets import load_dataset
from lingua import LanguageDetectorBuilder

# Pull the train split that will be re-annotated in place below.
dataset = load_dataset("Fizzarolli/wattpad2", "default", split="train")

# Detector over every language lingua supports; preloading the models up
# front avoids lazy-loading latency on the first rows of the map() pass.
detector = LanguageDetectorBuilder.from_all_languages().with_preloaded_language_models().build()
def fix_language_column(example: dict) -> dict:
    """Re-detect the language of one dataset row from its description.

    Overwrites ``example["language"]`` with the ISO 639-3 code of the most
    likely language and ``example["language_confidence"]`` with that
    language's confidence score. NFKD normalization strips compatibility
    glyphs before detection.

    Returns the (mutated) example, as required by ``datasets.Dataset.map``.
    """
    res = detector.compute_language_confidence_values(
        unicodedata.normalize('NFKD', example["description"])
    )
    # lingua returns the confidence values sorted in descending order, so
    # the first entry carries both the top language and the maximum
    # confidence — no need to scan the whole list with np.max.
    top = res[0]
    example["language"] = top.language.iso_code_639_3.name
    example["language_confidence"] = top.value
    return example
# Re-annotate every row, then overwrite the "default" config on the Hub
# with the corrected language columns (network side effect).
dataset = dataset.map(fix_language_column)
dataset.push_to_hub("Fizzarolli/wattpad2", "default")