# conll-03 / preprocess.py
import argparse
import json
import shutil
from pathlib import Path
from typing import TypedDict

from datasets import DatasetDict, load_dataset

OUT_DIR = Path(__file__).parent / "data"
METADATA_PATH = OUT_DIR / "metadata.json"


class ConllExample(TypedDict):
    """A single CoNLL-03 sentence with decoded (string) tag columns."""

    tokens: list[str]
    ner_tags: list[str]
    chunk_tags: list[str]
    pos_tags: list[str]


class LabelMaps(TypedDict):
    """Per-column lookup tables mapping integer tag ids to label strings."""

    ner_tags: list[str]
    chunk_tags: list[str]
    pos_tags: list[str]


def ids_to_strings(example: dict, label_maps: LabelMaps) -> ConllExample:
    """Replace integer tag ids with their string labels in one example."""
    return {
        "tokens": example["tokens"],
        "ner_tags": [label_maps["ner_tags"][i] for i in example["ner_tags"]],
        "chunk_tags": [label_maps["chunk_tags"][i] for i in example["chunk_tags"]],
        "pos_tags": [label_maps["pos_tags"][i] for i in example["pos_tags"]],
    }
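
# Illustrative call, assuming the standard CoNLL-03 label ordering (ner id 3
# is "B-ORG", chunk id 11 is "B-NP", pos id 22 is "NNP"; treat the concrete
# ids as an assumption about this copy of the dataset):
#   ids_to_strings(
#       {"tokens": ["EU"], "ner_tags": [3], "chunk_tags": [11], "pos_tags": [22]},
#       label_maps,
#   )
#   -> {"tokens": ["EU"], "ner_tags": ["B-ORG"], "chunk_tags": ["B-NP"], "pos_tags": ["NNP"]}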


def extract_label_maps(data: DatasetDict) -> LabelMaps:
    """Read the id -> label-name tables from the train split's features."""
    feats = data["train"].features
    return {
        "ner_tags": feats["ner_tags"].feature.names,
        "chunk_tags": feats["chunk_tags"].feature.names,
        "pos_tags": feats["pos_tags"].feature.names,
    }
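
# Each tag column is a Sequence(ClassLabel), so `.feature.names` returns the
# label vocabulary in id order (e.g. ner id 0 is "O" in the usual ordering;
# the exact lists depend on the hub copy being loaded).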


def extract_metadata(data: DatasetDict, label_maps: LabelMaps) -> dict:
    """Collect split sizes, the original feature schemas, and the label maps."""
    num_rows = {split_name: int(split.num_rows) for split_name, split in data.items()}
    features = {name: repr(feature) for name, feature in data["train"].features.items()}
    return {"num_rows": num_rows, "features": features, "label_maps": label_maps}


def main() -> None:
    """Load CoNLL-03 with datasets v3, save each split as Parquet, and write metadata.json.

    Run: python preprocess.py --out-dir data
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--out-dir", type=Path, help="Output directory for Parquet files")
    ap.add_argument("--metadata-path", type=Path, help="Path for metadata.json")
    args = ap.parse_args()

    out_dir = args.out_dir or OUT_DIR
    metadata_path = args.metadata_path or METADATA_PATH
    out_dir.mkdir(parents=True, exist_ok=True)

    cache_path = Path(__file__).parent / "tmp"
    # using datasets v3.6
    data = load_dataset("conll2003", cache_dir=str(cache_path))

    # Some copies name the dev split "valid"; normalize it to "validation".
    split_map = {"train": "train", "validation": "validation", "test": "test"}
    if "validation" not in data and "valid" in data:
        split_map["validation"] = "valid"

    label_maps = extract_label_maps(data)
    meta = extract_metadata(data, label_maps)

    for out_split, src_split in split_map.items():
        if src_split not in data:
            continue
        out_path = out_dir / f"{out_split}.parquet"
        # Decode integer tag ids to string labels before serializing.
        ds_str = data[src_split].map(ids_to_strings, fn_kwargs={"label_maps": label_maps})
        if "id" in ds_str.column_names:
            ds_str = ds_str.remove_columns("id")
        ds_str.to_parquet(str(out_path))
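
    # Sanity check idea (not executed here): reading a file back with
    # load_dataset("parquet", data_files=str(out_path)) should show the tag
    # columns as lists of strings rather than integer ids.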

    metadata_path.write_text(json.dumps(meta, indent=2), encoding="utf-8")

    # Clean up the temporary download cache.
    if cache_path.exists():
        shutil.rmtree(cache_path)


if __name__ == "__main__":
    main()
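
# Usage (the flags map to the argparse options above; the extra paths are
# illustrative):
#   python preprocess.py                      # -> ./data/*.parquet + ./data/metadata.json
#   python preprocess.py --out-dir out --metadata-path out/metadata.json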