#!/usr/bin/env python3
"""
Build the complete UVW 2026 dataset.
UVW 2026: Underthesea Vietnamese Wikipedia Dataset
https://github.com/undertheseanlp/underthesea/issues/896
This script runs the entire pipeline:
1. Download Vietnamese Wikipedia dump
2. Extract and clean articles
3. Create train/dev/test splits
4. Prepare for HuggingFace Hub
"""
import subprocess
import sys
from pathlib import Path
# Directory containing this script; the sibling pipeline scripts live here too.
SCRIPTS_DIR = Path(__file__).parent
def run_script(script_name: str) -> bool:
    """Run a sibling pipeline script with the current interpreter.

    Args:
        script_name: Filename of a script located in SCRIPTS_DIR.

    Returns:
        True if the script exited with status 0; False otherwise,
        including when the script file does not exist.
    """
    script_path = SCRIPTS_DIR / script_name
    # Fail fast with a clear message instead of letting subprocess
    # raise (or the child interpreter print a confusing traceback).
    if not script_path.exists():
        print(f"Script not found: {script_path}")
        return False
    print(f"\n{'='*60}")
    print(f"Running: {script_name}")
    print(f"{'='*60}\n")
    result = subprocess.run([sys.executable, str(script_path)])
    return result.returncode == 0
def main():
    """Build the complete UVW 2026 dataset."""
    banner = "=" * 60
    print(banner)
    print("UVW 2026: Underthesea Vietnamese Wikipedia Dataset")
    print(banner)

    # Pipeline stages, in execution order; each is a sibling script.
    pipeline = (
        "download_wikipedia.py",
        "extract_articles.py",
        "create_splits.py",
        "prepare_huggingface.py",
    )
    for stage in pipeline:
        # Abort the whole build as soon as any stage fails.
        if not run_script(stage):
            print(f"\nError running {stage}. Stopping pipeline.")
            sys.exit(1)

    print("\n" + banner)
    print("Dataset build complete!")
    print(banner)

    # Summarize where each stage wrote its output.
    data_dir = SCRIPTS_DIR.parent / "data"
    print(f"\nOutput locations:")
    print(f" - Raw dump: {data_dir / 'raw'}")
    print(f" - Processed: {data_dir / 'processed'}")
    print(f" - Splits: {data_dir / 'splits'}")
    print(f" - HuggingFace: {data_dir / 'huggingface'}")
# Run the full dataset-build pipeline when invoked as a script.
if __name__ == "__main__":
    main()
|