|
|
|
|
|
""" |
|
|
Analyze downloaded Vietnamese Legal Corpus files |
|
|
""" |
|
|
|
|
|
import re |
|
|
from pathlib import Path |
|
|
from collections import defaultdict |
|
|
|
|
|
import click |
|
|
from rich.console import Console |
|
|
from rich.table import Table |
|
|
|
|
|
# Shared rich console used by every command for styled terminal output.
console = Console()

# Corpus files live in ../data relative to this script's location.
DATA_DIR = Path(__file__).parent.parent / "data"
|
|
|
|
|
|
|
|
def parse_front_matter(content: str) -> dict:
    """Extract flat ``key: value`` pairs from a markdown front-matter block.

    Only documents that open with ``---`` and contain a closing ``---``
    are parsed; surrounding double quotes are stripped from values.
    Returns an empty dict when no front matter is present.
    """
    metadata: dict = {}
    if not content.startswith("---"):
        return metadata
    sections = content.split("---", 2)
    if len(sections) < 3:
        return metadata
    for raw_line in sections[1].strip().split("\n"):
        key, sep, raw_value = raw_line.partition(":")
        if sep:
            metadata[key.strip()] = raw_value.strip().strip('"')
    return metadata
|
|
|
|
|
|
|
|
def analyze_file(filepath: Path) -> dict:
    """Analyze a single law file and return its metadata plus content stats.

    Returns a dict with the parsed front-matter fields, on-disk size, the
    stripped body length, and a ``has_content`` flag that is True when the
    body holds real text (more than 200 chars and no download-failure
    placeholder).
    """
    content = filepath.read_text(encoding="utf-8")
    size = filepath.stat().st_size

    metadata = parse_front_matter(content)

    # Only strip the front-matter block when one is actually present;
    # otherwise the whole document counts as the body. (Splitting
    # unconditionally mis-handled files whose text contains "---".)
    if content.startswith("---"):
        parts = content.split("---", 2)
        body = parts[2] if len(parts) >= 3 else ""
    else:
        body = content

    # A file counts as downloaded when the body is non-trivial and lacks
    # the "content not downloaded" placeholder written by the scraper.
    has_content = len(body.strip()) > 200 and "*Nội dung chưa được tải xuống.*" not in body

    return {
        "filename": filepath.name,
        "title": metadata.get("title", "Unknown"),
        "type": metadata.get("type", "unknown"),
        "year": metadata.get("year", "unknown"),
        "document_number": metadata.get("document_number", ""),
        "url": metadata.get("url", ""),
        "size": size,
        "has_content": has_content,
        "content_length": len(body.strip()),
    }
|
|
|
|
|
|
|
|
@click.group()
def cli():
    """Analyze VLC corpus files."""
|
|
|
|
|
|
|
|
@cli.command()
def summary():
    """Show summary statistics.

    Prints counts by document type, download success rate, total size,
    a per-year breakdown, and the files whose content is missing.
    """
    if not DATA_DIR.exists():
        console.print("[red]Data directory not found[/red]")
        return

    files = list(DATA_DIR.glob("*.md"))
    if not files:
        # Guard: the success-rate row below divides by the file count.
        console.print("[yellow]No files found in data directory[/yellow]")
        return

    stats = {
        "total": len(files),
        "with_content": 0,
        "without_content": 0,
        "codes": 0,
        "laws": 0,
        "total_size": 0,
        "by_year": defaultdict(int),
    }

    failed = []

    for f in files:
        info = analyze_file(f)
        stats["total_size"] += info["size"]

        # Anything that isn't a code (Bộ luật) is counted as a law.
        if info["type"] == "code":
            stats["codes"] += 1
        else:
            stats["laws"] += 1

        stats["by_year"][info["year"]] += 1

        if info["has_content"]:
            stats["with_content"] += 1
        else:
            stats["without_content"] += 1
            failed.append(info)

    console.print("[bold blue]VLC Corpus Analysis[/bold blue]\n")

    table = Table(title="Summary")
    table.add_column("Metric", style="cyan")
    table.add_column("Value", style="green")

    table.add_row("Total files", str(stats["total"]))
    table.add_row("Codes (Bộ luật)", str(stats["codes"]))
    table.add_row("Laws (Luật)", str(stats["laws"]))
    table.add_row("With content", f"{stats['with_content']} ✓")
    table.add_row("Without content", f"{stats['without_content']} ✗")
    table.add_row("Success rate", f"{stats['with_content']/stats['total']*100:.1f}%")
    table.add_row("Total size", f"{stats['total_size']/1024/1024:.1f} MB")

    console.print(table)

    # Only the ten most recent years, to keep the output compact.
    console.print("\n[bold]By Year:[/bold]")
    for year in sorted(stats["by_year"].keys(), reverse=True)[:10]:
        console.print(f" {year}: {stats['by_year'][year]}")

    if failed:
        console.print(f"\n[bold red]Files without content ({len(failed)}):[/bold red]")
        for info in failed:
            console.print(f" - {info['filename']}")
            console.print(f" Title: {info['title']}")
            console.print(f" Doc#: {info['document_number']}")
|
|
|
|
|
|
|
|
@cli.command()
def failed():
    """List files that failed to download."""
    if not DATA_DIR.exists():
        console.print("[red]Data directory not found[/red]")
        return

    md_files = list(DATA_DIR.glob("*.md"))

    table = Table(title="Files Without Content")
    for header, colour in (
        ("#", "dim"),
        ("Filename", "cyan"),
        ("Title", "white"),
        ("Year", "green"),
        ("Size", "yellow"),
    ):
        table.add_column(header, style=colour)

    count = 0
    for path in sorted(md_files):
        info = analyze_file(path)
        if info["has_content"]:
            continue
        count += 1
        title = info["title"]
        if len(title) > 40:
            title = title[:40] + "..."
        table.add_row(
            str(count),
            info["filename"],
            title,
            str(info["year"]),
            f"{info['size']} B",
        )

    if count > 0:
        console.print(table)
        console.print(f"\n[red]Total: {count} files without content[/red]")
    else:
        console.print("[green]All files have content![/green]")
|
|
|
|
|
|
|
|
@cli.command()
def success():
    """List files that were downloaded successfully."""
    if not DATA_DIR.exists():
        console.print("[red]Data directory not found[/red]")
        return

    md_files = list(DATA_DIR.glob("*.md"))

    table = Table(title="Successfully Downloaded Files")
    for header, colour in (
        ("#", "dim"),
        ("Filename", "cyan"),
        ("Title", "white"),
        ("Size", "green"),
    ):
        table.add_column(header, style=colour)

    # Largest files first; only the top 20 land in the table.
    ordered = sorted(md_files, key=lambda p: p.stat().st_size, reverse=True)

    count = 0
    for path in ordered:
        info = analyze_file(path)
        if not info["has_content"]:
            continue
        count += 1
        if count > 20:
            continue
        title = info["title"]
        if len(title) > 50:
            title = title[:50] + "..."
        table.add_row(
            str(count),
            info["filename"],
            title,
            f"{info['size']/1024:.1f} KB",
        )

    console.print(table)
    if count > 20:
        console.print(f"\n... and {count - 20} more files")
    console.print(f"\n[green]Total: {count} files with content[/green]")
|
|
|
|
|
|
|
|
@cli.command()
@click.argument("pattern", default="")
def search(pattern: str):
    """Search files by name or title."""
    if not DATA_DIR.exists():
        console.print("[red]Data directory not found[/red]")
        return

    pattern = pattern.lower()

    # Case-insensitive substring match against filename or title.
    results = [
        info
        for info in (analyze_file(p) for p in DATA_DIR.glob("*.md"))
        if pattern in info["filename"].lower() or pattern in info["title"].lower()
    ]

    if not results:
        console.print(f"[yellow]No files found matching '{pattern}'[/yellow]")
        return

    table = Table(title=f"Search Results: '{pattern}'")
    for header, colour in (
        ("Filename", "cyan"),
        ("Title", "white"),
        ("Content", "green"),
        ("Size", "yellow"),
    ):
        table.add_column(header, style=colour)

    for info in results:
        table.add_row(
            info["filename"],
            info["title"][:40],
            "✓" if info["has_content"] else "✗",
            f"{info['size']/1024:.1f} KB",
        )

    console.print(table)
|
|
|
|
|
|
|
|
# Front-matter fields every corpus file must define with a real value.
REQUIRED_FIELDS = ["title", "type", "year", "document_number"]

# Fields that improve metadata quality but are not mandatory.
OPTIONAL_FIELDS = ["title_en", "effective_date", "status", "url", "downloaded_at"]
|
|
|
|
|
|
|
|
def validate_front_matter(filepath: Path) -> dict:
    """Validate front matter of a file and return issues.

    Checks that the ``---``-delimited block exists, that all
    REQUIRED_FIELDS are present and non-empty, that type/year values are
    sane, and that the filename agrees with the metadata. Returns a dict
    with ``filename``, a list of human-readable ``issues``, and the
    parsed ``metadata``.
    """
    content = filepath.read_text(encoding="utf-8")
    issues = []
    metadata = {}

    if not content.startswith("---"):
        issues.append("Missing front matter (no opening ---)")
        return {"filename": filepath.name, "issues": issues, "metadata": metadata}

    parts = content.split("---", 2)
    if len(parts) < 3:
        issues.append("Invalid front matter (no closing ---)")
        return {"filename": filepath.name, "issues": issues, "metadata": metadata}

    # Reuse the shared parser so validation and analysis always agree on
    # how front matter is interpreted (previously duplicated inline).
    metadata = parse_front_matter(content)

    for field in REQUIRED_FIELDS:
        if field not in metadata:
            issues.append(f"Missing required field: {field}")
        elif not metadata[field] or metadata[field] in ("Unknown", "unknown", ""):
            issues.append(f"Empty or invalid value for: {field}")

    if metadata.get("type") and metadata["type"] not in ("code", "law"):
        issues.append(f"Invalid type: {metadata['type']} (should be 'code' or 'law')")

    year = metadata.get("year", "")
    if year and not year.isdigit():
        issues.append(f"Invalid year format: {year}")
    elif year and (int(year) < 1945 or int(year) > 2030):
        issues.append(f"Year out of range: {year}")

    expected_prefix = "code" if metadata.get("type") == "code" else "law"
    if not filepath.name.startswith(f"{expected_prefix}-"):
        issues.append(f"Filename prefix mismatch: expected '{expected_prefix}-'")

    # NOTE(review): assumes the year appears as an interior "-YYYY-" segment;
    # a filename ending in "-YYYY.md" would be flagged — confirm convention.
    if year and f"-{year}-" not in filepath.name:
        issues.append(f"Year in filename doesn't match metadata: {year}")

    return {"filename": filepath.name, "issues": issues, "metadata": metadata}
|
|
|
|
|
|
|
|
@cli.command()
def validate():
    """Validate front matter of all files."""
    if not DATA_DIR.exists():
        console.print("[red]Data directory not found[/red]")
        return

    md_files = list(DATA_DIR.glob("*.md"))

    # Collect only the files that reported at least one issue.
    problems = [
        result
        for result in (validate_front_matter(p) for p in sorted(md_files))
        if result["issues"]
    ]

    if problems:
        table = Table(title="Files with Invalid Front Matter")
        table.add_column("#", style="dim")
        table.add_column("Filename", style="cyan")
        table.add_column("Issues", style="red")

        for index, item in enumerate(problems, 1):
            table.add_row(str(index), item["filename"], "\n".join(item["issues"]))

        console.print(table)
        console.print(f"\n[red]Total: {len(problems)} files with issues[/red]")
    else:
        console.print("[green]All files have valid front matter![/green]")

    console.print("\n[bold]Validation Summary:[/bold]")
    console.print(f" Total files: {len(md_files)}")
    console.print(f" Valid: {len(md_files) - len(problems)}")
    console.print(f" Invalid: {len(problems)}")
|
|
|
|
|
|
|
|
@cli.command()
@click.argument("filename", required=False)
def inspect(filename: str):
    """Inspect front matter of a specific file or show all fields.

    With FILENAME, shows that file's front-matter fields and any
    validation issues; without it, shows per-field coverage statistics
    across the whole corpus.
    """
    if not DATA_DIR.exists():
        console.print("[red]Data directory not found[/red]")
        return

    if filename:
        filepath = DATA_DIR / filename
        if not filepath.exists():
            # Fall back to a substring glob when the exact name misses.
            # (Bug fix: the argument was previously not interpolated, so
            # the glob searched for a literal placeholder string.)
            matches = list(DATA_DIR.glob(f"*{filename}*"))
            if matches:
                filepath = matches[0]
            else:
                console.print(f"[red]File not found: {filename}[/red]")
                return

        result = validate_front_matter(filepath)
        console.print(f"\n[bold]File: {result['filename']}[/bold]\n")

        table = Table(title="Front Matter")
        table.add_column("Field", style="cyan")
        table.add_column("Value", style="white")
        table.add_column("Status", style="green")

        for field in REQUIRED_FIELDS + OPTIONAL_FIELDS:
            value = result["metadata"].get(field, "")
            if field in REQUIRED_FIELDS:
                status = "✓" if value and value not in ("Unknown", "unknown") else "✗ required"
            else:
                status = "✓" if value else "-"
            table.add_row(field, str(value)[:50], status)

        console.print(table)

        if result["issues"]:
            console.print("\n[bold red]Issues:[/bold red]")
            for issue in result["issues"]:
                console.print(f" - {issue}")
    else:
        files = list(DATA_DIR.glob("*.md"))
        if not files:
            # Guard: coverage percentages below divide by the file count.
            console.print("[yellow]No files found in data directory[/yellow]")
            return

        field_stats = defaultdict(int)

        for f in files:
            result = validate_front_matter(f)
            for field in REQUIRED_FIELDS + OPTIONAL_FIELDS:
                if result["metadata"].get(field):
                    field_stats[field] += 1

        table = Table(title="Front Matter Field Statistics")
        table.add_column("Field", style="cyan")
        table.add_column("Present", style="green")
        table.add_column("Missing", style="red")
        table.add_column("Coverage", style="yellow")

        for field in REQUIRED_FIELDS + OPTIONAL_FIELDS:
            present = field_stats[field]
            missing = len(files) - present
            coverage = f"{present/len(files)*100:.1f}%"
            req = " (required)" if field in REQUIRED_FIELDS else ""
            table.add_row(f"{field}{req}", str(present), str(missing), coverage)

        console.print(table)
|
|
|
|
|
|
|
|
@cli.command()
def export_failed():
    """Export failed files to retry list."""
    if not DATA_DIR.exists():
        console.print("[red]Data directory not found[/red]")
        return

    # Gather the metadata needed to retry each failed download.
    missing = []
    for path in DATA_DIR.glob("*.md"):
        info = analyze_file(path)
        if info["has_content"]:
            continue
        missing.append(
            {
                "filename": info["filename"],
                "title": info["title"],
                "document_number": info["document_number"],
            }
        )

    if not missing:
        console.print("[green]No failed files to export[/green]")
        return

    output = DATA_DIR.parent / "failed_downloads.txt"
    lines = [
        f"{item['filename']}\t{item['title']}\t{item['document_number']}\n"
        for item in missing
    ]
    with open(output, "w", encoding="utf-8") as fh:
        fh.writelines(lines)

    console.print(f"[green]Exported {len(missing)} failed files to {output}[/green]")
|
|
|
|
|
|
|
|
# Script entry point: dispatch to the click command group.
if __name__ == "__main__":
    cli()
|
|
|