# Source: repair_data/csv_repair.py (published by savedata101, commit 4a225ce, verified)
#!/usr/bin/env python3
import argparse
import json
import os
import random
from typing import Dict, Any, Optional, Tuple
import pandas as pd
import numpy as np
from tqdm import tqdm
def try_read_csv(path: str) -> pd.DataFrame:
    """Read a CSV file, trying UTF-8 first and falling back to latin-1.

    Only a UnicodeDecodeError triggers the latin-1 fallback; other
    failures (missing file, malformed CSV, permission errors) propagate
    instead of being masked by a pointless re-read.

    Args:
        path: Filesystem path of the CSV file.

    Returns:
        The parsed DataFrame.

    Raises:
        Whatever ``pd.read_csv`` raises for non-encoding problems
        (e.g. FileNotFoundError, pandas parser errors).
    """
    print(f"📂 Reading: {path}")
    try:
        df = pd.read_csv(path, encoding="utf-8")
        print(" ✅ UTF-8")
        return df
    except UnicodeDecodeError:
        # Bytes are not valid UTF-8 → assume a legacy single-byte encoding.
        df = pd.read_csv(path, encoding="latin-1")
        print(" ✅ latin-1")
        return df
def canonical_country(name: Any) -> Any:
    """Normalize a country name to its canonical catalog spelling.

    Args:
        name: Raw country value from the data; may be NaN/None or blank.

    Returns:
        ``np.nan`` for missing/blank input; otherwise the canonical name
        from the synonym table (matched case-insensitively on the stripped
        value), or the stripped original when no synonym applies.
    """
    if pd.isna(name) or not str(name).strip():
        return np.nan
    n = str(name).strip().lower()
    # Common abbreviations / alternate names → catalog spelling.
    synonyms = {
        "usa": "United States", "us": "United States", "u.s.": "United States",
        "uk": "United Kingdom", "u.k.": "United Kingdom",
        "south korea": "Korea, South", "republic of korea": "Korea, South",
        "russian federation": "Russia",
        "uae": "United Arab Emirates", "u.a.e.": "United Arab Emirates",
    }
    # (Removed dead commented-out debug print that was left as a no-op
    # triple-quoted expression statement.)
    return synonyms.get(n, str(name).strip())
def load_city_catalog(json_path: str) -> Dict[str, list]:
    """Load the country→cities catalog from JSON.

    Entries whose value is not a non-empty list are dropped, so every
    country in the returned mapping has at least one city.
    """
    print(f"📚 Loading catalog: {json_path}")
    with open(json_path, "r", encoding="utf-8") as fh:
        raw = json.load(fh)
    # Keep only countries that actually list at least one city.
    filtered: Dict[str, list] = {}
    for country, cities in raw.items():
        if isinstance(cities, list) and cities:
            filtered[country] = cities
    print(f" ✅ {len(filtered)} countries with cities")
    return filtered
def pick_random_city_country(catalog: Dict[str, list], rng: random.Random) -> tuple[str, str]:
    """Draw a random country from the catalog, then a random city within it.

    Returns the pair as ``(city, country)`` with the city whitespace-stripped.
    """
    countries = list(catalog.keys())
    chosen_country = rng.choice(countries)
    chosen_city = rng.choice(catalog[chosen_country]).strip()
    return chosen_city, chosen_country
def repair_location_in_place(df: pd.DataFrame, catalog_path: str) -> Tuple[pd.DataFrame, Dict[str, Any]]:
    """Repair the 'Location' column so every row holds a valid 'city, country' pair.

    Each value is right-split on its last comma into (city, country); the
    country is canonicalized and checked against the catalog:
      * city or country missing/unknown → a fresh random pair is drawn;
      * country valid but city not in its catalog list → city replaced by a
        random city of that country;
      * both parts valid → kept as-is (stripped).
    A fixed RNG seed (42) makes all random replacements reproducible.

    Args:
        df: Input data; must contain a 'Location' column.
        catalog_path: Path to the country→cities JSON catalog.

    Returns:
        Tuple of (repaired copy of ``df``, dict of repair statistics).
    """
    print("🔧 Repairing Location → always ensure 'city, country' pair...")
    out = df.copy()
    catalog = load_city_catalog(catalog_path)
    rng = random.Random(42)  # fixed seed → deterministic repairs across runs

    # Split on the LAST comma so a city name containing commas stays intact.
    s = out["Location"].astype(str).str.strip()
    parts = s.str.rsplit(',', n=1, expand=True)
    # If no row contained a comma there is only one split column; treat both
    # parts as missing in that case. Empty strings are normalized to NaN.
    city_raw = parts.iloc[:, 0].str.strip().replace("", np.nan) if parts.shape[1] > 1 else pd.Series([np.nan]*len(out))
    country_raw = parts.iloc[:, 1].str.strip().replace("", np.nan) if parts.shape[1] > 1 else pd.Series([np.nan]*len(out))

    # Canonicalize country spellings (e.g. 'usa' → 'United States').
    # (Removed an unused vectorized validity count that duplicated the
    # per-row checks below.)
    country_canonical = country_raw.apply(canonical_country)

    final_city = []
    final_country = []
    city_fixed = 0
    country_fixed = 0
    both_missing_fixed = 0

    print(" 🔄 Processing rows...")
    for idx in tqdm(range(len(out)), ncols=80):
        ctry = country_canonical.iloc[idx]
        city = city_raw.iloc[idx] if len(city_raw) > idx else np.nan

        # Case 1: city or country missing/invalid → generate a new random pair.
        if pd.isna(ctry) or pd.isna(city) or ctry not in catalog:
            if pd.isna(ctry) and pd.isna(city):
                both_missing_fixed += 1
            new_city, new_country = pick_random_city_country(catalog, rng)
            final_city.append(new_city)
            final_country.append(new_country)
            # Count a country fix only when a (non-missing) country was present
            # but not found in the catalog.
            if ctry not in catalog and not pd.isna(ctry):
                country_fixed += 1
            continue

        # Case 2: country is valid → check the city against its catalog cities.
        valid_cities_lower = {c.strip().lower() for c in catalog[ctry]}
        if pd.notna(city) and str(city).strip().lower() in valid_cities_lower:
            # City is valid → keep as-is.
            final_city.append(str(city).strip())
            final_country.append(ctry)
        else:
            # Unknown city → substitute a real random city of that country.
            new_city = rng.choice(catalog[ctry]).strip()
            final_city.append(new_city)
            final_country.append(ctry)
            city_fixed += 1

    # Rebuild the Location column from the repaired pairs.
    new_location = [f"{c}, {co}" for c, co in zip(final_city, final_country)]
    out["Location"] = new_location

    actions = {
        "catalog_path": catalog_path,
        "country_fixed": int(country_fixed + both_missing_fixed),
        "city_fixed": int(city_fixed + both_missing_fixed),
        "both_missing_filled": int(both_missing_fixed),
        "total_rows": len(out),
    }
    print(f" ✅ Done. City fixed: {city_fixed}, Country fixed: {country_fixed}, Both missing → random pair: {both_missing_fixed}")
    return out, actions
def main():
    """CLI entry point: read the input CSV, repair 'Location', write outputs."""
    print("🚀 Starting CSV Repair (ensure city + country in Location)")
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-i", "--input", default="HR_Data_Clean_2020_2025.csv")
    arg_parser.add_argument("-o", "--cleaned-csv", default="cleaned.csv")
    arg_parser.add_argument("--catalog", default="report/provinces_by_country.json")
    opts = arg_parser.parse_args()

    # Load input and sanity-check the required column before doing any work.
    df = try_read_csv(opts.input)
    print(f" 📊 Rows: {len(df):,}, Columns: {list(df.columns)}")
    if "Location" not in df.columns:
        raise ValueError("❌ 'Location' column not found")

    # Repair the Location column on a copy of the data.
    cleaned, actions = repair_location_in_place(df, catalog_path=opts.catalog)

    # Persist the cleaned CSV.
    print(f"💾 Saving cleaned CSV → {opts.cleaned_csv}")
    cleaned.to_csv(opts.cleaned_csv, index=False)

    # Persist the JSON repair report.
    report_path = "report/location_repair_report.json"
    os.makedirs("report", exist_ok=True)
    with open(report_path, "w", encoding="utf-8") as report_file:
        json.dump({"input": opts.input, "actions": actions}, report_file, indent=2, ensure_ascii=False)

    # Human-readable summary.
    divider = "═" * 50
    print("\n" + divider)
    print("✅ REPAIR SUMMARY")
    print(f" Input: {opts.input}")
    print(f" Output: {opts.cleaned_csv}")
    print(f" Country fixed: {actions['country_fixed']:,}")
    print(f" City fixed: {actions['city_fixed']:,}")
    print(f" Both missing → pair: {actions['both_missing_filled']:,}")
    print(f" Report: {report_path}")
    print(divider)


if __name__ == "__main__":
    main()