"""
Parametric analysis of geodata mapping in the IFVI Global Value Factors Dataset.

Analyzes the geographic entity distribution and ISO code coverage.
"""

import csv
import json
import os
from collections import Counter
from datetime import datetime, timezone


def analyze_geodata():
    """Analyze the geodata.json file for geographic statistics."""
geodata_path = "/home/daniel/repos/hugging-face/IFVI-Global-Value-Factors-Dataset-V2/processing/remapping/geodata.json" |
|
|
output_dir = "/home/daniel/repos/hugging-face/IFVI-Global-Value-Factors-Dataset-V2/parametric-data" |
|
|
|
|
|
print("IFVI Global Value Factors Dataset - Geographic Entity Analysis") |
|
|
print("=" * 65) |
|
|
print() |
|
|
|
|
|

    try:
        with open(geodata_path, 'r', encoding='utf-8') as f:
            data = json.load(f)

        metadata = data.get('metadata', {})
        mapping = data.get('mapping', [])

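        # Headline totals precomputed in the geodata file's metadata block.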
        total_entities = metadata.get('total_entities', 0)
        entity_types = metadata.get('entity_types', {})
        entities_with_iso = metadata.get('entities_with_iso', 0)
        us_states_with_codes = metadata.get('us_states_with_codes', 0)

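        # Entities with neither an ISO 3166-1 code nor a US state code.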
        entities_without_iso = total_entities - entities_with_iso - us_states_with_codes

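        # Tally per-region totals, split by ISO-code availability.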
        region_counts = Counter()
        iso_by_region = Counter()
        non_iso_by_region = Counter()

        for entity in mapping:
            region = entity.get('region', 'Unknown')
            has_iso = entity.get('has_iso', False)
            entity_type = entity.get('entity_type', 'unknown')

            region_counts[region] += 1

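            # US states carry state codes rather than ISO 3166-1 country codes,
            # so they are kept out of the "without ISO" tally.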
            if has_iso:
                iso_by_region[region] += 1
            elif entity_type != 'us_state':
                non_iso_by_region[region] += 1

print("GEOGRAPHIC ENTITY STATISTICS") |
|
|
print("-" * 40) |
|
|
print(f"Total unique geolocations: {total_entities:,}") |
|
|
print(f"Entities with ISO 3166-1 codes: {entities_with_iso:,}") |
|
|
print(f"US states (with state codes): {us_states_with_codes:,}") |
|
|
print(f"Non-sovereign entities (no ISO): {entities_without_iso:,}") |
|
|
print() |
|
|
|
|
|
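        # Share of each entity type among all geolocations.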
print("ENTITY TYPE BREAKDOWN") |
|
|
print("-" * 25) |
|
|
for entity_type, count in entity_types.items(): |
|
|
percentage = (count / total_entities) * 100 |
|
|
print(f"{entity_type.replace('_', ' ').title()}: {count:,} ({percentage:.1f}%)") |
|
|
print() |
|
|
|
|
|
print("ISO CODE COVERAGE") |
|
|
print("-" * 20) |
|
|
total_with_codes = entities_with_iso + us_states_with_codes |
|
|
coverage_percentage = (total_with_codes / total_entities) * 100 |
|
|
print(f"Entities with standardized codes: {total_with_codes:,} ({coverage_percentage:.1f}%)") |
|
|
print(f"Entities without codes: {entities_without_iso:,} ({(entities_without_iso/total_entities)*100:.1f}%)") |
|
|
print() |
|
|
|
|
|
print("REGIONAL DISTRIBUTION") |
|
|
print("-" * 22) |
|
|
for region in sorted(region_counts.keys()): |
|
|
total_in_region = region_counts[region] |
|
|
with_iso = iso_by_region.get(region, 0) |
|
|
without_iso = non_iso_by_region.get(region, 0) |
|
|
|
|
|
print(f"{region}:") |
|
|
print(f" Total entities: {total_in_region}") |
|
|
print(f" With ISO codes: {with_iso}") |
|
|
print(f" Without ISO codes: {without_iso}") |
|
|
print() |
|
|
|
|
|
|
|
|
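        # Assemble a machine-readable summary mirroring the printed report.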
        analysis_results = {
            "analysis_metadata": {
                # Record when this analysis actually ran (UTC).
                "timestamp": datetime.now(timezone.utc).isoformat(),
                "source_file": "processing/remapping/geodata.json",
                "iso_standard": metadata.get('iso_standard', 'ISO 3166-1 alpha-3')
            },
            "geographic_statistics": {
                "total_unique_geolocations": total_entities,
                "entities_with_iso_codes": entities_with_iso,
                "us_states_with_codes": us_states_with_codes,
                "non_sovereign_entities": entities_without_iso,
                "total_with_standardized_codes": total_with_codes,
                "code_coverage_percentage": round(coverage_percentage, 2)
            },
            "entity_type_breakdown": {
                entity_type: {
                    "count": count,
                    "percentage": round((count / total_entities) * 100, 2)
                }
                for entity_type, count in entity_types.items()
            },
            "regional_distribution": {
                region: {
                    "total_entities": region_counts[region],
                    "with_iso_codes": iso_by_region.get(region, 0),
                    "without_iso_codes": non_iso_by_region.get(region, 0)
                }
                for region in sorted(region_counts.keys())
            }
        }

        output_file = os.path.join(output_dir, "geodata_analysis.json")
        with open(output_file, 'w', encoding='utf-8') as f:
            json.dump(analysis_results, f, indent=2, ensure_ascii=False)

        print(f"Results saved to: {output_file}")

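        # Also write a flat CSV with the headline metrics.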
        csv_file = os.path.join(output_dir, "geodata_summary.csv")
        with open(csv_file, 'w', newline='', encoding='utf-8') as f:
            writer = csv.writer(f)
            writer.writerow(["Metric", "Count", "Percentage"])
            writer.writerow(["Total Geolocations", total_entities, "100.0"])
            writer.writerow(["With ISO Codes", entities_with_iso, f"{(entities_with_iso / total_entities) * 100:.1f}"])
            writer.writerow(["US States", us_states_with_codes, f"{(us_states_with_codes / total_entities) * 100:.1f}"])
            writer.writerow(["Non-Sovereign", entities_without_iso, f"{(entities_without_iso / total_entities) * 100:.1f}"])

        print(f"Summary CSV saved to: {csv_file}")

    except Exception as e:
        print(f"Error analyzing geodata: {e}")


if __name__ == "__main__":
    analyze_geodata()