| from __future__ import annotations |
|
|
| import argparse |
|
|
| from speakervid_metadb import SpeakerVidMetaDB |
|
|
|
|
def main() -> int:
    """CLI entry point: report stats for the top ``--top-pct`` percent of samples by Sync-c.

    Builds a DuckDB query over the SpeakerVid parquet metadata and prints one
    line containing: the clean-sample count, the Sync-c quantile threshold,
    and — over the top slice — sample count, mean Sync-c, mean Sync-d, and
    total duration in hours.

    Returns:
        Process exit code (0 on success).

    Raises:
        SystemExit: if ``--top-pct`` is outside the open interval (0, 100).
    """
    parser = argparse.ArgumentParser(
        description=(
            "Compute stats for the top portion of Sync-c samples: mean Sync-c and total duration (hours). "
            'Sync-c = first element of sync["<sync-key>"][0].'
        )
    )
    parser.add_argument(
        "--parquet-glob",
        type=str,
        default="/mnt/nfs/datasets/SpeakerVid-5M/merged_anno/extracted_parquet/*.parquet",
        help="Parquet glob path for tar_batch parquet files.",
    )
    parser.add_argument(
        "--sync-key",
        type=str,
        default="0",
        help='Sync dict key to use (default: "0").',
    )
    parser.add_argument(
        "--top-pct",
        type=float,
        default=3.0,
        help="Top percentage by Sync-c (e.g. 20 for top 20%%).",
    )
    parser.add_argument(
        "--where",
        type=str,
        default=None,
        help="Optional extra DuckDB WHERE fragment to apply before ranking (e.g. 'is_talking=1').",
    )
    args = parser.parse_args()

    # Fail fast on invalid input BEFORE opening the DuckDB view over the
    # parquet glob (previously validated only after the DB was constructed).
    if not (0.0 < args.top_pct < 100.0):
        raise SystemExit("--top-pct must be in (0, 100)")
    # Top p% by Sync-c corresponds to the (1 - p/100) quantile cut point.
    q = 1.0 - (args.top_pct / 100.0)

    db = SpeakerVidMetaDB(parquet_glob=args.parquet_glob)

    # Sync-c is extracted by the project helper; Sync-d is the second element
    # of the same JSON entry: sync["<key>"][0][1], cast to DOUBLE.
    sync_c_expr = db.sync_c_expr(sync_key=args.sync_key)
    sync_d_expr = db.json_extract_expr("sync", f'$."{args.sync_key}"[0][1]', cast="DOUBLE")
    base_where = db.where_and(args.where, "sync IS NOT NULL", "duration IS NOT NULL")

    # NOTE(review): --where and --sync-key are interpolated into the SQL
    # verbatim. Acceptable for a trusted local CLI, but not safe for
    # untrusted input — do not expose this path to external callers.
    sql = f"""
    WITH base AS (
        SELECT
            {sync_c_expr} AS sync_c,
            {sync_d_expr} AS sync_d,
            duration AS duration_s
        FROM {db.view_name}
        WHERE {base_where}
    ),
    clean AS (
        SELECT
            sync_c,
            sync_d,
            duration_s
        FROM base
        WHERE sync_c IS NOT NULL AND duration_s IS NOT NULL AND duration_s > 0
    ),
    q AS (
        SELECT quantile_cont(sync_c, {q}) AS p_quantile FROM clean
    ),
    topN AS (
        SELECT sync_c, sync_d, duration_s FROM clean, q WHERE clean.sync_c >= q.p_quantile
    )
    SELECT
        (SELECT COUNT(*) FROM clean) AS n_total,
        (SELECT p_quantile FROM q) AS p_threshold,
        COUNT(*) AS n_top,
        AVG(sync_c) AS avg_sync_c_top,
        AVG(sync_d) AS avg_sync_d_top,
        SUM(duration_s) / 3600.0 AS total_duration_hours
    FROM topN
    """

    # Single aggregate row is always returned for this query shape.
    row = db.con.sql(sql).fetchone()
    print(
        f"top_pct={args.top_pct} n_total={row[0]} threshold={row[1]} n_top={row[2]} "
        f"avg_sync_c_top={row[3]} avg_sync_d_top={row[4]} total_duration_hours={row[5]}",
        flush=True,
    )
    return 0
|
|
|
|
| if __name__ == "__main__": |
| raise SystemExit(main()) |
|
|