# mcp-clients / mcp_remote_daily.py
# Uploaded by evalstate (HF Staff) via huggingface_hub — commit e7e3105 (verified)
# (Hugging Face file-viewer header preserved as comments; size: 3.93 kB)
#!/usr/bin/env -S uv run
# /// script
# requires-python = ">=3.10"
# dependencies = [
# "datasets",
# "pandas",
# ]
# ///
"""
Analyze daily proportion of sessions that include mcp-remote.
This script analyzes the evalstate/hf-mcp-logs dataset to calculate what proportion
of sessions included "mcp-remote" in the client name on a daily basis.
Usage:
uv run mcp_remote_daily.py [--limit N]
"""
import sys
import re
import argparse
from datetime import datetime
from collections import defaultdict
from datasets import load_dataset
def main() -> None:
    """Report the daily share of sessions whose client name includes mcp-remote.

    Streams the ``sessions`` split of ``evalstate/hf-mcp-logs``, groups rows by
    calendar day (taken from the ISO ``time`` field), and counts how many of
    each day's sessions have a ``name`` containing ``"(via mcp-remote"``.
    Progress and summary diagnostics go to stderr; the result table to stdout.
    """
    parser = argparse.ArgumentParser(description="Analyze daily mcp-remote usage")
    parser.add_argument("--limit", type=int, default=None, help="Limit processing to N rows (for testing)")
    args = parser.parse_args()

    print(f"[{datetime.now().isoformat()}] Loading dataset from evalstate/hf-mcp-logs...", file=sys.stderr)

    # Load the source dataset (streaming, so it is never fully materialized).
    ds = load_dataset('evalstate/hf-mcp-logs', 'sessions', streaming=True)
    sessions_ds = ds['sessions']

    # Pattern to detect mcp-remote suffix (any version) in the client name.
    MCP_REMOTE_PATTERN = re.compile(r'\(via mcp-remote')

    # Track by day: date string -> {'total': n, 'with_mcp_remote': m}.
    daily_stats = defaultdict(lambda: {'total': 0, 'with_mcp_remote': 0})

    total_rows = 0
    start_time = datetime.now()
    print(f"[{datetime.now().isoformat()}] Processing sessions...", file=sys.stderr)

    for batch in sessions_ds.iter(batch_size=10000):
        batch_len = len(batch['time'])
        total_rows += batch_len

        # Progress indicator (batch size divides 100k, so this fires every 100k rows).
        if total_rows % 100000 == 0:
            elapsed = (datetime.now() - start_time).total_seconds()
            rate = total_rows / elapsed if elapsed > 0 else 0
            print(f"[{datetime.now().isoformat()}] Progress: {total_rows:,} rows processed "
                  f"({rate:.0f} rows/sec)", file=sys.stderr)

        for i in range(batch_len):
            time_str = batch['time'][i]
            name = batch['name'][i]

            # Sessions without a client name cannot be classified; skip them
            # entirely (they are excluded from the daily totals as well).
            if name is None:
                continue

            # Extract date from time string (ISO format: 2025-11-19T...).
            # NOTE(review): assumes time_str is never None — confirm against dataset schema.
            date_str = time_str.split('T')[0] if 'T' in time_str else time_str
            daily_stats[date_str]['total'] += 1

            # Check if name contains mcp-remote suffix.
            if MCP_REMOTE_PATTERN.search(name):
                daily_stats[date_str]['with_mcp_remote'] += 1

        # BUGFIX: check the limit *after* classifying the batch's rows, so every
        # row counted in total_rows also contributes to daily_stats. Previously
        # the break fired before the per-row loop, so the final batch was
        # counted but never analyzed.
        if args.limit and total_rows >= args.limit:
            print(f"[{datetime.now().isoformat()}] Reached limit of {args.limit:,} rows", file=sys.stderr)
            break

    elapsed = (datetime.now() - start_time).total_seconds()
    rate = total_rows / elapsed if elapsed > 0 else 0
    print(f"[{datetime.now().isoformat()}] Processing complete: {total_rows:,} rows", file=sys.stderr)
    print(f"Rate: {rate:.0f} rows/sec, elapsed: {elapsed:.1f} seconds", file=sys.stderr)
    print(f"Found {len(daily_stats)} days of data", file=sys.stderr)
    print()

    # Print daily statistics, sorted by date (ISO dates sort lexicographically).
    print("Daily mcp-remote Usage")
    print("=" * 70)
    print(f"{'Date':<12} {'Total Sessions':>15} {'mcp-remote':>12} {'%':>10}")
    print("-" * 70)
    for date in sorted(daily_stats):
        stats = daily_stats[date]
        total = stats['total']
        with_mcp = stats['with_mcp_remote']
        pct = (with_mcp / total * 100) if total > 0 else 0
        print(f"{date:<12} {total:>15,} {with_mcp:>12,} {pct:>9.1f}%")
    print("=" * 70)

    # Overall summary across all days.
    overall_total = sum(s['total'] for s in daily_stats.values())
    overall_with_mcp = sum(s['with_mcp_remote'] for s in daily_stats.values())
    overall_pct = (overall_with_mcp / overall_total * 100) if overall_total > 0 else 0
    print(f"{'TOTAL':<12} {overall_total:>15,} {overall_with_mcp:>12,} {overall_pct:>9.1f}%")
    print()
# Script entry point: run the analysis only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()