| |
| |
| |
| |
| |
| |
| |
| |
"""
Scheduled job to extract and update the MCP clients dataset.
This script runs via HF Jobs to keep the dataset fresh.

Usage:
    uv run mcp_clients_job.py [--limit N]
"""
|
|
# Standard library
import argparse
import json
import sys
from datetime import datetime

# Third-party: Hugging Face datasets
from datasets import Dataset, Features, Value
|
|
|
|
def normalize_capabilities(caps):
    """Normalize capabilities for comparison.

    Strings are parsed as JSON when possible ('{}' short-circuits to an
    empty dict); anything that is not a string — including None — is
    returned unchanged, as is a string that fails to parse.
    """
    if caps is None:
        return None
    if not isinstance(caps, str):
        return caps
    if caps == '{}':
        return {}
    try:
        return json.loads(caps)
    except Exception:
        # Not valid JSON — keep the raw string so callers can still compare it.
        return caps
|
|
|
|
def create_dataset_from_clients(clients_list):
    """Create a Hugging Face Dataset from the clients list.

    Each client dict must provide 'name', 'version', 'capabilities', and
    'last_seen'. All columns are stored as strings; capabilities dicts are
    serialized to canonical JSON (sorted keys) and None becomes ''.
    """
    schema = Features({
        'name': Value('string'),
        'version': Value('string'),
        'capabilities': Value('string'),
        'last_seen': Value('string'),
    })

    def _caps_as_text(raw):
        # Capabilities may arrive as a dict, None, or an arbitrary value.
        if isinstance(raw, dict):
            return json.dumps(raw, sort_keys=True)
        return '' if raw is None else str(raw)

    rows = [
        {
            'name': client['name'],
            'version': client['version'],
            'capabilities': _caps_as_text(client['capabilities']),
            'last_seen': client['last_seen'],
        }
        for client in clients_list
    ]

    return Dataset.from_list(rows, features=schema)
|
|
|
|
def main():
    """Extract unique MCP client configurations and push them to the Hub.

    Streams session events from 'evalstate/hf-mcp-logs', keeps only
    'initialize' events, deduplicates by (name, version, canonical
    capabilities) retaining the most recent 'last_seen' timestamp, and
    pushes the result to --repo-id. Progress and stats go to stderr so
    stdout stays clean for job tooling.
    """
    parser = argparse.ArgumentParser(description="Extract and update MCP clients dataset")
    parser.add_argument("--limit", type=int, default=None, help="Limit processing to N rows (for testing)")
    parser.add_argument("--repo-id", default="evalstate/mcp-clients", help="Target repository ID")
    args = parser.parse_args()

    print(f"[{datetime.now().isoformat()}] Starting MCP clients extraction...", file=sys.stderr)
    if args.limit:
        print(f"[{datetime.now().isoformat()}] LIMIT MODE: Will process at most {args.limit:,} rows", file=sys.stderr)

    # (name, version, canonical-capabilities) -> most recent record seen.
    unique_clients = {}

    # Imported here (not at module top) so `--help` stays fast.
    from datasets import load_dataset
    ds = load_dataset('evalstate/hf-mcp-logs', 'sessions', streaming=True)
    # NOTE(review): indexing by 'sessions' assumes the split (not just the
    # config) is named 'sessions' — confirm against the source dataset.
    sessions_ds = ds['sessions']

    total_rows = 0
    skipped_deletes = 0
    skipped_other = 0
    start_time = datetime.now()

    limit_hit = False
    for batch in sessions_ds.iter(batch_size=1000):
        batch_len = len(batch['name'])

        # BUG FIX: the original broke out of the loop *before* the per-row
        # loop ran, so the rows of the batch that crossed --limit were
        # counted but never processed. Trim the batch so exactly `limit`
        # rows are processed, then break after processing it.
        if args.limit and total_rows + batch_len >= args.limit:
            batch_len = max(0, args.limit - total_rows)
            limit_hit = True

        total_rows += batch_len

        for i in range(batch_len):
            method_name = batch['methodName'][i] if 'methodName' in batch else None

            if method_name == 'session_delete':
                skipped_deletes += 1
                continue

            if method_name != 'initialize':
                skipped_other += 1
                continue

            name = batch['name'][i]
            version = batch['version'][i]
            capabilities = normalize_capabilities(batch['capabilities'][i])
            time_str = batch['time'][i]

            # Skip incomplete records; capabilities may legitimately be empty.
            if not all([name, version, time_str]):
                continue

            # Canonicalize capabilities into a hashable dedup key.
            if isinstance(capabilities, dict):
                cap_key = json.dumps(capabilities, sort_keys=True)
            else:
                cap_key = str(capabilities) if capabilities is not None else None

            key = (name, version, cap_key)

            # Keep the most recent sighting. Presumably timestamps are ISO
            # strings, so lexicographic comparison orders them — verify.
            if key not in unique_clients or time_str > unique_clients[key]['last_seen']:
                unique_clients[key] = {
                    'name': name,
                    'version': version,
                    'capabilities': capabilities,
                    'last_seen': time_str,
                }

        if limit_hit:
            print(f"[{datetime.now().isoformat()}] Reached limit of {args.limit:,} rows", file=sys.stderr)
            break

        # Batches are 1000 rows, so this fires every 100 full batches.
        if total_rows % 100000 == 0:
            elapsed = (datetime.now() - start_time).total_seconds()
            rate = total_rows / elapsed if elapsed > 0 else 0
            print(f"[{datetime.now().isoformat()}] Progress: {total_rows:,} rows processed "
                  f"({rate:.0f} rows/sec), {len(unique_clients):,} unique clients found", file=sys.stderr)

    elapsed = (datetime.now() - start_time).total_seconds()
    rate = total_rows / elapsed if elapsed > 0 else 0

    print(f"[{datetime.now().isoformat()}] Processing complete: {total_rows:,} rows", file=sys.stderr)
    print(f"Rate: {rate:.0f} rows/sec, elapsed: {elapsed:.1f} seconds", file=sys.stderr)
    print(f"Skipped {skipped_deletes:,} deletes, {skipped_other:,} non-initialize events", file=sys.stderr)
    print(f"Found {len(unique_clients):,} unique client configurations", file=sys.stderr)

    # Most recently seen clients first.
    sorted_clients = sorted(
        unique_clients.values(),
        key=lambda x: x['last_seen'],
        reverse=True,
    )

    print(f"[{datetime.now().isoformat()}] Pushing to Hugging Face Hub...", file=sys.stderr)
    dataset = create_dataset_from_clients(sorted_clients)

    dataset.push_to_hub(
        repo_id=args.repo_id,
        commit_message=f"Update MCP clients dataset ({datetime.now().strftime('%Y-%m-%d %H:%M')})",
    )

    print(f"[{datetime.now().isoformat()}] Successfully pushed {len(sorted_clients):,} clients to {args.repo_id}", file=sys.stderr)
|
|
|
|
# Script entry point: run the extraction/update job when executed directly.
if __name__ == '__main__':
    main()
|
|