evalstate HF Staff commited on
Commit
0234efd
·
verified ·
1 Parent(s): 9347f81

Delete scripts

Browse files
Files changed (1) hide show
  1. scripts/mcp_clients_job.py +0 -167
scripts/mcp_clients_job.py DELETED
@@ -1,167 +0,0 @@
1
- #!/usr/bin/env -S uv run
2
- # /// script
3
- # requires-python = ">=3.10"
4
- # dependencies = [
5
- # "datasets",
6
- # "huggingface_hub",
7
- # ]
8
- # ///
9
- """
10
- Scheduled job to extract and update MCP clients dataset.
11
- This script runs via HF Jobs to keep the dataset fresh.
12
-
13
- Usage:
14
- uv run mcp_clients_job.py [--limit N]
15
- """
16
-
17
- import sys
18
- import json
19
- import argparse
20
- from datetime import datetime
21
-
22
- from datasets import Dataset, Features, Value
23
-
24
-
25
def normalize_capabilities(caps):
    """Coerce a capabilities value into a comparable form.

    Strings are parsed as JSON when possible; a string that fails to
    parse — or any non-string value, including None — passes through
    unchanged.
    """
    if not isinstance(caps, str):
        # None, dicts, and anything else pass through untouched.
        return caps
    if caps == '{}':
        # Fast path for the common empty-object string.
        return {}
    try:
        return json.loads(caps)
    except Exception:
        # Not valid JSON -- keep the raw string for comparison.
        return caps
37
-
38
-
39
def create_dataset_from_clients(clients_list):
    """Build a Hugging Face Dataset from a list of client records.

    Each record's 'capabilities' value is serialized to a string so the
    column has one uniform dtype: dicts become sorted-key JSON, None
    becomes the empty string, anything else is str()-ified.
    """
    def _caps_to_str(caps):
        # Flatten every in-memory representation into one string column.
        if isinstance(caps, dict):
            return json.dumps(caps, sort_keys=True)
        if caps is None:
            return ''
        return str(caps)

    schema = Features({
        'name': Value('string'),
        'version': Value('string'),
        'capabilities': Value('string'),  # Store as JSON string for consistency
        'last_seen': Value('string'),
    })

    rows = [
        {
            'name': client['name'],
            'version': client['version'],
            'capabilities': _caps_to_str(client['capabilities']),
            'last_seen': client['last_seen'],
        }
        for client in clients_list
    ]

    return Dataset.from_list(rows, features=schema)
67
-
68
-
69
def main():
    """Stream MCP session logs, dedupe client configurations, push to the Hub.

    Streams the 'sessions' split of evalstate/hf-mcp-logs, keeps the most
    recently seen record for every unique (name, version, capabilities)
    combination found in 'initialize' events, then publishes the result as
    a dataset to --repo-id. All progress output goes to stderr.
    """
    parser = argparse.ArgumentParser(description="Extract and update MCP clients dataset")
    parser.add_argument("--limit", type=int, default=None, help="Limit processing to N rows (for testing)")
    parser.add_argument("--repo-id", default="evalstate/mcp-clients", help="Target repository ID")
    args = parser.parse_args()

    print(f"[{datetime.now().isoformat()}] Starting MCP clients extraction...", file=sys.stderr)
    if args.limit:
        print(f"[{datetime.now().isoformat()}] LIMIT MODE: Will process at most {args.limit:,} rows", file=sys.stderr)

    # (name, version, serialized-capabilities) -> most recent client record.
    unique_clients = {}

    # Imported lazily so argument errors surface before the heavy import.
    from datasets import load_dataset
    ds = load_dataset('evalstate/hf-mcp-logs', 'sessions', streaming=True)
    sessions_ds = ds['sessions']

    total_rows = 0
    skipped_deletes = 0
    skipped_other = 0
    start_time = datetime.now()

    for batch in sessions_ds.iter(batch_size=1000):
        batch_len = len(batch['name'])
        total_rows += batch_len

        # Progress indicator every 100k rows (batch_size=1000 keeps the
        # running total on round multiples, so this check actually fires).
        if total_rows % 100000 == 0:
            elapsed = (datetime.now() - start_time).total_seconds()
            rate = total_rows / elapsed if elapsed > 0 else 0
            print(f"[{datetime.now().isoformat()}] Progress: {total_rows:,} rows processed "
                  f"({rate:.0f} rows/sec), {len(unique_clients):,} unique clients found", file=sys.stderr)

        for i in range(batch_len):
            method_name = batch['methodName'][i] if 'methodName' in batch else None

            # Session teardown events carry no client info.
            if method_name == 'session_delete':
                skipped_deletes += 1
                continue

            # Only 'initialize' events describe the connecting client.
            if method_name != 'initialize':
                skipped_other += 1
                continue

            name = batch['name'][i]
            version = batch['version'][i]
            capabilities = normalize_capabilities(batch['capabilities'][i])
            time_str = batch['time'][i]

            # Skip incomplete records.
            if not all([name, version, time_str]):
                continue

            # Serialize dict capabilities with sorted keys so equal dicts
            # always yield the same hashable dedup key.
            if isinstance(capabilities, dict):
                cap_key = json.dumps(capabilities, sort_keys=True)
            else:
                cap_key = str(capabilities) if capabilities is not None else None

            key = (name, version, cap_key)

            # Keep only the most recent sighting; ISO-8601 timestamps
            # compare correctly as strings.
            if key not in unique_clients or time_str > unique_clients[key]['last_seen']:
                unique_clients[key] = {
                    'name': name,
                    'version': version,
                    'capabilities': capabilities,
                    'last_seen': time_str
                }

        # BUG FIX: the limit check previously ran *before* the inner loop,
        # so the batch that crossed the threshold was counted but never
        # processed -- with --limit <= 1000 (the batch size) the job
        # processed zero rows. Checking after the batch honors
        # "process at most ~limit rows".
        if args.limit and total_rows >= args.limit:
            print(f"[{datetime.now().isoformat()}] Reached limit of {args.limit:,} rows", file=sys.stderr)
            break

    elapsed = (datetime.now() - start_time).total_seconds()
    rate = total_rows / elapsed if elapsed > 0 else 0

    print(f"[{datetime.now().isoformat()}] Processing complete: {total_rows:,} rows", file=sys.stderr)
    print(f"Rate: {rate:.0f} rows/sec, elapsed: {elapsed:.1f} seconds", file=sys.stderr)
    print(f"Skipped {skipped_deletes:,} deletes, {skipped_other:,} non-initialize events", file=sys.stderr)
    print(f"Found {len(unique_clients):,} unique client configurations", file=sys.stderr)

    # Sort by last_seen descending: most recently seen clients first.
    sorted_clients = sorted(
        unique_clients.values(),
        key=lambda x: x['last_seen'],
        reverse=True
    )

    # Push to Hub
    print(f"[{datetime.now().isoformat()}] Pushing to Hugging Face Hub...", file=sys.stderr)
    dataset = create_dataset_from_clients(sorted_clients)

    dataset.push_to_hub(
        repo_id=args.repo_id,
        commit_message=f"Update MCP clients dataset ({datetime.now().strftime('%Y-%m-%d %H:%M')})",
    )

    print(f"[{datetime.now().isoformat()}] Successfully pushed {len(sorted_clients):,} clients to {args.repo_id}", file=sys.stderr)
164
-
165
-
166
# Script entry point: run the extraction job when executed directly
# (e.g. via `uv run mcp_clients_job.py`).
if __name__ == '__main__':
    main()