evalstate HF Staff committed on
Commit
6408098
·
verified ·
1 Parent(s): 1f0657a

Update: read from raw split, push to deduplicated split

Browse files
Files changed (1) hide show
  1. deduplicate_clients.py +114 -34
deduplicate_clients.py CHANGED
@@ -18,7 +18,14 @@ It also strips " (via mcp-remote X.Y.Z)" suffixes from client names and reports
18
  the mcp-remote versions encountered.
19
 
20
  Usage:
21
- uv run deduplicate_clients.py [--source-repo REPO] [--target-repo REPO]
 
 
 
 
 
 
 
22
  """
23
 
24
  import sys
@@ -62,16 +69,97 @@ def normalize_capabilities(caps):
62
  return caps
63
 
64
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
  def main():
66
- parser = argparse.ArgumentParser(description="Deduplicate MCP clients by name and capabilities")
67
- parser.add_argument("--source-repo", default="evalstate/mcp-clients", help="Source repository ID")
68
- parser.add_argument("--target-repo", default="evalstate/mcp-clients-dedup", help="Target repository ID")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
69
  args = parser.parse_args()
70
 
71
- print(f"[{datetime.now().isoformat()}] Loading dataset from {args.source_repo}...", file=sys.stderr)
72
 
73
- # Load the source dataset
74
- ds = load_dataset(args.source_repo, split='train')
75
 
76
  print(f"[{datetime.now().isoformat()}] Loaded {len(ds):,} client records", file=sys.stderr)
77
 
@@ -89,21 +177,14 @@ def main():
89
  mcp_remote_versions[mcp_version] += 1
90
 
91
  # Normalize capabilities for grouping
92
- caps = row['capabilities']
93
- if isinstance(caps, str):
94
- try:
95
- caps_dict = json.loads(caps)
96
- caps_key = json.dumps(caps_dict, sort_keys=True)
97
- except Exception:
98
- caps_key = caps
99
- else:
100
- caps_key = json.dumps(caps, sort_keys=True) if caps else ''
101
 
102
  key = (clean_name, caps_key)
103
  groups[key].append({
104
  'name': clean_name,
105
  'version': row['version'],
106
- 'capabilities': caps_key,
107
  'last_seen': row['last_seen'],
108
  })
109
 
@@ -134,7 +215,7 @@ def main():
134
  deduplicated.append({
135
  'name': name,
136
  'versions': versions_str,
137
- 'capabilities': caps_key,
138
  'last_seen': latest_record['last_seen'],
139
  })
140
 
@@ -143,25 +224,24 @@ def main():
143
 
144
  print(f"[{datetime.now().isoformat()}] Created {len(deduplicated):,} deduplicated records", file=sys.stderr)
145
 
146
- # Create dataset
147
- features = Features({
148
- 'name': Value('string'),
149
- 'versions': Value('string'),
150
- 'capabilities': Value('string'),
151
- 'last_seen': Value('string'),
152
- })
153
-
154
- dataset = Dataset.from_list(deduplicated, features=features)
155
 
156
- # Push to Hub
157
- print(f"[{datetime.now().isoformat()}] Pushing deduplicated dataset to {args.target_repo}...", file=sys.stderr)
 
 
 
158
 
159
- dataset.push_to_hub(
160
- repo_id=args.target_repo,
161
- commit_message=f"Update deduplicated dataset ({datetime.now().strftime('%Y-%m-%d %H:%M')})",
162
- )
163
 
164
- print(f"[{datetime.now().isoformat()}] Successfully pushed {len(deduplicated):,} deduplicated records to {args.target_repo}", file=sys.stderr)
 
 
165
 
166
 
167
  if __name__ == '__main__':
 
18
  the mcp-remote versions encountered.
19
 
20
  Usage:
21
+ # Push deduplicated data to the same repo (evalstate/mcp-clients) under 'deduplicated' split
22
+ uv run deduplicate_clients.py --push-to-hub
23
+
24
+ # Read from raw, push to dedup split in same repo
25
+ uv run deduplicate_clients.py --source-split raw --target-split deduplicated --push-to-hub
26
+
27
+ # Output to local file
28
+ uv run deduplicate_clients.py -o deduplicated.ndjson
29
  """
30
 
31
  import sys
 
69
  return caps
70
 
71
 
72
def capabilities_to_string(caps):
    """Convert a capabilities value to a canonical JSON string for storage.

    Args:
        caps: The capabilities payload — typically a dict, possibly a
            pre-serialized string, None, or another JSON-serializable
            container.

    Returns:
        '' for None; the string unchanged for str input (assumed already
        serialized upstream — TODO confirm against callers); otherwise a
        canonical JSON string (sorted keys, so equal capabilities always
        produce identical strings and group together), falling back to
        str(caps) only for values json cannot encode.
    """
    if caps is None:
        return ''
    if isinstance(caps, str):
        # Pass through as-is; re-dumping would wrap it in extra quotes.
        return caps
    try:
        # sort_keys makes the output order-independent, which matters
        # because this string is used as a deduplication grouping key.
        return json.dumps(caps, sort_keys=True)
    except (TypeError, ValueError):
        # Non-JSON-serializable object: best-effort textual form.
        return str(caps)
79
+
80
+
81
def create_dataset_from_deduplicated(deduplicated_list, features=None):
    """Build a Hugging Face ``Dataset`` from deduplicated client records.

    Args:
        deduplicated_list: Iterable of dicts, each carrying 'name',
            'versions', 'capabilities', and 'last_seen' keys.
        features: Optional ``datasets.Features`` schema; defaults to four
            string columns matching the record keys.

    Returns:
        A ``Dataset`` whose 'capabilities' column holds the JSON-string
        form produced by ``capabilities_to_string``.
    """
    if features is None:
        # All four columns are plain strings.
        features = Features(
            {column: Value('string')
             for column in ('name', 'versions', 'capabilities', 'last_seen')}
        )

    records = [
        {
            'name': client['name'],
            'versions': client['versions'],
            # Capabilities may still be a dict here; serialize for storage.
            'capabilities': capabilities_to_string(client['capabilities']),
            'last_seen': client['last_seen'],
        }
        for client in deduplicated_list
    ]

    return Dataset.from_list(records, features=features)
101
+
102
+
103
def push_to_hub(deduplicated_list, repo_id, split=None, token=None, private=False):
    """Upload the deduplicated client records to the Hugging Face Hub.

    Args:
        deduplicated_list: Deduplicated record dicts to upload.
        repo_id: Target dataset repository (e.g. 'org/name').
        split: Optional split name to push into.
        token: Optional HF auth token (falls back to env/cached token).
        private: Whether to create/push a private repository.

    Side effects:
        Prints progress to stderr and performs a network push to the Hub.
    """
    dataset = create_dataset_from_deduplicated(deduplicated_list)

    # Announce the destination, including the split when one was given.
    split_note = f" (split: {split})" if split else ""
    print(
        f"Pushing dataset to https://huggingface.co/datasets/{repo_id}{split_note}",
        file=sys.stderr,
    )

    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M')
    dataset.push_to_hub(
        repo_id=repo_id,
        split=split,
        token=token,
        private=private,
        commit_message=f"Update deduplicated clients ({timestamp})",
    )

    print(
        f"Successfully pushed {len(deduplicated_list):,} deduplicated records to {repo_id}",
        file=sys.stderr,
    )
121
+
122
+
123
  def main():
124
+ parser = argparse.ArgumentParser(
125
+ description="Deduplicate MCP clients by name and capabilities",
126
+ formatter_class=argparse.RawDescriptionHelpFormatter,
127
+ epilog="""
128
+ Examples:
129
+ # Push deduplicated data to same repo under 'deduplicated' split
130
+ %(prog)s --push-to-hub
131
+
132
+ # Use custom source/target splits
133
+ %(prog)s --source-split raw --target-split dedup --push-to-hub
134
+
135
+ # Output to local file
136
+ %(prog)s -o deduplicated.ndjson
137
+
138
+ # Push to a different repo
139
+ %(prog)s --target-repo my-org/mcp-clients-dedup --push-to-hub
140
+ """
141
+ )
142
+ parser.add_argument("-o", "--output", help="Output file path (default: stdout)")
143
+ parser.add_argument("--source-repo", default="evalstate/mcp-clients",
144
+ help="Source repository ID (default: evalstate/mcp-clients)")
145
+ parser.add_argument("--source-split", default="raw",
146
+ help="Source split name (default: raw)")
147
+ parser.add_argument("--target-repo", default="evalstate/mcp-clients",
148
+ help="Target repository ID (default: evalstate/mcp-clients)")
149
+ parser.add_argument("--target-split", default="deduplicated",
150
+ help="Target split name (default: deduplicated)")
151
+ parser.add_argument("--push-to-hub", action="store_true",
152
+ help="Push the resulting dataset to Hugging Face Hub")
153
+ parser.add_argument("--token", default=None,
154
+ help="HF token (defaults to HF_TOKEN env var or cached token)")
155
+ parser.add_argument("--private", action="store_true",
156
+ help="Create/push to a private repository")
157
  args = parser.parse_args()
158
 
159
+ print(f"[{datetime.now().isoformat()}] Loading dataset from {args.source_repo} ({args.source_split} split)...", file=sys.stderr)
160
 
161
+ # Load the source dataset from the specified split
162
+ ds = load_dataset(args.source_repo, split=args.source_split)
163
 
164
  print(f"[{datetime.now().isoformat()}] Loaded {len(ds):,} client records", file=sys.stderr)
165
 
 
177
  mcp_remote_versions[mcp_version] += 1
178
 
179
  # Normalize capabilities for grouping
180
+ caps = normalize_capabilities(row['capabilities'])
181
+ caps_key = capabilities_to_string(caps)
 
 
 
 
 
 
 
182
 
183
  key = (clean_name, caps_key)
184
  groups[key].append({
185
  'name': clean_name,
186
  'version': row['version'],
187
+ 'capabilities': caps,
188
  'last_seen': row['last_seen'],
189
  })
190
 
 
215
  deduplicated.append({
216
  'name': name,
217
  'versions': versions_str,
218
+ 'capabilities': latest_record['capabilities'],
219
  'last_seen': latest_record['last_seen'],
220
  })
221
 
 
224
 
225
  print(f"[{datetime.now().isoformat()}] Created {len(deduplicated):,} deduplicated records", file=sys.stderr)
226
 
227
+ # Handle push to hub
228
+ if args.push_to_hub:
229
+ push_to_hub(deduplicated, args.target_repo, split=args.target_split, token=args.token, private=args.private)
230
+ return
 
 
 
 
 
231
 
232
+ # Handle local output
233
+ if args.output:
234
+ out_file = open(args.output, 'w')
235
+ else:
236
+ out_file = sys.stdout
237
 
238
+ # Output as NDJSON
239
+ for client in deduplicated:
240
+ out_file.write(json.dumps(client) + '\n')
 
241
 
242
+ if args.output:
243
+ out_file.close()
244
+ print(f"Output written to: {args.output}", file=sys.stderr)
245
 
246
 
247
  if __name__ == '__main__':