evalstate HF Staff committed on
Commit
eb0439e
·
verified ·
1 Parent(s): f24a4f7

Upload mcp_remote_daily_job.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. mcp_remote_daily_job.py +6 -12
mcp_remote_daily_job.py CHANGED
@@ -13,28 +13,26 @@ This script analyzes the evalstate/hf-mcp-logs dataset to calculate what proport
13
  of sessions included "mcp-remote" in the client name on a daily basis.
14
 
15
  Usage:
16
- uv run mcp_remote_daily_job.py [--limit N]
17
  """
18
 
19
  import sys
 
20
  import re
21
- import argparse
22
  from datetime import datetime
23
  from collections import defaultdict
24
  from datasets import load_dataset
25
 
26
 
27
  def main():
28
- parser = argparse.ArgumentParser(description="Analyze daily mcp-remote usage")
29
- parser.add_argument("--limit", type=int, default=None, help="Limit processing to N rows (for testing)")
30
- args = parser.parse_args()
31
 
32
  print(f"[{datetime.now().isoformat()}] Loading dataset from evalstate/hf-mcp-logs...", file=sys.stderr)
33
- if args.limit:
34
- print(f"[{datetime.now().isoformat()}] LIMIT MODE: Will process at most {args.limit:,} rows", file=sys.stderr)
35
 
36
  # Load the source dataset (streaming for large datasets)
37
- ds = load_dataset('evalstate/hf-mcp-logs', 'sessions', streaming=True)
 
38
  sessions_ds = ds['sessions']
39
 
40
  # Pattern to detect mcp-remote suffix (any version)
@@ -59,10 +57,6 @@ def main():
59
  print(f"[{datetime.now().isoformat()}] Progress: {total_rows:,} rows processed "
60
  f"({rate:.0f} rows/sec)", file=sys.stderr)
61
 
62
- if args.limit and total_rows >= args.limit:
63
- print(f"[{datetime.now().isoformat()}] Reached limit of {args.limit:,} rows", file=sys.stderr)
64
- break
65
-
66
  for i in range(batch_len):
67
  time_str = batch['time'][i]
68
  name = batch['name'][i]
 
13
  of sessions included "mcp-remote" in the client name on a daily basis.
14
 
15
  Usage:
16
+ uv run mcp_remote_daily_job.py
17
  """
18
 
19
  import sys
20
+ import os
21
  import re
 
22
  from datetime import datetime
23
  from collections import defaultdict
24
  from datasets import load_dataset
25
 
26
 
27
  def main():
28
+ # Get HF_TOKEN from environment (passed as secret in HF Jobs)
29
+ hf_token = os.environ.get("HF_TOKEN")
 
30
 
31
  print(f"[{datetime.now().isoformat()}] Loading dataset from evalstate/hf-mcp-logs...", file=sys.stderr)
 
 
32
 
33
  # Load the source dataset (streaming for large datasets)
34
+ # Pass token explicitly for private dataset access
35
+ ds = load_dataset('evalstate/hf-mcp-logs', 'sessions', streaming=True, token=hf_token)
36
  sessions_ds = ds['sessions']
37
 
38
  # Pattern to detect mcp-remote suffix (any version)
 
57
  print(f"[{datetime.now().isoformat()}] Progress: {total_rows:,} rows processed "
58
  f"({rate:.0f} rows/sec)", file=sys.stderr)
59
 
 
 
 
 
60
  for i in range(batch_len):
61
  time_str = batch['time'][i]
62
  name = batch['name'][i]