File size: 7,819 Bytes
e965652
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21ff762
e965652
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21ff762
 
 
 
 
e965652
 
 
21ff762
e965652
 
 
 
 
 
 
 
 
21ff762
e965652
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21ff762
e965652
21ff762
 
 
 
 
 
e965652
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
#!/usr/bin/env python3
"""
mcp_fetch.py -- Dispatcher for MCP catalog sources.

Emits one JSON record per line on stdout so the output pipes directly into
``ctx-mcp-add --from-stdin``.  The dispatcher knows nothing about specific
catalog shapes; it resolves a source by name from
:data:`mcp_sources.SOURCES`, calls its ``fetch`` method, and serialises
each yielded dict as JSONL.

Usage
-----
    ctx-mcp-fetch --source awesome-mcp [--limit N] [--refresh]
    ctx-mcp-fetch --source all [--limit N]
    ctx-mcp-fetch --list-sources

Downstream pipe
---------------
    ctx-mcp-fetch --source awesome-mcp --limit 5 | ctx-mcp-add --from-stdin
"""

from __future__ import annotations

import argparse
import json
import sys
from typing import Iterator

from mcp_sources import SOURCES


def _emit(records: Iterator[dict]) -> int:
    """Write *records* as JSONL to stdout.  Return the count emitted."""
    count = 0
    for rec in records:
        # ``separators`` kills the default ", " / ": " whitespace so each
        # line is compact; JSONL consumers treat each line as a standalone
        # document, so trailing whitespace is wasted bytes at batch scale.
        sys.stdout.write(json.dumps(rec, ensure_ascii=False, separators=(",", ":")))
        sys.stdout.write("\n")
        count += 1
    sys.stdout.flush()
    return count


def _run_one(
    source_name: str, *, limit: int | None, refresh: bool, verbosity: int = 0
) -> tuple[int, int]:
    """Fetch and emit records from a single named source.

    Returns ``(emitted, errors)`` where ``errors`` is 0 on success and 1
    when the source is unknown or its fetch raised.
    """
    if source_name not in SOURCES:
        known = ", ".join(sorted(SOURCES)) or "(none registered)"
        print(
            f"Error: unknown source {source_name!r}. Known: {known}",
            file=sys.stderr,
        )
        return 0, 1
    source = SOURCES[source_name]

    try:
        emitted = _emit(source.fetch(limit=limit, refresh=refresh))
    except Exception as exc:  # noqa: BLE001 — dispatcher must not leak tracebacks to pipes
        print(f"Error: source {source_name!r} failed: {exc}", file=sys.stderr)
        return 0, 1

    if verbosity > 0:
        print(
            f"[{source_name}] emitted {emitted} record(s)",
            file=sys.stderr,
        )
    return emitted, 0


def _run_all(*, limit: int | None, verbosity: int = 0) -> tuple[int, int]:
    """Run every registered source in sorted order; sum emissions and errors.

    ``--limit`` is applied *per source*, not globally: a single shared cap
    would bias output toward whichever source iterates first, which defeats
    the point of listing them side-by-side.
    """
    emitted_sum = 0
    error_sum = 0
    for source_name in sorted(SOURCES):
        emitted, errors = _run_one(
            source_name, limit=limit, refresh=False, verbosity=verbosity
        )
        emitted_sum += emitted
        error_sum += errors
    return emitted_sum, error_sum


def _list_sources() -> int:
    """Print each registered source as ``name<TAB>homepage``.  Always return 0."""
    if not SOURCES:
        print("(no sources registered)")
        return 0
    for name in sorted(SOURCES):
        print(f"{name}\t{SOURCES[name].homepage}")
    return 0


def _build_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(
        prog="ctx-mcp-fetch",
        description=(
            "Fetch MCP server records from a registered catalog source "
            "and emit them as JSONL on stdout."
        ),
    )
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument(
        "--source",
        metavar="NAME",
        help="Source name (e.g. 'awesome-mcp') or 'all' for every registered source",
    )
    group.add_argument(
        "--list-sources",
        action="store_true",
        help="List registered sources with their homepages and exit",
    )
    parser.add_argument(
        "--limit",
        type=int,
        default=None,
        help="Cap the number of records yielded per source (default: no cap)",
    )
    parser.add_argument(
        "--refresh",
        action="store_true",
        help="Bypass the local raw cache and fetch fresh upstream content",
    )
    parser.add_argument(
        "-v", "--verbose",
        action="count",
        default=0,
        help=(
            "Enable progress logging to stderr. -v for INFO (parse counts, "
            "page progress), -vv for DEBUG (per-entry skip reasons). Library "
            "modules emit via ``logging`` by default but are silent unless "
            "this flag wires up basicConfig."
        ),
    )
    return parser


def _configure_logging(verbosity: int) -> None:
    """Wire logging.basicConfig for CLI visibility.



    Phase 2.5 replaced print(stderr) in library code with logging calls

    which are silent by default. This helper lights them up on demand.

    stderr (not stdout) so JSONL pipe consumers stay clean.

    """
    if verbosity <= 0:
        return
    import logging  # noqa: PLC0415 — local import keeps cold-path cost off imports
    level = logging.DEBUG if verbosity >= 2 else logging.INFO
    logging.basicConfig(
        level=level,
        format="[%(name)s] %(message)s",
        stream=sys.stderr,
    )


def _force_utf8_stdio() -> None:
    """Reconfigure stdout/stderr to UTF-8 on platforms that default to

    something narrower (Windows cp1252).



    The JSONL output uses ``ensure_ascii=False`` so non-ASCII names,

    descriptions, and emoji flow through verbatim — which crashes the

    default Windows console with ``UnicodeEncodeError: 'charmap' codec``

    the moment a record contains a non-Latin-1 character. Real pulsemcp

    records routinely include CJK, emoji, and accented characters, so

    this is guaranteed to fire on any non-trivial run on Windows.



    ``reconfigure`` is a best-effort call: if the stream has already been

    replaced (e.g. in tests that capture stdout) or if the platform's

    stdio doesn't support reconfigure, the original encoding stays.

    """
    for stream in (sys.stdout, sys.stderr):
        reconfigure = getattr(stream, "reconfigure", None)
        if reconfigure is None:
            continue
        try:
            reconfigure(encoding="utf-8", errors="replace")
        except (OSError, ValueError):
            # Closed stream or stream that rejects reconfiguration —
            # not worth failing the run over.
            pass


def main() -> None:
    """Entry point for the ``ctx-mcp-fetch`` console script.

    Exit codes: 0 on success, 1 if any source failed, 2 for bad usage.
    """
    _force_utf8_stdio()
    args = _build_parser().parse_args()
    _configure_logging(args.verbose)

    if args.list_sources:
        sys.exit(_list_sources())

    if args.limit is not None and args.limit <= 0:
        print("Error: --limit must be a positive integer.", file=sys.stderr)
        sys.exit(2)

    if args.source != "all":
        _, errors = _run_one(
            args.source,
            limit=args.limit,
            refresh=args.refresh,
            verbosity=args.verbose,
        )
    else:
        if args.refresh:
            # Refusing --refresh with 'all': silently re-fetching every
            # source is almost never what the operator intends, so fail
            # loudly rather than surprise them with a long network burst.
            print(
                "Error: --refresh is not supported with --source all; "
                "refresh one source at a time.",
                file=sys.stderr,
            )
            sys.exit(2)
        _, errors = _run_all(limit=args.limit, verbosity=args.verbose)

    sys.exit(1 if errors else 0)


# Support direct invocation (`python mcp_fetch.py ...`) in addition to the
# installed console-script entry point.
if __name__ == "__main__":
    main()