File size: 2,094 Bytes
d094faf
 
701d9c5
d094faf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
"""Build env-var dicts (or YAML-config snippets) that point an SDK at the proxy.

Two SDK shapes are covered today; add more here as agents arrive:

    anthropic_env(ep, model)   → Anthropic SDK / Claude Code CLI
    openai_env(ep, model)      → OpenAI SDK / Codex CLI

Plus `openai_yaml_block(ep, model)` for agents whose config files take
`base_url` / `api_key` / `model` fields directly (e.g. MLEvolve).

Usage from any agent runner:

    from agents.cliproxyapi import ProxyEndpoint, anthropic_env
    ep = ProxyEndpoint.from_env()
    subprocess.run(cmd, env={**os.environ, **anthropic_env(ep, model="...")})
"""

from __future__ import annotations

from .endpoint import ProxyEndpoint


def anthropic_env(ep: ProxyEndpoint, model: str | None = None) -> dict[str, str]:
    """Environment variables for the Anthropic SDK / claude-code CLI.

    ANTHROPIC_BASE_URL gets the bare proxy root: the SDK tacks
    `/v1/messages` onto it by itself, so no path suffix belongs here.
    ANTHROPIC_MODEL is only set when a model was given.
    """
    key = ep.api_key
    out: dict[str, str] = {
        "ANTHROPIC_BASE_URL": ep.base_url(),
        "ANTHROPIC_API_KEY": key,
        "ANTHROPIC_AUTH_TOKEN": key,
    }
    if model:
        out["ANTHROPIC_MODEL"] = model
    return out


def openai_env(ep: ProxyEndpoint, model: str | None = None) -> dict[str, str]:
    """Environment variables for the OpenAI SDK, codex-cli, and compatibles.

    Unlike the Anthropic SDK, the OpenAI client appends only
    `/chat/completions` (etc.) to its base URL, so the `/v1` prefix
    must already be part of the value we export. Both the current
    OPENAI_BASE_URL and the legacy OPENAI_API_BASE are populated.
    OPENAI_MODEL is only set when a model was given.
    """
    versioned = f"{ep.base_url()}/v1"
    out: dict[str, str] = {
        "OPENAI_BASE_URL": versioned,
        "OPENAI_API_KEY": ep.api_key,
        "OPENAI_API_BASE": versioned,  # legacy var, still common
    }
    if model:
        out["OPENAI_MODEL"] = model
    return out


def openai_yaml_block(ep: ProxyEndpoint, model: str) -> dict[str, str]:
    """Config-file fragment for agents that take the proxy fields inline.

    Produces the three keys (model / base_url / api_key) that tools like
    MLEvolve read straight from their YAML config; `base_url` carries the
    `/v1` prefix, matching `openai_env`.
    """
    return dict(
        model=model,
        base_url=f"{ep.base_url()}/v1",
        api_key=ep.api_key,
    )