ohmyapi committed on
Commit
36d5e18
·
0 Parent(s):

Deploy emergent2api

Browse files
Dockerfile ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Slim Python 3.12 runtime image for the emergent2api service.
FROM python:3.12-slim

WORKDIR /app

# gcc + libpq headers are needed to build Postgres client C extensions;
# clearing the apt cache in the same layer keeps the image small.
RUN apt-get update && apt-get install -y --no-install-recommends \
    gcc libpq-dev && \
    rm -rf /var/lib/apt/lists/*

# Copy and install requirements first so this layer is cached across code changes.
COPY requirements.txt requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

# 7860 matches the app_port declared in the README front matter.
ENV PORT=7860
EXPOSE 7860

CMD ["python", "-m", "emergent2api.app"]
README.md ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ title: Emergent2API
3
+ emoji: 🤖
4
+ colorFrom: blue
5
+ colorTo: purple
6
+ sdk: docker
7
+ app_port: 7860
8
+ pinned: false
9
+ ---
emergent2api/__init__.py ADDED
File without changes
emergent2api/app.py ADDED
@@ -0,0 +1,185 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Emergent2API — OpenAI/Anthropic-compatible API gateway for Emergent.sh accounts."""
2
+ from __future__ import annotations
3
+
4
import logging
import os
import secrets

from fastapi import FastAPI, Request
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import JSONResponse

from . import database as db
from .config import settings
from .pool import pool
from .routes import openai, anthropic, responses
15
+
16
+ logging.basicConfig(
17
+ level=logging.INFO,
18
+ format="%(asctime)s [%(name)s] %(levelname)s %(message)s",
19
+ )
20
+ logger = logging.getLogger("emergent2api")
21
+
22
+ app = FastAPI(
23
+ title="Emergent2API",
24
+ description="OpenAI/Anthropic-compatible API backed by Emergent.sh account pool",
25
+ version="0.1.0",
26
+ )
27
+
28
+ app.add_middleware(
29
+ CORSMiddleware,
30
+ allow_origins=["*"],
31
+ allow_credentials=True,
32
+ allow_methods=["*"],
33
+ allow_headers=["*"],
34
+ )
35
+
36
+
37
+ # ---------------------------------------------------------------------------
38
+ # Auth middleware
39
+ # ---------------------------------------------------------------------------
40
+
41
@app.middleware("http")
async def auth_middleware(request: Request, call_next):
    """Require the configured API key on every request except public paths.

    Accepts either ``Authorization: Bearer <key>`` (OpenAI style) or an
    ``x-api-key`` header (Anthropic style). Returns an OpenAI-shaped 401
    error body on failure.
    """
    path = request.url.path

    # Health check, service info and the auto-generated docs stay public.
    if path in ("/", "/health", "/docs", "/openapi.json", "/redoc"):
        return await call_next(request)

    auth_header = request.headers.get("authorization", "")
    api_key = request.headers.get("x-api-key", "")

    token = ""
    if auth_header.startswith("Bearer "):
        token = auth_header[7:]
    elif api_key:
        token = api_key

    # SECURITY: constant-time comparison — plain `!=` short-circuits on the
    # first differing byte and can leak key prefixes via response timing.
    if not secrets.compare_digest(token.encode("utf-8"), settings.api_key.encode("utf-8")):
        return JSONResponse(
            status_code=401,
            content={"error": {"message": "Invalid API key", "type": "authentication_error"}},
        )

    return await call_next(request)
66
+
67
+
68
+ # ---------------------------------------------------------------------------
69
+ # Lifecycle
70
+ # ---------------------------------------------------------------------------
71
+
72
@app.on_event("startup")
async def startup():
    """Initialise the database schema and log account-pool status on boot."""
    logger.info("Starting Emergent2API...")
    logger.info(f"Backend: {settings.backend}")
    logger.info(f"Proxy: {settings.proxy or 'none'}")

    await db.init_db()
    counts = await pool.count()
    logger.info(f"Account pool: {counts['active']} active / {counts['total']} total")
81
+
82
+
83
@app.on_event("shutdown")
async def shutdown():
    """Release the database connection pool on service shutdown."""
    await db.close_db()
    logger.info("Emergent2API stopped")
87
+
88
+
89
+ # ---------------------------------------------------------------------------
90
+ # Routes
91
+ # ---------------------------------------------------------------------------
92
+
93
+ app.include_router(openai.router)
94
+ app.include_router(anthropic.router)
95
+ app.include_router(responses.router)
96
+
97
+
98
@app.get("/")
async def root():
    """Service metadata: version, configured backend, pool counts, endpoints."""
    counts = await pool.count()
    endpoints = [
        "POST /v1/chat/completions (OpenAI)",
        "POST /v1/messages (Anthropic)",
        "POST /v1/responses (OpenAI Response API)",
        "GET /v1/models",
        "GET /admin/accounts",
        "POST /admin/accounts",
    ]
    return {
        "service": "Emergent2API",
        "version": "0.1.0",
        "backend": settings.backend,
        "accounts": counts,
        "endpoints": endpoints,
    }
115
+
116
+
117
@app.get("/health")
async def health():
    """Liveness probe; also reports current account counts."""
    return {"status": "ok", "accounts": await pool.count()}
121
+
122
+
123
+ # ---------------------------------------------------------------------------
124
+ # Admin endpoints
125
+ # ---------------------------------------------------------------------------
126
+
127
@app.get("/admin/accounts")
async def admin_list_accounts():
    """List active pool accounts; secrets (password/jwt) are not exposed."""
    accounts = await db.get_active_accounts()

    def _public_view(acct):
        # Only non-secret columns; timestamps rendered as ISO-8601 or None.
        return {
            "id": acct["id"],
            "email": acct["email"],
            "balance": acct["balance"],
            "is_active": acct["is_active"],
            "last_used": acct["last_used"].isoformat() if acct["last_used"] else None,
            "created_at": acct["created_at"].isoformat() if acct["created_at"] else None,
        }

    return {
        "total": len(accounts),
        "accounts": [_public_view(a) for a in accounts],
    }
144
+
145
+
146
@app.post("/admin/accounts")
async def admin_add_account(request: Request):
    """Add an account manually. Body: {email, password, jwt, refresh_token?, user_id?, balance?}

    Returns 400 for malformed JSON, a non-object body, or missing required
    fields (previously a malformed body would raise and surface as a 500).
    """
    try:
        body = await request.json()
    except Exception:
        return JSONResponse(
            status_code=400,
            content={"error": "Request body must be valid JSON"},
        )
    if not isinstance(body, dict):
        return JSONResponse(
            status_code=400,
            content={"error": "Request body must be a JSON object"},
        )

    required = ["email", "password", "jwt"]
    for field in required:
        if field not in body:
            return JSONResponse(
                status_code=400,
                content={"error": f"Missing required field: {field}"},
            )
    account_id = await db.upsert_account(body)
    return {"id": account_id, "email": body["email"], "status": "added"}
159
+
160
+
161
@app.post("/admin/accounts/import")
async def admin_import_accounts(request: Request):
    """Bulk-import accounts. Body: {accounts: [{email, password, jwt, ...}, ...]}

    Entries missing any of email/password/jwt are skipped silently.
    """
    body = await request.json()
    submitted = body.get("accounts", [])
    required = ("email", "password", "jwt")
    imported = 0
    for acct in submitted:
        if all(key in acct for key in required):
            await db.upsert_account(acct)
            imported += 1
    return {"imported": imported, "total_submitted": len(submitted)}
172
+
173
+
174
+ # ---------------------------------------------------------------------------
175
+ # Entrypoint
176
+ # ---------------------------------------------------------------------------
177
+
178
if __name__ == "__main__":
    # Local/dev entrypoint; in the container this runs via `python -m`.
    import uvicorn

    uvicorn.run(
        "emergent2api.app:app",
        host="0.0.0.0",
        port=settings.port,
        reload=False,
    )
emergent2api/backends/__init__.py ADDED
File without changes
emergent2api/backends/base.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Abstract base for Emergent API backends."""
2
+ from __future__ import annotations
3
+
4
+ from abc import ABC, abstractmethod
5
+ from typing import Any, AsyncGenerator, Optional
6
+
7
+
8
class EmergentBackend(ABC):
    """Interface that every Emergent backend implementation must satisfy."""

    @abstractmethod
    async def chat(
        self,
        messages: list[dict[str, Any]],
        model: str,
        thinking: bool,
        account: dict[str, Any],
        stream: bool = False,
    ) -> AsyncGenerator[dict[str, Any], None]:
        """Yield response chunks. Each chunk is {"type": ..., "content": ...}.

        Chunk types:
        - "thinking": reasoning content (extended thinking)
        - "text": main response text
        - "done": final signal with full text
        - "error": error message
        """
        ...
emergent2api/backends/integrations.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Integrations API backend: proxy to integrations.emergentagent.com/llm."""
2
+ from __future__ import annotations
3
+
4
+ import asyncio
5
+ import json
6
+ import logging
7
+ from typing import Any, AsyncGenerator
8
+
9
+ import httpx
10
+
11
+ from ..config import settings
12
+ from .base import EmergentBackend
13
+
14
+ logger = logging.getLogger("emergent2api.integrations")
15
+
16
+
17
class IntegrationsBackend(EmergentBackend):
    """Proxy to Emergent's OpenAI-compatible integrations endpoint.

    Requires a US-based proxy since the endpoint has IP restrictions.
    """

    async def chat(
        self,
        messages: list[dict[str, Any]],
        model: str,
        thinking: bool,
        account: dict[str, Any],
        stream: bool = False,
    ) -> AsyncGenerator[dict[str, Any], None]:
        """Forward the chat request upstream and yield chunks in the common backend format."""
        # Per-account bearer token issued by Emergent's auth service.
        jwt = account["jwt"]
        url = f"{settings.integrations_base}/v1/chat/completions"

        # NOTE(review): `thinking` is accepted for interface parity but not
        # forwarded — only model/messages/stream go upstream. Confirm whether
        # the integrations endpoint supports a thinking flag.
        body = {
            "model": model,
            "messages": messages,
            "stream": stream,
        }

        # Route through the configured proxy (the endpoint is IP-restricted).
        proxy = settings.proxy or None
        transport = httpx.AsyncHTTPTransport(proxy=proxy) if proxy else None

        try:
            async with httpx.AsyncClient(
                transport=transport,
                timeout=httpx.Timeout(120.0, connect=15.0),
            ) as client:
                if stream:
                    async with client.stream(
                        "POST",
                        url,
                        json=body,
                        headers={
                            "Authorization": f"Bearer {jwt}",
                            "Content-Type": "application/json",
                        },
                    ) as resp:
                        if resp.status_code != 200:
                            # Read the (truncated) error body before surfacing it.
                            err = await resp.aread()
                            yield {"type": "error", "content": f"Integrations API: HTTP {resp.status_code} {err.decode()[:200]}"}
                            return

                        # Re-assemble OpenAI-style SSE "data:" lines into text deltas.
                        full_text = ""
                        async for line in resp.aiter_lines():
                            if not line.startswith("data: "):
                                continue
                            data_str = line[6:].strip()
                            if data_str == "[DONE]":
                                break
                            try:
                                chunk = json.loads(data_str)
                                delta = chunk.get("choices", [{}])[0].get("delta", {})
                                content = delta.get("content", "")
                                if content:
                                    full_text += content
                                    yield {"type": "text", "content": content}
                            except json.JSONDecodeError:
                                # Skip malformed SSE payloads rather than abort the stream.
                                continue

                        yield {"type": "done", "content": full_text, "thinking": ""}
                else:
                    # Non-streaming: single request/response round trip.
                    resp = await client.post(
                        url,
                        json=body,
                        headers={
                            "Authorization": f"Bearer {jwt}",
                            "Content-Type": "application/json",
                        },
                    )
                    if resp.status_code != 200:
                        yield {"type": "error", "content": f"Integrations API: HTTP {resp.status_code} {resp.text[:200]}"}
                        return

                    data = resp.json()
                    content = data.get("choices", [{}])[0].get("message", {}).get("content", "")
                    yield {"type": "done", "content": content, "thinking": ""}

        except Exception as e:
            # Network/transport failures surface as an "error" chunk, not a raise.
            logger.error(f"Integrations backend error: {e}")
            yield {"type": "error", "content": str(e)}
emergent2api/backends/jobs.py ADDED
@@ -0,0 +1,218 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Jobs API backend: submit task + poll trajectory for responses."""
2
+ from __future__ import annotations
3
+
4
+ import asyncio
5
+ import json
6
+ import logging
7
+ import time
8
+ import uuid
9
+ from typing import Any, AsyncGenerator
10
+
11
+ from curl_cffi import requests as cffi_requests
12
+
13
+ from ..config import settings
14
+ from .base import EmergentBackend
15
+
16
+ logger = logging.getLogger("emergent2api.jobs")
17
+
18
+
19
class JobsBackend(EmergentBackend):
    """Wraps Emergent's Jobs API (submit-queue + trajectory polling) into a chat interface."""

    async def chat(
        self,
        messages: list[dict[str, Any]],
        model: str,
        thinking: bool,
        account: dict[str, Any],
        stream: bool = False,
    ) -> AsyncGenerator[dict[str, Any], None]:
        """Submit a job, then poll its trajectory and yield chunks until done.

        Yields "thinking"/"text" deltas while streaming, an "error" chunk on
        submission failure, and always a final "done" chunk with the full text.
        """
        jwt = account["jwt"]

        # Flatten messages into a single task prompt
        task = self._messages_to_task(messages)

        # Submit job (blocking curl_cffi call, run off the event loop).
        ref_id = await asyncio.to_thread(
            self._submit_job, jwt, task, model, thinking
        )
        if not ref_id:
            yield {"type": "error", "content": "Failed to submit job to Emergent"}
            return

        logger.info(f"Job submitted: ref_id={ref_id}, model={model}")

        # Poll trajectory for results. The trajectory returns cumulative
        # "reasoning_content"/"thought" strings; deltas are derived by slicing
        # off the previously seen prefix.
        full_text = ""
        full_thinking = ""
        seen_items: set[str] = set()
        start_time = time.time()
        # Counts failed polls and stalled polls toward the give-up heuristic.
        # NOTE(review): never reset on success — a long job with intermittent
        # poll failures drifts toward the early-exit threshold; confirm intended.
        empty_polls = 0

        while (time.time() - start_time) < settings.poll_timeout:
            await asyncio.sleep(settings.poll_interval)

            result = await asyncio.to_thread(self._poll_trajectory, jwt, ref_id)
            if result is None:
                # Transient poll failure; keep trying until the overall timeout.
                # (A dead `if empty_polls > 5: continue` branch was removed here —
                # both paths continued, so it had no effect.)
                empty_polls += 1
                continue

            data_items = result.get("data", [])
            job_status = result.get("status", "")

            new_content = False
            for item in data_items:
                item_id = str(item.get("id", item.get("request_id", "")))
                if item_id in seen_items:
                    continue
                seen_items.add(item_id)

                payload = item.get("traj_payload", {})
                reasoning = payload.get("reasoning_content", "")
                thought = payload.get("thought", "")

                if reasoning and reasoning != full_thinking:
                    delta = reasoning[len(full_thinking):]
                    if delta:
                        full_thinking = reasoning
                        new_content = True
                        if stream:
                            yield {"type": "thinking", "content": delta}

                if thought and thought != full_text:
                    delta = thought[len(full_text):]
                    if delta:
                        full_text = thought
                        new_content = True
                        if stream:
                            yield {"type": "text", "content": delta}

            # Check for completion
            if job_status in ("completed", "failed", "cancelled"):
                break
            if not new_content and data_items:
                last_payload = data_items[-1].get("traj_payload", {})
                if last_payload.get("status") in ("completed", "done"):
                    break
                final_thought = last_payload.get("thought", "")
                if final_thought and final_thought == full_text and len(seen_items) > 1:
                    # No new content after seeing items — likely done
                    empty_polls += 1
                    if empty_polls > 8:
                        break

        yield {
            "type": "done",
            "content": full_text,
            "thinking": full_thinking,
        }

    def _submit_job(
        self,
        jwt: str,
        task: str,
        model_name: str,
        thinking: bool,
    ) -> str | None:
        """Submit a job to Emergent's queue. Returns ref_id or None.

        The ref_id is generated client-side and echoed back to the trajectory
        endpoint for polling; the payload mirrors what the web app sends.
        """
        s = self._make_session()
        ref_id = str(uuid.uuid4())

        conv_body = {
            "client_ref_id": ref_id,
            "payload": {
                "processor_type": "env_only",
                "is_cloud": True,
                "env_image": "us-central1-docker.pkg.dev/emergent-default/emergent-container-hub/fastapi_react_mongo_shadcn_base_image_cloud_arm:release-26092025-2",
                "branch": "",
                "repository": "",
                "enable_visual_edit": True,
                "prompt_name": "auto_prompt_selector",
                "prompt_version": "latest",
                "work_space_dir": "",
                "task": task,
                "model_name": model_name,
                "model_manually_selected": True,
                "per_instance_cost_limit": 25,
                "agentic_skills": [],
                "plugin_version": "release-10092025-1",
                "base64_image_list": [],
                "human_timestamp": int(time.time() * 1000),
                "asset_upload_enabled": True,
                "is_pro_user": False,
                "testMode": False,
                "thinking_level": "thinking" if thinking else "none",
                "job_mode": "public",
                "mcp_id": [],
            },
            "model_name": model_name,
            "resume": False,
            "ads_metadata": {"app_version": settings.app_version},
        }

        try:
            resp = s.post(
                f"{settings.api_base}/jobs/v0/submit-queue/",
                headers={
                    "Authorization": f"Bearer {jwt}",
                    "Origin": "https://app.emergent.sh",
                    "Referer": "https://app.emergent.sh/",
                },
                json=conv_body,
                timeout=30,
            )
            if resp.status_code in (200, 201, 202):
                return ref_id
            logger.error(f"Submit failed: HTTP {resp.status_code} {resp.text[:200]}")
            return None
        except Exception as e:
            logger.error(f"Submit error: {e}")
            return None

    def _poll_trajectory(self, jwt: str, ref_id: str) -> dict | None:
        """Poll trajectory history for a given ref_id; None on any failure."""
        s = self._make_session()
        try:
            resp = s.get(
                f"{settings.api_base}/trajectories/v0/{ref_id}/history?limit=50",
                headers={"Authorization": f"Bearer {jwt}"},
                timeout=15,
            )
            if resp.status_code == 200:
                return resp.json()
            return None
        except Exception:
            # Caller treats None as a transient failure and retries.
            return None

    def _make_session(self) -> cffi_requests.Session:
        """Build a Chrome-impersonating session, routed through the proxy if set."""
        s = cffi_requests.Session(impersonate="chrome")
        if settings.proxy:
            s.proxies = {"http": settings.proxy, "https": settings.proxy}
        return s

    @staticmethod
    def _messages_to_task(messages: list[dict[str, Any]]) -> str:
        """Convert chat messages into a single task string for Emergent.

        List-valued content is flattened to its text parts; system and
        assistant turns are labelled so the agent can distinguish them.
        """
        parts = []
        for msg in messages:
            role = msg.get("role", "user")
            content = msg.get("content", "")
            if isinstance(content, list):
                text_parts = []
                for item in content:
                    if isinstance(item, dict) and item.get("type") == "text":
                        text_parts.append(item.get("text", ""))
                    elif isinstance(item, str):
                        text_parts.append(item)
                content = "\n".join(text_parts)

            if role == "system":
                parts.append(f"[System Instructions]\n{content}")
            elif role == "assistant":
                parts.append(f"[Previous Assistant Response]\n{content}")
            else:
                parts.append(content)

        return "\n\n".join(parts)
emergent2api/config.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Configuration for emergent2api service."""
2
+ from __future__ import annotations
3
+
4
+ import os
5
+ from dataclasses import dataclass, field
6
+
7
+
8
@dataclass
class Settings:
    """Runtime configuration, sourced from environment variables.

    All tunables come from the environment with safe defaults; fixed service
    endpoints are plain class constants below.
    """

    # Bearer key clients must present; override the weak default in production.
    api_key: str = field(default_factory=lambda: os.environ.get("EMERGENT2API_KEY", "sk-emergent2api"))
    # Which backend implementation to use: "jobs" (default) or "integrations".
    backend: str = field(default_factory=lambda: os.environ.get("EMERGENT_BACKEND", "jobs"))
    # Optional proxy URL for outbound calls (integrations endpoint is IP-restricted).
    proxy: str = field(default_factory=lambda: os.environ.get("EMERGENT_PROXY", ""))
    # Trajectory polling cadence and overall budget, in seconds.
    poll_interval: float = field(default_factory=lambda: float(os.environ.get("EMERGENT_POLL_INTERVAL", "0.8")))
    poll_timeout: float = field(default_factory=lambda: float(os.environ.get("EMERGENT_POLL_TIMEOUT", "120")))
    # SECURITY FIX: the previous default embedded a live PostgreSQL DSN with
    # credentials directly in source control. DATABASE_URL must now be supplied
    # via the environment; connecting with an empty DSN fails fast instead.
    database_url: str = field(default_factory=lambda: os.environ.get("DATABASE_URL", ""))
    port: int = field(default_factory=lambda: int(os.environ.get("PORT", "8000")))

    # Supabase public (anon) key used for the Emergent password-grant login.
    supabase_anon_key: str = (
        "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9."
        "eyJpc3MiOiJzdXBhYmFzZSIsInJlZiI6InNua3N4d2t5dW1oZHlreXJoaGNoIiwi"
        "cm9sZSI6ImFub24iLCJpYXQiOjE3MjQ3NzI2NDYsImV4cCI6MjA0MDM0ODY0Nn0."
        "3unO6zdz2NilPL2xdxt7OjvZA19copj3Q7ulIjPVDLQ"
    )
    auth_base: str = "https://auth.emergent.sh/auth/v1"
    api_base: str = "https://api.emergent.sh"
    integrations_base: str = "https://integrations.emergentagent.com/llm"
    app_version: str = "1.1.28"


settings = Settings()
emergent2api/database.py ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """PostgreSQL database layer for account pool management."""
2
+ from __future__ import annotations
3
+
4
+ import asyncio
5
+ import json
6
+ import time
7
+ from datetime import datetime, timezone
8
+ from typing import Any, Optional
9
+
10
+ import asyncpg
11
+
12
+ from .config import settings
13
+
14
+ _pool: Optional[asyncpg.Pool] = None
15
+
16
+ SCHEMA_SQL = """
17
+ CREATE TABLE IF NOT EXISTS emergent_accounts (
18
+ id SERIAL PRIMARY KEY,
19
+ email TEXT NOT NULL UNIQUE,
20
+ password TEXT NOT NULL,
21
+ jwt TEXT NOT NULL,
22
+ refresh_token TEXT DEFAULT '',
23
+ user_id TEXT DEFAULT '',
24
+ balance REAL DEFAULT 0,
25
+ is_active BOOLEAN DEFAULT TRUE,
26
+ last_used TIMESTAMPTZ,
27
+ created_at TIMESTAMPTZ DEFAULT NOW(),
28
+ updated_at TIMESTAMPTZ DEFAULT NOW()
29
+ );
30
+
31
+ CREATE INDEX IF NOT EXISTS idx_emergent_active ON emergent_accounts (is_active) WHERE is_active = TRUE;
32
+ """
33
+
34
+
35
async def get_pool() -> asyncpg.Pool:
    """Return the shared asyncpg pool, creating it lazily on first use.

    statement_cache_size=0 disables prepared-statement caching — presumably
    required because the default DSN points at a pgbouncer-style pooled
    endpoint; confirm before re-enabling.
    """
    global _pool
    if _pool is None:
        # NOTE(review): not guarded by a lock — two concurrent first calls
        # could each create a pool; confirm this is acceptable at startup.
        _pool = await asyncpg.create_pool(
            settings.database_url,
            min_size=2,
            max_size=10,
            statement_cache_size=0,
        )
    return _pool


async def init_db() -> None:
    """Create the accounts table and index if absent (idempotent DDL)."""
    pool = await get_pool()
    async with pool.acquire() as conn:
        await conn.execute(SCHEMA_SQL)


async def close_db() -> None:
    """Close the shared pool and clear the module-level handle."""
    global _pool
    if _pool:
        await _pool.close()
        _pool = None
+
59
+
60
async def upsert_account(account: dict[str, Any]) -> int:
    """Insert an account, or refresh its tokens/balance if the email exists.

    On conflict the row is also re-activated (is_active = TRUE). Returns the
    row id. Requires keys email/password/jwt; refresh_token, user_id and
    balance are optional.
    """
    pool = await get_pool()
    async with pool.acquire() as conn:
        row = await conn.fetchrow(
            """
            INSERT INTO emergent_accounts (email, password, jwt, refresh_token, user_id, balance)
            VALUES ($1, $2, $3, $4, $5, $6)
            ON CONFLICT (email) DO UPDATE SET
                jwt = EXCLUDED.jwt,
                refresh_token = EXCLUDED.refresh_token,
                balance = EXCLUDED.balance,
                is_active = TRUE,
                updated_at = NOW()
            RETURNING id
            """,
            account["email"],
            account["password"],
            account["jwt"],
            account.get("refresh_token", ""),
            account.get("user_id", ""),
            account.get("balance", 0),
        )
        return row["id"]


async def get_active_accounts() -> list[dict[str, Any]]:
    """Return active accounts, least-recently-used first (never-used first of all)."""
    pool = await get_pool()
    async with pool.acquire() as conn:
        rows = await conn.fetch(
            "SELECT * FROM emergent_accounts WHERE is_active = TRUE ORDER BY last_used ASC NULLS FIRST"
        )
        return [dict(r) for r in rows]


async def mark_used(account_id: int) -> None:
    """Stamp last_used = NOW() for round-robin ordering."""
    pool = await get_pool()
    async with pool.acquire() as conn:
        await conn.execute(
            "UPDATE emergent_accounts SET last_used = NOW() WHERE id = $1",
            account_id,
        )


async def deactivate_account(account_id: int) -> None:
    """Take an account out of rotation (e.g. after repeated auth failures)."""
    pool = await get_pool()
    async with pool.acquire() as conn:
        await conn.execute(
            "UPDATE emergent_accounts SET is_active = FALSE, updated_at = NOW() WHERE id = $1",
            account_id,
        )


async def update_jwt(account_id: int, jwt: str, refresh_token: str = "") -> None:
    """Persist a newly refreshed JWT (and optional refresh token) for an account."""
    pool = await get_pool()
    async with pool.acquire() as conn:
        await conn.execute(
            "UPDATE emergent_accounts SET jwt = $2, refresh_token = $3, updated_at = NOW() WHERE id = $1",
            account_id, jwt, refresh_token,
        )


async def get_account_count() -> dict[str, int]:
    """Return {"total": ..., "active": ...} row counts for the pool."""
    pool = await get_pool()
    async with pool.acquire() as conn:
        total = await conn.fetchval("SELECT COUNT(*) FROM emergent_accounts")
        active = await conn.fetchval("SELECT COUNT(*) FROM emergent_accounts WHERE is_active = TRUE")
        return {"total": total, "active": active}
emergent2api/models.py ADDED
@@ -0,0 +1,161 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Pydantic models for OpenAI, Anthropic, and Response API formats."""
2
+ from __future__ import annotations
3
+
4
+ import time
5
+ import uuid
6
+ from typing import Any, Optional, Union
7
+
8
+ from pydantic import BaseModel, Field
9
+
10
+ # ---------------------------------------------------------------------------
11
+ # Model mapping: user-facing name -> Emergent internal name
12
+ # ---------------------------------------------------------------------------
13
+
14
+ MODEL_MAP = {
15
+ "claude-opus-4-6": "claude-opus-4-6",
16
+ "claude-opus-4.6": "claude-opus-4-6",
17
+ "claude-sonnet-4-5": "claude-sonnet-4-5",
18
+ "claude-sonnet-4.5": "claude-sonnet-4-5",
19
+ "claude-sonnet-4-5-thinking": "claude-sonnet-4-5",
20
+ "claude-sonnet-4.5-thinking": "claude-sonnet-4-5",
21
+ }
22
+
23
+ AVAILABLE_MODELS = [
24
+ {"id": "claude-opus-4-6", "object": "model", "created": 1709251200, "owned_by": "emergent"},
25
+ {"id": "claude-sonnet-4-5", "object": "model", "created": 1709251200, "owned_by": "emergent"},
26
+ {"id": "claude-sonnet-4-5-thinking", "object": "model", "created": 1709251200, "owned_by": "emergent"},
27
+ ]
28
+
29
+
30
+ def resolve_model(model: str) -> tuple[str, bool]:
31
+ """Resolve user model name to (emergent_model, thinking_enabled)."""
32
+ thinking = "thinking" in model.lower()
33
+ emergent_model = MODEL_MAP.get(model, model)
34
+ if emergent_model not in ("claude-opus-4-6", "claude-sonnet-4-5"):
35
+ emergent_model = "claude-sonnet-4-5"
36
+ return emergent_model, thinking
37
+
38
+
39
+ # ---------------------------------------------------------------------------
40
+ # OpenAI Chat Completions
41
+ # ---------------------------------------------------------------------------
42
+
43
+ class OAIMessage(BaseModel):
44
+ role: str
45
+ content: Union[str, list[Any]]
46
+ name: Optional[str] = None
47
+
48
+
49
+ class OAIChatRequest(BaseModel):
50
+ model: str = "claude-sonnet-4-5"
51
+ messages: list[OAIMessage]
52
+ stream: bool = False
53
+ temperature: Optional[float] = None
54
+ max_tokens: Optional[int] = None
55
+ top_p: Optional[float] = None
56
+ user: Optional[str] = None
57
+
58
+
59
+ class OAIChatChoice(BaseModel):
60
+ index: int = 0
61
+ message: dict[str, Any]
62
+ finish_reason: Optional[str] = "stop"
63
+
64
+
65
+ class OAIChatResponse(BaseModel):
66
+ id: str = Field(default_factory=lambda: f"chatcmpl-{uuid.uuid4().hex[:24]}")
67
+ object: str = "chat.completion"
68
+ created: int = Field(default_factory=lambda: int(time.time()))
69
+ model: str = ""
70
+ choices: list[OAIChatChoice] = []
71
+ usage: dict[str, int] = Field(default_factory=lambda: {"prompt_tokens": 0, "completion_tokens": 0, "total_tokens": 0})
72
+
73
+
74
+ class OAIStreamDelta(BaseModel):
75
+ role: Optional[str] = None
76
+ content: Optional[str] = None
77
+
78
+
79
+ class OAIStreamChoice(BaseModel):
80
+ index: int = 0
81
+ delta: OAIStreamDelta
82
+ finish_reason: Optional[str] = None
83
+
84
+
85
+ class OAIStreamChunk(BaseModel):
86
+ id: str = ""
87
+ object: str = "chat.completion.chunk"
88
+ created: int = 0
89
+ model: str = ""
90
+ choices: list[OAIStreamChoice] = []
91
+
92
+
93
+ # ---------------------------------------------------------------------------
94
+ # Anthropic Messages API
95
+ # ---------------------------------------------------------------------------
96
+
97
+ class AnthropicMessage(BaseModel):
98
+ role: str
99
+ content: Union[str, list[Any]]
100
+
101
+
102
+ class AnthropicRequest(BaseModel):
103
+ model: str = "claude-sonnet-4-5"
104
+ messages: list[AnthropicMessage]
105
+ max_tokens: int = 4096
106
+ stream: bool = False
107
+ temperature: Optional[float] = None
108
+ top_p: Optional[float] = None
109
+ system: Optional[str] = None
110
+
111
+
112
+ class AnthropicContentBlock(BaseModel):
113
+ type: str = "text"
114
+ text: str = ""
115
+
116
+
117
+ class AnthropicResponse(BaseModel):
118
+ id: str = Field(default_factory=lambda: f"msg_{uuid.uuid4().hex[:24]}")
119
+ type: str = "message"
120
+ role: str = "assistant"
121
+ content: list[AnthropicContentBlock] = []
122
+ model: str = ""
123
+ stop_reason: Optional[str] = "end_turn"
124
+ stop_sequence: Optional[str] = None
125
+ usage: dict[str, int] = Field(default_factory=lambda: {"input_tokens": 0, "output_tokens": 0})
126
+
127
+
128
+ # ---------------------------------------------------------------------------
129
+ # OpenAI Response API
130
+ # ---------------------------------------------------------------------------
131
+
132
+ class ResponseAPIInput(BaseModel):
133
+ role: str = "user"
134
+ content: Union[str, list[Any]] = ""
135
+
136
+
137
+ class ResponseAPIRequest(BaseModel):
138
+ model: str = "claude-sonnet-4-5"
139
+ input: Union[str, list[ResponseAPIInput]] = ""
140
+ stream: bool = False
141
+ instructions: Optional[str] = None
142
+ max_output_tokens: Optional[int] = None
143
+ temperature: Optional[float] = None
144
+
145
+
146
+ class ResponseAPIOutput(BaseModel):
147
+ type: str = "message"
148
+ id: str = Field(default_factory=lambda: f"msg_{uuid.uuid4().hex[:16]}")
149
+ status: str = "completed"
150
+ role: str = "assistant"
151
+ content: list[dict[str, Any]] = Field(default_factory=list)
152
+
153
+
154
+ class ResponseAPIResponse(BaseModel):
155
+ id: str = Field(default_factory=lambda: f"resp_{uuid.uuid4().hex[:24]}")
156
+ object: str = "response"
157
+ created_at: int = Field(default_factory=lambda: int(time.time()))
158
+ status: str = "completed"
159
+ model: str = ""
160
+ output: list[ResponseAPIOutput] = []
161
+ usage: dict[str, int] = Field(default_factory=lambda: {"input_tokens": 0, "output_tokens": 0, "total_tokens": 0})
emergent2api/pool.py ADDED
@@ -0,0 +1,118 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Account pool manager with round-robin selection and JWT refresh."""
2
+ from __future__ import annotations
3
+
4
+ import asyncio
5
+ import json
6
+ import logging
7
+ import time
8
+ from typing import Any, Optional
9
+
10
+ from curl_cffi import requests as cffi_requests
11
+
12
+ from .config import settings
13
+ from . import database as db
14
+
15
+ logger = logging.getLogger("emergent2api.pool")
16
+
17
+
18
class AccountPool:
    """Thread-safe round-robin account pool backed by PostgreSQL."""

    def __init__(self):
        # Round-robin cursor into the cached account list.
        self._index = 0
        # Serialises acquire() and cache refreshes across concurrent requests.
        self._lock = asyncio.Lock()
        # In-memory snapshot of active accounts, refreshed at most every TTL.
        self._cache: list[dict[str, Any]] = []
        self._cache_ts: float = 0
        self._cache_ttl: float = 30  # refresh cache every 30s

    async def _refresh_cache(self, force: bool = False) -> None:
        """Reload active accounts from the DB if the cache is stale or force=True."""
        now = time.time()
        if not force and self._cache and (now - self._cache_ts) < self._cache_ttl:
            return
        self._cache = await db.get_active_accounts()
        self._cache_ts = now
        logger.info(f"Pool cache refreshed: {len(self._cache)} active accounts")

    async def acquire(self) -> Optional[dict[str, Any]]:
        """Get next account via round-robin. Returns account dict or None."""
        async with self._lock:
            await self._refresh_cache()
            if not self._cache:
                return None
            account = self._cache[self._index % len(self._cache)]
            self._index = (self._index + 1) % len(self._cache)
            # Persist usage so the LRU ordering survives cache refreshes.
            await db.mark_used(account["id"])
            return account

    async def release_bad(self, account_id: int) -> None:
        """Mark an account as inactive after auth failure."""
        await db.deactivate_account(account_id)
        async with self._lock:
            await self._refresh_cache(force=True)
        logger.warning(f"Account {account_id} deactivated")

    async def refresh_jwt(self, account: dict[str, Any]) -> Optional[str]:
        """Re-login to get a fresh JWT using stored email/password.

        Returns the new JWT on success, None on any failure.
        NOTE(review): the curl_cffi calls below are synchronous inside an
        async method and will block the event loop — consider asyncio.to_thread.
        """
        try:
            s = cffi_requests.Session(impersonate="chrome")
            if settings.proxy:
                s.proxies = {"http": settings.proxy, "https": settings.proxy}

            # Supabase password-grant login using the project's public anon key.
            resp = s.post(
                f"{settings.auth_base}/token?grant_type=password",
                json={
                    "email": account["email"],
                    "password": account["password"],
                    "gotrue_meta_security": {},
                },
                headers={
                    "Apikey": settings.supabase_anon_key,
                    "Authorization": f"Bearer {settings.supabase_anon_key}",
                    "X-Client-Info": "supabase-js-web/2.58.0",
                },
                timeout=15,
            )
            if resp.status_code != 200:
                logger.error(f"JWT refresh failed for {account['email']}: HTTP {resp.status_code}")
                return None

            data = resp.json()
            new_jwt = data["access_token"]
            new_refresh = data.get("refresh_token", "")
            await db.update_jwt(account["id"], new_jwt, new_refresh)
            logger.info(f"JWT refreshed for {account['email']}")

            # Re-initialize user details (mandatory)
            self._init_user_detail(s, new_jwt)

            return new_jwt
        except Exception as e:
            logger.error(f"JWT refresh error for {account['email']}: {e}")
            return None

    @staticmethod
    def _init_user_detail(s: cffi_requests.Session, jwt: str) -> None:
        """Call get_user_detail — mandatory for account activation."""
        try:
            s.post(
                f"{settings.api_base}/user/details",
                data=json.dumps({
                    "ads_metadata": {
                        "app_version": settings.app_version,
                        "showError": "You are unauthorized to use this application",
                    }
                }),
                headers={
                    "Content-Type": "application/json",
                    "Authorization": f"Bearer {jwt}",
                },
                timeout=15,
            )
        except Exception:
            # Best-effort: activation-call failures are deliberately ignored.
            pass

    async def count(self) -> dict[str, int]:
        """Return {"total": ..., "active": ...} counts straight from the DB."""
        return await db.get_account_count()


# Module-level singleton shared by the app and route modules.
pool = AccountPool()
emergent2api/routes/__init__.py ADDED
File without changes
emergent2api/routes/anthropic.py ADDED
@@ -0,0 +1,121 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Anthropic-compatible /v1/messages endpoint."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import time
6
+ import uuid
7
+
8
+ from fastapi import APIRouter, Request
9
+ from fastapi.responses import JSONResponse, StreamingResponse
10
+
11
+ from ..models import (
12
+ AnthropicRequest,
13
+ AnthropicResponse,
14
+ AnthropicContentBlock,
15
+ resolve_model,
16
+ )
17
+ from ..pool import pool
18
+ from ..backends.jobs import JobsBackend
19
+ from ..backends.integrations import IntegrationsBackend
20
+ from ..config import settings
21
+
22
+ router = APIRouter()
23
+
24
+
25
def _get_backend():
    """Instantiate the chat backend selected by ``settings.backend``."""
    use_integrations = settings.backend == "integrations"
    return IntegrationsBackend() if use_integrations else JobsBackend()
29
+
30
+
31
+ @router.post("/v1/messages")
32
+ async def messages(request: Request):
33
+ body = await request.json()
34
+ req = AnthropicRequest(**body)
35
+
36
+ account = await pool.acquire()
37
+ if not account:
38
+ return JSONResponse(
39
+ status_code=503,
40
+ content={"type": "error", "error": {"type": "overloaded_error", "message": "No active accounts available"}},
41
+ )
42
+
43
+ emergent_model, thinking = resolve_model(req.model)
44
+ backend = _get_backend()
45
+
46
+ # Build messages list (prepend system if present)
47
+ messages = []
48
+ if req.system:
49
+ messages.append({"role": "system", "content": req.system})
50
+ for m in req.messages:
51
+ messages.append({"role": m.role, "content": m.content})
52
+
53
+ if req.stream:
54
+ return StreamingResponse(
55
+ _stream_anthropic(backend, messages, emergent_model, thinking, account, req.model),
56
+ media_type="text/event-stream",
57
+ headers={
58
+ "Cache-Control": "no-cache",
59
+ "Connection": "keep-alive",
60
+ "X-Accel-Buffering": "no",
61
+ },
62
+ )
63
+
64
+ # Non-streaming
65
+ full_text = ""
66
+ error_msg = ""
67
+ async for chunk in backend.chat(messages, emergent_model, thinking, account, stream=False):
68
+ if chunk["type"] == "error":
69
+ error_msg = chunk["content"]
70
+ break
71
+ if chunk["type"] == "done":
72
+ full_text = chunk["content"]
73
+
74
+ if error_msg:
75
+ return JSONResponse(
76
+ status_code=500,
77
+ content={"type": "error", "error": {"type": "api_error", "message": error_msg}},
78
+ )
79
+
80
+ response = AnthropicResponse(
81
+ model=req.model,
82
+ content=[AnthropicContentBlock(text=full_text)],
83
+ )
84
+ return response.model_dump()
85
+
86
+
87
+ async def _stream_anthropic(backend, messages, emergent_model, thinking, account, user_model):
88
+ """Generate SSE stream in Anthropic Messages format."""
89
+ msg_id = f"msg_{uuid.uuid4().hex[:24]}"
90
+
91
+ # message_start
92
+ yield f"event: message_start\ndata: {json.dumps({'type': 'message_start', 'message': {'id': msg_id, 'type': 'message', 'role': 'assistant', 'content': [], 'model': user_model, 'stop_reason': None, 'stop_sequence': None, 'usage': {'input_tokens': 0, 'output_tokens': 0}}})}\n\n"
93
+
94
+ # content_block_start
95
+ yield f"event: content_block_start\ndata: {json.dumps({'type': 'content_block_start', 'index': 0, 'content_block': {'type': 'text', 'text': ''}})}\n\n"
96
+
97
+ total_output = 0
98
+ async for chunk in backend.chat(messages, emergent_model, thinking, account, stream=True):
99
+ if chunk["type"] == "error":
100
+ err_text = chunk["content"]
101
+ yield f"event: content_block_delta\ndata: {json.dumps({'type': 'content_block_delta', 'index': 0, 'delta': {'type': 'text_delta', 'text': f'[Error: {err_text}]'}})}\n\n"
102
+ break
103
+
104
+ if chunk["type"] in ("text", "thinking"):
105
+ content = chunk["content"]
106
+ if not content:
107
+ continue
108
+ total_output += len(content)
109
+ yield f"event: content_block_delta\ndata: {json.dumps({'type': 'content_block_delta', 'index': 0, 'delta': {'type': 'text_delta', 'text': content}})}\n\n"
110
+
111
+ if chunk["type"] == "done":
112
+ break
113
+
114
+ # content_block_stop
115
+ yield f"event: content_block_stop\ndata: {json.dumps({'type': 'content_block_stop', 'index': 0})}\n\n"
116
+
117
+ # message_delta
118
+ yield f"event: message_delta\ndata: {json.dumps({'type': 'message_delta', 'delta': {'stop_reason': 'end_turn', 'stop_sequence': None}, 'usage': {'output_tokens': total_output}})}\n\n"
119
+
120
+ # message_stop
121
+ yield f"event: message_stop\ndata: {json.dumps({'type': 'message_stop'})}\n\n"
emergent2api/routes/openai.py ADDED
@@ -0,0 +1,148 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """OpenAI-compatible /v1/chat/completions and /v1/models endpoints."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import time
6
+ import uuid
7
+
8
+ from fastapi import APIRouter, Request
9
+ from fastapi.responses import JSONResponse, StreamingResponse
10
+
11
+ from ..models import (
12
+ AVAILABLE_MODELS,
13
+ OAIChatRequest,
14
+ OAIChatResponse,
15
+ OAIChatChoice,
16
+ OAIStreamChunk,
17
+ OAIStreamChoice,
18
+ OAIStreamDelta,
19
+ resolve_model,
20
+ )
21
+ from ..pool import pool
22
+ from ..backends.jobs import JobsBackend
23
+ from ..backends.integrations import IntegrationsBackend
24
+ from ..config import settings
25
+
26
+ router = APIRouter()
27
+
28
def _get_backend():
    """Pick the backend implementation configured via ``settings.backend``."""
    backend_cls = IntegrationsBackend if settings.backend == "integrations" else JobsBackend
    return backend_cls()
32
+
33
+
34
+ @router.get("/v1/models")
35
+ async def list_models():
36
+ return {"object": "list", "data": AVAILABLE_MODELS}
37
+
38
+
39
+ @router.post("/v1/chat/completions")
40
+ async def chat_completions(request: Request):
41
+ body = await request.json()
42
+ req = OAIChatRequest(**body)
43
+
44
+ account = await pool.acquire()
45
+ if not account:
46
+ return JSONResponse(
47
+ status_code=503,
48
+ content={"error": {"message": "No active accounts available", "type": "server_error"}},
49
+ )
50
+
51
+ emergent_model, thinking = resolve_model(req.model)
52
+ backend = _get_backend()
53
+ messages = [{"role": m.role, "content": m.content} for m in req.messages]
54
+
55
+ if req.stream:
56
+ return StreamingResponse(
57
+ _stream_response(backend, messages, emergent_model, thinking, account, req.model),
58
+ media_type="text/event-stream",
59
+ headers={
60
+ "Cache-Control": "no-cache",
61
+ "Connection": "keep-alive",
62
+ "X-Accel-Buffering": "no",
63
+ },
64
+ )
65
+
66
+ # Non-streaming
67
+ full_text = ""
68
+ full_thinking = ""
69
+ error_msg = ""
70
+ async for chunk in backend.chat(messages, emergent_model, thinking, account, stream=False):
71
+ if chunk["type"] == "error":
72
+ error_msg = chunk["content"]
73
+ break
74
+ if chunk["type"] == "done":
75
+ full_text = chunk["content"]
76
+ full_thinking = chunk.get("thinking", "")
77
+
78
+ if error_msg:
79
+ return JSONResponse(
80
+ status_code=500,
81
+ content={"error": {"message": error_msg, "type": "server_error"}},
82
+ )
83
+
84
+ response = OAIChatResponse(
85
+ model=req.model,
86
+ choices=[OAIChatChoice(
87
+ message={"role": "assistant", "content": full_text},
88
+ finish_reason="stop",
89
+ )],
90
+ )
91
+ return response.model_dump()
92
+
93
+
94
async def _stream_response(backend, messages, emergent_model, thinking, account, user_model):
    """Relay backend chunks as an OpenAI chat-completion SSE stream."""
    chat_id = f"chatcmpl-{uuid.uuid4().hex[:24]}"
    created = int(time.time())

    # First frame announces the assistant role.
    role_chunk = OAIStreamChunk(
        id=chat_id,
        created=created,
        model=user_model,
        choices=[OAIStreamChoice(delta=OAIStreamDelta(role="assistant"))],
    )
    yield f"data: {json.dumps(role_chunk.model_dump())}\n\n"

    async for chunk in backend.chat(messages, emergent_model, thinking, account, stream=True):
        kind = chunk["type"]

        if kind == "error":
            # Error frames are hand-built dicts so a serialization failure in
            # the pydantic model cannot mask the backend error itself.
            failure = {
                "id": chat_id,
                "object": "chat.completion.chunk",
                "created": created,
                "model": user_model,
                "choices": [{
                    "index": 0,
                    "delta": {"content": f"\n[Error: {chunk['content']}]"},
                    "finish_reason": None,
                }],
            }
            yield f"data: {json.dumps(failure)}\n\n"
            break

        if kind in ("text", "thinking"):
            piece = chunk["content"]
            if not piece:
                continue
            delta_chunk = OAIStreamChunk(
                id=chat_id,
                created=created,
                model=user_model,
                choices=[OAIStreamChoice(delta=OAIStreamDelta(content=piece))],
            )
            yield f"data: {json.dumps(delta_chunk.model_dump())}\n\n"

        if kind == "done":
            break

    # Closing frame carries the finish_reason, then the [DONE] sentinel.
    closing = OAIStreamChunk(
        id=chat_id,
        created=created,
        model=user_model,
        choices=[OAIStreamChoice(
            delta=OAIStreamDelta(),
            finish_reason="stop",
        )],
    )
    yield f"data: {json.dumps(closing.model_dump())}\n\n"
    yield "data: [DONE]\n\n"
emergent2api/routes/responses.py ADDED
@@ -0,0 +1,138 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """OpenAI Response API /v1/responses endpoint."""
2
+ from __future__ import annotations
3
+
4
+ import json
5
+ import time
6
+ import uuid
7
+
8
+ from fastapi import APIRouter, Request
9
+ from fastapi.responses import JSONResponse, StreamingResponse
10
+
11
+ from ..models import (
12
+ ResponseAPIRequest,
13
+ ResponseAPIResponse,
14
+ ResponseAPIOutput,
15
+ resolve_model,
16
+ )
17
+ from ..pool import pool
18
+ from ..backends.jobs import JobsBackend
19
+ from ..backends.integrations import IntegrationsBackend
20
+ from ..config import settings
21
+
22
+ router = APIRouter()
23
+
24
+
25
def _get_backend():
    """Return the configured chat backend instance."""
    if settings.backend != "integrations":
        return JobsBackend()
    return IntegrationsBackend()
29
+
30
+
31
+ def _input_to_messages(req: ResponseAPIRequest) -> list[dict]:
32
+ """Convert Response API input format to chat messages."""
33
+ messages = []
34
+ if req.instructions:
35
+ messages.append({"role": "system", "content": req.instructions})
36
+
37
+ if isinstance(req.input, str):
38
+ messages.append({"role": "user", "content": req.input})
39
+ elif isinstance(req.input, list):
40
+ for item in req.input:
41
+ messages.append({"role": item.role, "content": item.content})
42
+
43
+ return messages
44
+
45
+
46
+ @router.post("/v1/responses")
47
+ async def responses(request: Request):
48
+ body = await request.json()
49
+ req = ResponseAPIRequest(**body)
50
+
51
+ account = await pool.acquire()
52
+ if not account:
53
+ return JSONResponse(
54
+ status_code=503,
55
+ content={"error": {"message": "No active accounts available", "type": "server_error"}},
56
+ )
57
+
58
+ emergent_model, thinking = resolve_model(req.model)
59
+ backend = _get_backend()
60
+ messages = _input_to_messages(req)
61
+
62
+ if req.stream:
63
+ return StreamingResponse(
64
+ _stream_responses(backend, messages, emergent_model, thinking, account, req.model),
65
+ media_type="text/event-stream",
66
+ headers={
67
+ "Cache-Control": "no-cache",
68
+ "Connection": "keep-alive",
69
+ "X-Accel-Buffering": "no",
70
+ },
71
+ )
72
+
73
+ # Non-streaming
74
+ full_text = ""
75
+ error_msg = ""
76
+ async for chunk in backend.chat(messages, emergent_model, thinking, account, stream=False):
77
+ if chunk["type"] == "error":
78
+ error_msg = chunk["content"]
79
+ break
80
+ if chunk["type"] == "done":
81
+ full_text = chunk["content"]
82
+
83
+ if error_msg:
84
+ return JSONResponse(
85
+ status_code=500,
86
+ content={"error": {"message": error_msg, "type": "server_error"}},
87
+ )
88
+
89
+ resp_id = f"resp_{uuid.uuid4().hex[:24]}"
90
+ output_item = ResponseAPIOutput(
91
+ content=[{"type": "output_text", "text": full_text}],
92
+ )
93
+ response = ResponseAPIResponse(
94
+ id=resp_id,
95
+ model=req.model,
96
+ output=[output_item],
97
+ )
98
+ return response.model_dump()
99
+
100
+
101
+ async def _stream_responses(backend, messages, emergent_model, thinking, account, user_model):
102
+ """Generate SSE stream in OpenAI Response API format."""
103
+ resp_id = f"resp_{uuid.uuid4().hex[:24]}"
104
+ item_id = f"msg_{uuid.uuid4().hex[:16]}"
105
+ output_idx = 0
106
+
107
+ # response.created
108
+ yield f"event: response.created\ndata: {json.dumps({'type': 'response.created', 'response': {'id': resp_id, 'object': 'response', 'status': 'in_progress', 'model': user_model}})}\n\n"
109
+
110
+ # response.output_item.added
111
+ yield f"event: response.output_item.added\ndata: {json.dumps({'type': 'response.output_item.added', 'output_index': output_idx, 'item': {'type': 'message', 'id': item_id, 'role': 'assistant', 'status': 'in_progress', 'content': []}})}\n\n"
112
+
113
+ # response.content_part.added
114
+ yield f"event: response.content_part.added\ndata: {json.dumps({'type': 'response.content_part.added', 'item_id': item_id, 'output_index': output_idx, 'content_index': 0, 'part': {'type': 'output_text', 'text': ''}})}\n\n"
115
+
116
+ async for chunk in backend.chat(messages, emergent_model, thinking, account, stream=True):
117
+ if chunk["type"] == "error":
118
+ err_text = chunk["content"]
119
+ yield f"event: response.output_text.delta\ndata: {json.dumps({'type': 'response.output_text.delta', 'item_id': item_id, 'output_index': output_idx, 'content_index': 0, 'delta': f'[Error: {err_text}]'})}\n\n"
120
+ break
121
+
122
+ if chunk["type"] in ("text", "thinking"):
123
+ content = chunk["content"]
124
+ if not content:
125
+ continue
126
+ yield f"event: response.output_text.delta\ndata: {json.dumps({'type': 'response.output_text.delta', 'item_id': item_id, 'output_index': output_idx, 'content_index': 0, 'delta': content})}\n\n"
127
+
128
+ if chunk["type"] == "done":
129
+ break
130
+
131
+ # response.content_part.done
132
+ yield f"event: response.content_part.done\ndata: {json.dumps({'type': 'response.content_part.done', 'item_id': item_id, 'output_index': output_idx, 'content_index': 0})}\n\n"
133
+
134
+ # response.output_item.done
135
+ yield f"event: response.output_item.done\ndata: {json.dumps({'type': 'response.output_item.done', 'output_index': output_idx, 'item': {'type': 'message', 'id': item_id, 'role': 'assistant', 'status': 'completed'}})}\n\n"
136
+
137
+ # response.completed
138
+ yield f"event: response.completed\ndata: {json.dumps({'type': 'response.completed', 'response': {'id': resp_id, 'object': 'response', 'status': 'completed', 'model': user_model}})}\n\n"
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ fastapi>=0.115
2
+ uvicorn[standard]>=0.30
3
+ pydantic-settings>=2.0
4
+ asyncpg>=0.29
5
+ httpx>=0.27
6
+ curl_cffi>=0.7