Spaces:
Paused
refactor: fingerprint alignment, auto-maintenance, Docker deployment
Browse filesPhase 1 - Eliminate fingerprint detection gaps:
- Dynamic sec-ch-ua from config chromium_version (aligned with TLS profile)
- Inject full fingerprint headers into curl-fetch (OAuth/appcast)
- Unify Accept-Encoding based on curl-impersonate detection
- Optimize header ordering (single sort pass)
Phase 2 - Auto-maintenance:
- Structured YAML mutation utility (replace regex hacks)
- Auto full-update pipeline on new version detection
- Config hot-reload after updates
- Exponential backoff token refresh (5 attempts + 10m recovery)
- Immediate persistence for critical data (tokens, cf_clearance)
- Cookie v2 format with expiry timestamps
- Graceful shutdown with 10s timeout protection
- Usage counters auto-reset on rate limit window rollover
Phase 3 - Docker deployment:
- Dockerfile, docker-compose.yml, .dockerignore
- Deployment docs in README
Fixes:
- Extract planType from JWT /auth claim (was looking in /profile)
- Backfill missing planType on startup for existing accounts
- Show full date for rate limit reset times in dashboard
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
- .dockerignore +9 -0
- Dockerfile +23 -0
- README.md +24 -0
- config/default.yaml +1 -0
- config/fingerprint.yaml +0 -1
- docker-compose.yml +11 -0
- public/dashboard.html +19 -1
- src/auth/account-pool.ts +47 -25
- src/auth/jwt-utils.ts +14 -8
- src/auth/refresh-scheduler.ts +70 -9
- src/auth/types.ts +2 -0
- src/config.ts +30 -7
- src/fingerprint/manager.ts +75 -13
- src/index.ts +20 -6
- src/middleware/logger.ts +3 -2
- src/middleware/request-id.ts +13 -0
- src/proxy/codex-api.ts +6 -5
- src/proxy/cookie-jar.ts +122 -43
- src/routes/accounts.ts +6 -1
- src/routes/shared/proxy-handler.ts +8 -0
- src/session/manager.ts +14 -0
- src/tls/curl-binary.ts +9 -0
- src/tls/curl-fetch.ts +27 -4
- src/translation/codex-event-extractor.ts +56 -0
- src/translation/codex-to-anthropic.ts +15 -46
- src/translation/codex-to-gemini.ts +15 -46
- src/translation/codex-to-openai.ts +15 -65
- src/types/codex-events.ts +121 -0
- src/update-checker.ts +69 -5
- src/utils/yaml-mutate.ts +22 -0
|
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
node_modules/
|
| 2 |
+
dist/
|
| 3 |
+
data/
|
| 4 |
+
bin/
|
| 5 |
+
tmp/
|
| 6 |
+
.asar-out/
|
| 7 |
+
.git/
|
| 8 |
+
docs/
|
| 9 |
+
stitch-*/
|
|
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM node:20-slim
|
| 2 |
+
|
| 3 |
+
# Install unzip (full-update pipeline) and ca-certificates
|
| 4 |
+
RUN apt-get update && \
|
| 5 |
+
apt-get install -y --no-install-recommends unzip ca-certificates && \
|
| 6 |
+
rm -rf /var/lib/apt/lists/*
|
| 7 |
+
|
| 8 |
+
WORKDIR /app
|
| 9 |
+
|
| 10 |
+
# Install dependencies (postinstall downloads curl-impersonate for Linux)
|
| 11 |
+
COPY package*.json ./
|
| 12 |
+
RUN npm ci --omit=dev
|
| 13 |
+
|
| 14 |
+
# Copy source and build
|
| 15 |
+
COPY . .
|
| 16 |
+
RUN npm run build
|
| 17 |
+
|
| 18 |
+
# Persistent data mount point
|
| 19 |
+
VOLUME /app/data
|
| 20 |
+
|
| 21 |
+
EXPOSE 8080
|
| 22 |
+
|
| 23 |
+
CMD ["node", "dist/index.js"]
|
|
@@ -198,6 +198,30 @@ for await (const chunk of stream) {
|
|
| 198 |
}
|
| 199 |
```
|
| 200 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 201 |
## ⚙️ 配置说明 (Configuration)
|
| 202 |
|
| 203 |
所有配置位于 `config/default.yaml`:
|
|
|
|
| 198 |
}
|
| 199 |
```
|
| 200 |
|
| 201 |
+
## 🐳 部署方式 (Deployment)
|
| 202 |
+
|
| 203 |
+
### Docker 部署(推荐,所有平台通用)
|
| 204 |
+
|
| 205 |
+
```bash
|
| 206 |
+
git clone https://github.com/icebear0828/codex-proxy.git
|
| 207 |
+
cd codex-proxy
|
| 208 |
+
docker compose up -d
|
| 209 |
+
# 打开 http://localhost:8080 登录
|
| 210 |
+
```
|
| 211 |
+
|
| 212 |
+
### 原生部署(macOS / Linux)
|
| 213 |
+
|
| 214 |
+
```bash
|
| 215 |
+
git clone https://github.com/icebear0828/codex-proxy.git
|
| 216 |
+
cd codex-proxy
|
| 217 |
+
npm install
|
| 218 |
+
npm run build
|
| 219 |
+
npm start
|
| 220 |
+
# 打开 http://localhost:8080 登录
|
| 221 |
+
```
|
| 222 |
+
|
| 223 |
+
> Docker 部署会自动安装 curl-impersonate(Linux 版)。原生部署依赖 `npm install` 的 postinstall 脚本自动下载。
|
| 224 |
+
|
| 225 |
## ⚙️ 配置说明 (Configuration)
|
| 226 |
|
| 227 |
所有配置位于 `config/default.yaml`:
|
|
@@ -8,6 +8,7 @@ client:
|
|
| 8 |
build_number: "669"
|
| 9 |
platform: "darwin"
|
| 10 |
arch: "arm64"
|
|
|
|
| 11 |
|
| 12 |
model:
|
| 13 |
default: "gpt-5.3-codex"
|
|
|
|
| 8 |
build_number: "669"
|
| 9 |
platform: "darwin"
|
| 10 |
arch: "arm64"
|
| 11 |
+
chromium_version: "136"
|
| 12 |
|
| 13 |
model:
|
| 14 |
default: "gpt-5.3-codex"
|
|
@@ -20,7 +20,6 @@ header_order:
|
|
| 20 |
default_headers:
|
| 21 |
Accept-Encoding: "gzip, deflate, br, zstd"
|
| 22 |
Accept-Language: "en-US,en;q=0.9"
|
| 23 |
-
sec-ch-ua: '"Chromium";v="134", "Not:A-Brand";v="24"'
|
| 24 |
sec-ch-ua-mobile: "?0"
|
| 25 |
sec-ch-ua-platform: '"macOS"'
|
| 26 |
sec-fetch-site: "same-origin"
|
|
|
|
| 20 |
default_headers:
|
| 21 |
Accept-Encoding: "gzip, deflate, br, zstd"
|
| 22 |
Accept-Language: "en-US,en;q=0.9"
|
|
|
|
| 23 |
sec-ch-ua-mobile: "?0"
|
| 24 |
sec-ch-ua-platform: '"macOS"'
|
| 25 |
sec-fetch-site: "same-origin"
|
|
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
services:
|
| 2 |
+
codex-proxy:
|
| 3 |
+
build: .
|
| 4 |
+
ports:
|
| 5 |
+
- "8080:8080"
|
| 6 |
+
volumes:
|
| 7 |
+
- ./data:/app/data
|
| 8 |
+
- ./config:/app/config
|
| 9 |
+
restart: unless-stopped
|
| 10 |
+
environment:
|
| 11 |
+
- NODE_ENV=production
|
|
@@ -391,6 +391,24 @@ function formatNumber(n) {
|
|
| 391 |
return String(n);
|
| 392 |
}
|
| 393 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 394 |
function statusBadge(status) {
|
| 395 |
const map = {
|
| 396 |
active: ['bg-green-100 text-green-700 border-green-200 dark:bg-[#11281d] dark:text-primary dark:border-[#1a442e]', 'active'],
|
|
@@ -459,7 +477,7 @@ function renderAccounts(accounts) {
|
|
| 459 |
: pct >= 90 ? 'text-red-500'
|
| 460 |
: pct >= 60 ? 'text-amber-600 dark:text-amber-500'
|
| 461 |
: 'text-primary';
|
| 462 |
-
const resetAt = rl.reset_at ?
|
| 463 |
|
| 464 |
quotaHtml = `
|
| 465 |
<div class="pt-3 mt-3 border-t border-slate-100 dark:border-border-dark">
|
|
|
|
| 391 |
return String(n);
|
| 392 |
}
|
| 393 |
|
| 394 |
+
function formatResetTime(unixSec) {
|
| 395 |
+
const d = new Date(unixSec * 1000);
|
| 396 |
+
const now = new Date();
|
| 397 |
+
const time = d.toLocaleTimeString(undefined, { hour: '2-digit', minute: '2-digit', second: '2-digit' });
|
| 398 |
+
// Same calendar day → just show time
|
| 399 |
+
if (d.getFullYear() === now.getFullYear() && d.getMonth() === now.getMonth() && d.getDate() === now.getDate()) {
|
| 400 |
+
return time;
|
| 401 |
+
}
|
| 402 |
+
// Tomorrow
|
| 403 |
+
const tomorrow = new Date(now); tomorrow.setDate(tomorrow.getDate() + 1);
|
| 404 |
+
if (d.getFullYear() === tomorrow.getFullYear() && d.getMonth() === tomorrow.getMonth() && d.getDate() === tomorrow.getDate()) {
|
| 405 |
+
return (currentLangCode === 'zh' ? '明天 ' : 'Tomorrow ') + time;
|
| 406 |
+
}
|
| 407 |
+
// Other dates → full date + time
|
| 408 |
+
const date = d.toLocaleDateString(undefined, { month: 'short', day: 'numeric' });
|
| 409 |
+
return date + ' ' + time;
|
| 410 |
+
}
|
| 411 |
+
|
| 412 |
function statusBadge(status) {
|
| 413 |
const map = {
|
| 414 |
active: ['bg-green-100 text-green-700 border-green-200 dark:bg-[#11281d] dark:text-primary dark:border-[#1a442e]', 'active'],
|
|
|
|
| 477 |
: pct >= 90 ? 'text-red-500'
|
| 478 |
: pct >= 60 ? 'text-amber-600 dark:text-amber-500'
|
| 479 |
: 'text-primary';
|
| 480 |
+
const resetAt = rl.reset_at ? formatResetTime(rl.reset_at) : null;
|
| 481 |
|
| 482 |
quotaHtml = `
|
| 483 |
<div class="pt-3 mt-3 border-t border-slate-100 dark:border-border-dark">
|
|
@@ -158,7 +158,7 @@ export class AccountPool {
|
|
| 158 |
existing.email = profile?.email ?? existing.email;
|
| 159 |
existing.planType = profile?.chatgpt_plan_type ?? existing.planType;
|
| 160 |
existing.status = isTokenExpired(token) ? "expired" : "active";
|
| 161 |
-
this.
|
| 162 |
return existing.id;
|
| 163 |
}
|
| 164 |
}
|
|
@@ -185,7 +185,7 @@ export class AccountPool {
|
|
| 185 |
};
|
| 186 |
|
| 187 |
this.accounts.set(id, entry);
|
| 188 |
-
this.
|
| 189 |
return id;
|
| 190 |
}
|
| 191 |
|
|
@@ -212,7 +212,7 @@ export class AccountPool {
|
|
| 212 |
entry.planType = profile?.chatgpt_plan_type ?? entry.planType;
|
| 213 |
entry.accountId = extractChatGptAccountId(newToken) ?? entry.accountId;
|
| 214 |
entry.status = "active";
|
| 215 |
-
this.
|
| 216 |
}
|
| 217 |
|
| 218 |
markStatus(entryId: string, status: AccountEntry["status"]): void {
|
|
@@ -231,11 +231,36 @@ export class AccountPool {
|
|
| 231 |
output_tokens: 0,
|
| 232 |
last_used: null,
|
| 233 |
rate_limit_until: null,
|
|
|
|
| 234 |
};
|
| 235 |
this.schedulePersist();
|
| 236 |
return true;
|
| 237 |
}
|
| 238 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 239 |
// ── Query ───────────────────────────────────────────────────────
|
| 240 |
|
| 241 |
getAccounts(): AccountInfo[] {
|
|
@@ -254,8 +279,6 @@ export class AccountPool {
|
|
| 254 |
return [...this.accounts.values()];
|
| 255 |
}
|
| 256 |
|
| 257 |
-
// ── Backward-compatible shim (for routes that still expect AuthManager) ──
|
| 258 |
-
|
| 259 |
isAuthenticated(): boolean {
|
| 260 |
const now = new Date();
|
| 261 |
for (const entry of this.accounts.values()) {
|
|
@@ -265,21 +288,6 @@ export class AccountPool {
|
|
| 265 |
return false;
|
| 266 |
}
|
| 267 |
|
| 268 |
-
/** @deprecated Use acquire() instead. */
|
| 269 |
-
async getToken(): Promise<string | null> {
|
| 270 |
-
const acq = this.acquire();
|
| 271 |
-
if (!acq) return null;
|
| 272 |
-
// Release immediately — shim usage doesn't track per-request
|
| 273 |
-
this.acquireLocks.delete(acq.entryId);
|
| 274 |
-
return acq.token;
|
| 275 |
-
}
|
| 276 |
-
|
| 277 |
-
/** @deprecated Use acquire() instead. */
|
| 278 |
-
getAccountId(): string | null {
|
| 279 |
-
const first = [...this.accounts.values()].find((a) => a.status === "active");
|
| 280 |
-
return first?.accountId ?? null;
|
| 281 |
-
}
|
| 282 |
-
|
| 283 |
/** @deprecated Use getAccounts() instead. */
|
| 284 |
getUserInfo(): { email?: string; accountId?: string; planType?: string } | null {
|
| 285 |
const first = [...this.accounts.values()].find((a) => a.status === "active");
|
|
@@ -304,11 +312,6 @@ export class AccountPool {
|
|
| 304 |
return false;
|
| 305 |
}
|
| 306 |
|
| 307 |
-
/** @deprecated Use addAccount() instead. */
|
| 308 |
-
setToken(token: string): void {
|
| 309 |
-
this.addAccount(token);
|
| 310 |
-
}
|
| 311 |
-
|
| 312 |
/** @deprecated Use removeAccount() instead. */
|
| 313 |
clearToken(): void {
|
| 314 |
this.accounts.clear();
|
|
@@ -416,11 +419,30 @@ export class AccountPool {
|
|
| 416 |
const raw = readFileSync(ACCOUNTS_FILE, "utf-8");
|
| 417 |
const data = JSON.parse(raw) as AccountsFile;
|
| 418 |
if (Array.isArray(data.accounts)) {
|
|
|
|
| 419 |
for (const entry of data.accounts) {
|
| 420 |
if (entry.id && entry.token) {
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 421 |
this.accounts.set(entry.id, entry);
|
| 422 |
}
|
| 423 |
}
|
|
|
|
| 424 |
}
|
| 425 |
} catch (err) {
|
| 426 |
console.warn("[AccountPool] Failed to load accounts:", err instanceof Error ? err.message : err);
|
|
|
|
| 158 |
existing.email = profile?.email ?? existing.email;
|
| 159 |
existing.planType = profile?.chatgpt_plan_type ?? existing.planType;
|
| 160 |
existing.status = isTokenExpired(token) ? "expired" : "active";
|
| 161 |
+
this.persistNow(); // Critical data — persist immediately
|
| 162 |
return existing.id;
|
| 163 |
}
|
| 164 |
}
|
|
|
|
| 185 |
};
|
| 186 |
|
| 187 |
this.accounts.set(id, entry);
|
| 188 |
+
this.persistNow(); // Critical data — persist immediately
|
| 189 |
return id;
|
| 190 |
}
|
| 191 |
|
|
|
|
| 212 |
entry.planType = profile?.chatgpt_plan_type ?? entry.planType;
|
| 213 |
entry.accountId = extractChatGptAccountId(newToken) ?? entry.accountId;
|
| 214 |
entry.status = "active";
|
| 215 |
+
this.persistNow(); // Critical data — persist immediately
|
| 216 |
}
|
| 217 |
|
| 218 |
markStatus(entryId: string, status: AccountEntry["status"]): void {
|
|
|
|
| 231 |
output_tokens: 0,
|
| 232 |
last_used: null,
|
| 233 |
rate_limit_until: null,
|
| 234 |
+
window_reset_at: entry.usage.window_reset_at ?? null,
|
| 235 |
};
|
| 236 |
this.schedulePersist();
|
| 237 |
return true;
|
| 238 |
}
|
| 239 |
|
| 240 |
+
/**
|
| 241 |
+
* Check if the rate limit window has rolled over.
|
| 242 |
+
* If so, auto-reset local usage counters to stay in sync.
|
| 243 |
+
* Called after fetching quota from OpenAI API.
|
| 244 |
+
*/
|
| 245 |
+
syncRateLimitWindow(entryId: string, newResetAt: number | null): void {
|
| 246 |
+
if (newResetAt == null) return;
|
| 247 |
+
const entry = this.accounts.get(entryId);
|
| 248 |
+
if (!entry) return;
|
| 249 |
+
|
| 250 |
+
const oldResetAt = entry.usage.window_reset_at;
|
| 251 |
+
if (oldResetAt != null && oldResetAt !== newResetAt) {
|
| 252 |
+
// Window rolled over — reset local counters
|
| 253 |
+
console.log(`[AccountPool] Rate limit window rolled for ${entryId} (${entry.email ?? "?"}), resetting usage counters`);
|
| 254 |
+
entry.usage.request_count = 0;
|
| 255 |
+
entry.usage.input_tokens = 0;
|
| 256 |
+
entry.usage.output_tokens = 0;
|
| 257 |
+
entry.usage.last_used = null;
|
| 258 |
+
entry.usage.rate_limit_until = null;
|
| 259 |
+
}
|
| 260 |
+
entry.usage.window_reset_at = newResetAt;
|
| 261 |
+
this.schedulePersist();
|
| 262 |
+
}
|
| 263 |
+
|
| 264 |
// ── Query ───────────────────────────────────────────────────────
|
| 265 |
|
| 266 |
getAccounts(): AccountInfo[] {
|
|
|
|
| 279 |
return [...this.accounts.values()];
|
| 280 |
}
|
| 281 |
|
|
|
|
|
|
|
| 282 |
isAuthenticated(): boolean {
|
| 283 |
const now = new Date();
|
| 284 |
for (const entry of this.accounts.values()) {
|
|
|
|
| 288 |
return false;
|
| 289 |
}
|
| 290 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 291 |
/** @deprecated Use getAccounts() instead. */
|
| 292 |
getUserInfo(): { email?: string; accountId?: string; planType?: string } | null {
|
| 293 |
const first = [...this.accounts.values()].find((a) => a.status === "active");
|
|
|
|
| 312 |
return false;
|
| 313 |
}
|
| 314 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 315 |
/** @deprecated Use removeAccount() instead. */
|
| 316 |
clearToken(): void {
|
| 317 |
this.accounts.clear();
|
|
|
|
| 419 |
const raw = readFileSync(ACCOUNTS_FILE, "utf-8");
|
| 420 |
const data = JSON.parse(raw) as AccountsFile;
|
| 421 |
if (Array.isArray(data.accounts)) {
|
| 422 |
+
let needsPersist = false;
|
| 423 |
for (const entry of data.accounts) {
|
| 424 |
if (entry.id && entry.token) {
|
| 425 |
+
// Backfill missing fields from JWT (e.g. planType was null before fix)
|
| 426 |
+
if (!entry.planType || !entry.email || !entry.accountId) {
|
| 427 |
+
const profile = extractUserProfile(entry.token);
|
| 428 |
+
const accountId = extractChatGptAccountId(entry.token);
|
| 429 |
+
if (!entry.planType && profile?.chatgpt_plan_type) {
|
| 430 |
+
entry.planType = profile.chatgpt_plan_type;
|
| 431 |
+
needsPersist = true;
|
| 432 |
+
}
|
| 433 |
+
if (!entry.email && profile?.email) {
|
| 434 |
+
entry.email = profile.email;
|
| 435 |
+
needsPersist = true;
|
| 436 |
+
}
|
| 437 |
+
if (!entry.accountId && accountId) {
|
| 438 |
+
entry.accountId = accountId;
|
| 439 |
+
needsPersist = true;
|
| 440 |
+
}
|
| 441 |
+
}
|
| 442 |
this.accounts.set(entry.id, entry);
|
| 443 |
}
|
| 444 |
}
|
| 445 |
+
if (needsPersist) this.persistNow();
|
| 446 |
}
|
| 447 |
} catch (err) {
|
| 448 |
console.warn("[AccountPool] Failed to load accounts:", err instanceof Error ? err.message : err);
|
|
@@ -39,14 +39,20 @@ export function extractUserProfile(
|
|
| 39 |
const payload = decodeJwtPayload(token);
|
| 40 |
if (!payload) return null;
|
| 41 |
try {
|
| 42 |
-
const profile = payload["https://api.openai.com/profile"];
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 50 |
}
|
| 51 |
} catch {
|
| 52 |
// ignore
|
|
|
|
| 39 |
const payload = decodeJwtPayload(token);
|
| 40 |
if (!payload) return null;
|
| 41 |
try {
|
| 42 |
+
const profile = payload["https://api.openai.com/profile"] as Record<string, unknown> | undefined;
|
| 43 |
+
const auth = payload["https://api.openai.com/auth"] as Record<string, unknown> | undefined;
|
| 44 |
+
|
| 45 |
+
const email = typeof profile?.email === "string" ? profile.email : undefined;
|
| 46 |
+
// chatgpt_plan_type lives in the /auth claim, not /profile
|
| 47 |
+
const chatgpt_plan_type =
|
| 48 |
+
(typeof auth?.chatgpt_plan_type === "string" ? auth.chatgpt_plan_type : undefined) ??
|
| 49 |
+
(typeof profile?.chatgpt_plan_type === "string" ? profile.chatgpt_plan_type : undefined);
|
| 50 |
+
const chatgpt_user_id =
|
| 51 |
+
(typeof auth?.chatgpt_user_id === "string" ? auth.chatgpt_user_id : undefined) ??
|
| 52 |
+
(typeof profile?.chatgpt_user_id === "string" ? profile.chatgpt_user_id : undefined);
|
| 53 |
+
|
| 54 |
+
if (email || chatgpt_plan_type || chatgpt_user_id) {
|
| 55 |
+
return { email, chatgpt_user_id, chatgpt_plan_type };
|
| 56 |
}
|
| 57 |
} catch {
|
| 58 |
// ignore
|
|
@@ -2,6 +2,12 @@
|
|
| 2 |
* RefreshScheduler — per-account JWT auto-refresh.
|
| 3 |
* Schedules a refresh at `exp - margin` for each account.
|
| 4 |
* Uses OAuth refresh_token instead of Codex CLI.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
*/
|
| 6 |
|
| 7 |
import { getConfig } from "../config.js";
|
|
@@ -10,6 +16,13 @@ import { refreshAccessToken } from "./oauth-pkce.js";
|
|
| 10 |
import { jitter, jitterInt } from "../utils/jitter.js";
|
| 11 |
import type { AccountPool } from "./account-pool.js";
|
| 12 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
export class RefreshScheduler {
|
| 14 |
private timers: Map<string, ReturnType<typeof setTimeout>> = new Map();
|
| 15 |
private pool: AccountPool;
|
|
@@ -22,8 +35,24 @@ export class RefreshScheduler {
|
|
| 22 |
/** Schedule refresh for all accounts in the pool. */
|
| 23 |
scheduleAll(): void {
|
| 24 |
for (const entry of this.pool.getAllEntries()) {
|
| 25 |
-
if (entry.status === "active"
|
| 26 |
this.scheduleOne(entry.id, entry.token);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 27 |
}
|
| 28 |
}
|
| 29 |
}
|
|
@@ -87,7 +116,7 @@ export class RefreshScheduler {
|
|
| 87 |
|
| 88 |
if (!entry.refreshToken) {
|
| 89 |
console.warn(
|
| 90 |
-
`[RefreshScheduler] Account ${entryId} has no refresh_token, cannot auto-refresh`,
|
| 91 |
);
|
| 92 |
this.pool.markStatus(entryId, "expired");
|
| 93 |
return;
|
|
@@ -96,8 +125,7 @@ export class RefreshScheduler {
|
|
| 96 |
console.log(`[RefreshScheduler] Refreshing account ${entryId} (${entry.email ?? "?"})`);
|
| 97 |
this.pool.markStatus(entryId, "refreshing");
|
| 98 |
|
| 99 |
-
|
| 100 |
-
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
|
| 101 |
try {
|
| 102 |
const tokens = await refreshAccessToken(entry.refreshToken);
|
| 103 |
// Update token and refresh_token (if a new one was issued)
|
|
@@ -111,15 +139,48 @@ export class RefreshScheduler {
|
|
| 111 |
return;
|
| 112 |
} catch (err) {
|
| 113 |
const msg = err instanceof Error ? err.message : String(err);
|
| 114 |
-
|
| 115 |
-
|
| 116 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 117 |
await new Promise((r) => setTimeout(r, retryDelay));
|
| 118 |
} else {
|
| 119 |
-
console.error(
|
| 120 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 121 |
}
|
| 122 |
}
|
| 123 |
}
|
| 124 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 125 |
}
|
|
|
|
| 2 |
* RefreshScheduler — per-account JWT auto-refresh.
|
| 3 |
* Schedules a refresh at `exp - margin` for each account.
|
| 4 |
* Uses OAuth refresh_token instead of Codex CLI.
|
| 5 |
+
*
|
| 6 |
+
* Features:
|
| 7 |
+
* - Exponential backoff (5 attempts: 5s → 15s → 45s → 135s → 300s)
|
| 8 |
+
* - Permanent failure detection (invalid_grant / invalid_token)
|
| 9 |
+
* - Recovery scheduling (10 min) for temporary failures
|
| 10 |
+
* - Crash recovery: "refreshing" → immediate retry, "expired" + refreshToken → delayed retry
|
| 11 |
*/
|
| 12 |
|
| 13 |
import { getConfig } from "../config.js";
|
|
|
|
| 16 |
import { jitter, jitterInt } from "../utils/jitter.js";
|
| 17 |
import type { AccountPool } from "./account-pool.js";
|
| 18 |
|
| 19 |
+
/** Errors that indicate the refresh token itself is invalid (permanent failure). */
|
| 20 |
+
const PERMANENT_ERRORS = ["invalid_grant", "invalid_token", "access_denied"];
|
| 21 |
+
|
| 22 |
+
const MAX_ATTEMPTS = 5;
|
| 23 |
+
const BASE_DELAY_MS = 5_000;
|
| 24 |
+
const RECOVERY_DELAY_MS = 10 * 60 * 1000; // 10 minutes
|
| 25 |
+
|
| 26 |
export class RefreshScheduler {
|
| 27 |
private timers: Map<string, ReturnType<typeof setTimeout>> = new Map();
|
| 28 |
private pool: AccountPool;
|
|
|
|
| 35 |
/** Schedule refresh for all accounts in the pool. */
|
| 36 |
scheduleAll(): void {
|
| 37 |
for (const entry of this.pool.getAllEntries()) {
|
| 38 |
+
if (entry.status === "active") {
|
| 39 |
this.scheduleOne(entry.id, entry.token);
|
| 40 |
+
} else if (entry.status === "refreshing") {
|
| 41 |
+
// Crash recovery: was mid-refresh when process died
|
| 42 |
+
console.log(`[RefreshScheduler] Account ${entry.id}: recovering from 'refreshing' state`);
|
| 43 |
+
this.doRefresh(entry.id);
|
| 44 |
+
} else if (entry.status === "expired" && entry.refreshToken) {
|
| 45 |
+
// Attempt recovery for expired accounts that still have a refresh token
|
| 46 |
+
const delay = jitterInt(30_000, 0.3);
|
| 47 |
+
console.log(`[RefreshScheduler] Account ${entry.id}: expired with refresh_token, recovery attempt in ${Math.round(delay / 1000)}s`);
|
| 48 |
+
const timer = setTimeout(() => {
|
| 49 |
+
this.timers.delete(entry.id);
|
| 50 |
+
this.doRefresh(entry.id);
|
| 51 |
+
}, delay);
|
| 52 |
+
if (timer.unref) timer.unref();
|
| 53 |
+
this.timers.set(entry.id, timer);
|
| 54 |
+
} else if (entry.status === "expired" && !entry.refreshToken) {
|
| 55 |
+
console.warn(`[RefreshScheduler] Account ${entry.id}: expired with no refresh_token. Re-login required at /`);
|
| 56 |
}
|
| 57 |
}
|
| 58 |
}
|
|
|
|
| 116 |
|
| 117 |
if (!entry.refreshToken) {
|
| 118 |
console.warn(
|
| 119 |
+
`[RefreshScheduler] Account ${entryId} has no refresh_token, cannot auto-refresh. Re-login required at /`,
|
| 120 |
);
|
| 121 |
this.pool.markStatus(entryId, "expired");
|
| 122 |
return;
|
|
|
|
| 125 |
console.log(`[RefreshScheduler] Refreshing account ${entryId} (${entry.email ?? "?"})`);
|
| 126 |
this.pool.markStatus(entryId, "refreshing");
|
| 127 |
|
| 128 |
+
for (let attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
|
|
|
|
| 129 |
try {
|
| 130 |
const tokens = await refreshAccessToken(entry.refreshToken);
|
| 131 |
// Update token and refresh_token (if a new one was issued)
|
|
|
|
| 139 |
return;
|
| 140 |
} catch (err) {
|
| 141 |
const msg = err instanceof Error ? err.message : String(err);
|
| 142 |
+
|
| 143 |
+
// Check for permanent failures
|
| 144 |
+
if (PERMANENT_ERRORS.some((e) => msg.toLowerCase().includes(e))) {
|
| 145 |
+
console.error(`[RefreshScheduler] Permanent failure for ${entryId}: ${msg}`);
|
| 146 |
+
this.pool.markStatus(entryId, "expired");
|
| 147 |
+
return;
|
| 148 |
+
}
|
| 149 |
+
|
| 150 |
+
if (attempt < MAX_ATTEMPTS) {
|
| 151 |
+
// Exponential backoff: 5s, 15s, 45s, 135s, 300s (capped)
|
| 152 |
+
const backoff = Math.min(BASE_DELAY_MS * Math.pow(3, attempt - 1), 300_000);
|
| 153 |
+
const retryDelay = jitterInt(backoff, 0.3);
|
| 154 |
+
console.warn(
|
| 155 |
+
`[RefreshScheduler] Attempt ${attempt}/${MAX_ATTEMPTS} failed for ${entryId}: ${msg}, retrying in ${Math.round(retryDelay / 1000)}s...`,
|
| 156 |
+
);
|
| 157 |
await new Promise((r) => setTimeout(r, retryDelay));
|
| 158 |
} else {
|
| 159 |
+
console.error(
|
| 160 |
+
`[RefreshScheduler] All ${MAX_ATTEMPTS} attempts failed for ${entryId}: ${msg}`,
|
| 161 |
+
);
|
| 162 |
+
// Don't mark expired — schedule recovery attempt in 10 minutes
|
| 163 |
+
this.pool.markStatus(entryId, "active"); // keep active so it can still be used
|
| 164 |
+
this.scheduleRecovery(entryId);
|
| 165 |
}
|
| 166 |
}
|
| 167 |
}
|
| 168 |
}
|
| 169 |
+
|
| 170 |
+
/**
|
| 171 |
+
* Schedule a recovery refresh attempt after all retries are exhausted.
|
| 172 |
+
* Gives the server time to recover from temporary issues.
|
| 173 |
+
*/
|
| 174 |
+
private scheduleRecovery(entryId: string): void {
|
| 175 |
+
const delay = jitterInt(RECOVERY_DELAY_MS, 0.2);
|
| 176 |
+
console.log(
|
| 177 |
+
`[RefreshScheduler] Recovery attempt for ${entryId} in ${Math.round(delay / 60000)}m`,
|
| 178 |
+
);
|
| 179 |
+
const timer = setTimeout(() => {
|
| 180 |
+
this.timers.delete(entryId);
|
| 181 |
+
this.doRefresh(entryId);
|
| 182 |
+
}, delay);
|
| 183 |
+
if (timer.unref) timer.unref();
|
| 184 |
+
this.timers.set(entryId, timer);
|
| 185 |
+
}
|
| 186 |
}
|
|
@@ -15,6 +15,8 @@ export interface AccountUsage {
|
|
| 15 |
output_tokens: number;
|
| 16 |
last_used: string | null;
|
| 17 |
rate_limit_until: string | null;
|
|
|
|
|
|
|
| 18 |
}
|
| 19 |
|
| 20 |
export interface AccountEntry {
|
|
|
|
| 15 |
output_tokens: number;
|
| 16 |
last_used: string | null;
|
| 17 |
rate_limit_until: string | null;
|
| 18 |
+
/** Tracks the current rate limit window end (Unix seconds). When window rolls over, counters reset. */
|
| 19 |
+
window_reset_at?: number | null;
|
| 20 |
}
|
| 21 |
|
| 22 |
export interface AccountEntry {
|
|
@@ -6,7 +6,7 @@ import { z } from "zod";
|
|
| 6 |
const ConfigSchema = z.object({
|
| 7 |
api: z.object({
|
| 8 |
base_url: z.string().default("https://chatgpt.com/backend-api"),
|
| 9 |
-
timeout_seconds: z.number().default(60),
|
| 10 |
}),
|
| 11 |
client: z.object({
|
| 12 |
originator: z.string().default("Codex Desktop"),
|
|
@@ -14,6 +14,7 @@ const ConfigSchema = z.object({
|
|
| 14 |
build_number: z.string().default("517"),
|
| 15 |
platform: z.string().default("darwin"),
|
| 16 |
arch: z.string().default("arm64"),
|
|
|
|
| 17 |
}),
|
| 18 |
model: z.object({
|
| 19 |
default: z.string().default("gpt-5.3-codex"),
|
|
@@ -22,16 +23,16 @@ const ConfigSchema = z.object({
|
|
| 22 |
auth: z.object({
|
| 23 |
jwt_token: z.string().nullable().default(null),
|
| 24 |
chatgpt_oauth: z.boolean().default(true),
|
| 25 |
-
refresh_margin_seconds: z.number().default(300),
|
| 26 |
rotation_strategy: z.enum(["least_used", "round_robin"]).default("least_used"),
|
| 27 |
-
rate_limit_backoff_seconds: z.number().default(60),
|
| 28 |
oauth_client_id: z.string().default("app_EMoamEEZ73f0CkXaXp7hrann"),
|
| 29 |
oauth_auth_endpoint: z.string().default("https://auth.openai.com/oauth/authorize"),
|
| 30 |
oauth_token_endpoint: z.string().default("https://auth.openai.com/oauth/token"),
|
| 31 |
}),
|
| 32 |
server: z.object({
|
| 33 |
host: z.string().default("0.0.0.0"),
|
| 34 |
-
port: z.number().default(8080),
|
| 35 |
proxy_api_key: z.string().nullable().default(null),
|
| 36 |
}),
|
| 37 |
environment: z.object({
|
|
@@ -39,8 +40,8 @@ const ConfigSchema = z.object({
|
|
| 39 |
default_branch: z.string().default("main"),
|
| 40 |
}),
|
| 41 |
session: z.object({
|
| 42 |
-
ttl_minutes: z.number().default(60),
|
| 43 |
-
cleanup_interval_minutes: z.number().default(5),
|
| 44 |
}),
|
| 45 |
tls: z.object({
|
| 46 |
curl_binary: z.string().default("auto"),
|
|
@@ -85,7 +86,10 @@ function applyEnvOverrides(raw: Record<string, unknown>): Record<string, unknown
|
|
| 85 |
(raw.client as Record<string, unknown>).arch = process.env.CODEX_ARCH;
|
| 86 |
}
|
| 87 |
if (process.env.PORT) {
|
| 88 |
-
|
|
|
|
|
|
|
|
|
|
| 89 |
}
|
| 90 |
const proxyEnv = process.env.HTTPS_PROXY || process.env.https_proxy;
|
| 91 |
if (proxyEnv) {
|
|
@@ -129,3 +133,22 @@ export function mutateClientConfig(patch: Partial<AppConfig["client"]>): void {
|
|
| 129 |
if (!_config) throw new Error("Config not loaded");
|
| 130 |
Object.assign(_config.client, patch);
|
| 131 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
const ConfigSchema = z.object({
|
| 7 |
api: z.object({
|
| 8 |
base_url: z.string().default("https://chatgpt.com/backend-api"),
|
| 9 |
+
timeout_seconds: z.number().min(1).default(60),
|
| 10 |
}),
|
| 11 |
client: z.object({
|
| 12 |
originator: z.string().default("Codex Desktop"),
|
|
|
|
| 14 |
build_number: z.string().default("517"),
|
| 15 |
platform: z.string().default("darwin"),
|
| 16 |
arch: z.string().default("arm64"),
|
| 17 |
+
chromium_version: z.string().default("136"),
|
| 18 |
}),
|
| 19 |
model: z.object({
|
| 20 |
default: z.string().default("gpt-5.3-codex"),
|
|
|
|
| 23 |
auth: z.object({
|
| 24 |
jwt_token: z.string().nullable().default(null),
|
| 25 |
chatgpt_oauth: z.boolean().default(true),
|
| 26 |
+
refresh_margin_seconds: z.number().min(0).default(300),
|
| 27 |
rotation_strategy: z.enum(["least_used", "round_robin"]).default("least_used"),
|
| 28 |
+
rate_limit_backoff_seconds: z.number().min(1).default(60),
|
| 29 |
oauth_client_id: z.string().default("app_EMoamEEZ73f0CkXaXp7hrann"),
|
| 30 |
oauth_auth_endpoint: z.string().default("https://auth.openai.com/oauth/authorize"),
|
| 31 |
oauth_token_endpoint: z.string().default("https://auth.openai.com/oauth/token"),
|
| 32 |
}),
|
| 33 |
server: z.object({
|
| 34 |
host: z.string().default("0.0.0.0"),
|
| 35 |
+
port: z.number().min(1).max(65535).default(8080),
|
| 36 |
proxy_api_key: z.string().nullable().default(null),
|
| 37 |
}),
|
| 38 |
environment: z.object({
|
|
|
|
| 40 |
default_branch: z.string().default("main"),
|
| 41 |
}),
|
| 42 |
session: z.object({
|
| 43 |
+
ttl_minutes: z.number().min(1).default(60),
|
| 44 |
+
cleanup_interval_minutes: z.number().min(1).default(5),
|
| 45 |
}),
|
| 46 |
tls: z.object({
|
| 47 |
curl_binary: z.string().default("auto"),
|
|
|
|
| 86 |
(raw.client as Record<string, unknown>).arch = process.env.CODEX_ARCH;
|
| 87 |
}
|
| 88 |
if (process.env.PORT) {
|
| 89 |
+
const parsed = parseInt(process.env.PORT, 10);
|
| 90 |
+
if (!isNaN(parsed)) {
|
| 91 |
+
(raw.server as Record<string, unknown>).port = parsed;
|
| 92 |
+
}
|
| 93 |
}
|
| 94 |
const proxyEnv = process.env.HTTPS_PROXY || process.env.https_proxy;
|
| 95 |
if (proxyEnv) {
|
|
|
|
| 133 |
if (!_config) throw new Error("Config not loaded");
|
| 134 |
Object.assign(_config.client, patch);
|
| 135 |
}
|
| 136 |
+
|
| 137 |
+
/** Reload config from disk (hot-reload after full-update). */
|
| 138 |
+
export function reloadConfig(configDir?: string): AppConfig {
|
| 139 |
+
_config = null;
|
| 140 |
+
return loadConfig(configDir);
|
| 141 |
+
}
|
| 142 |
+
|
| 143 |
+
/** Reload fingerprint from disk (hot-reload after full-update). */
|
| 144 |
+
export function reloadFingerprint(configDir?: string): FingerprintConfig {
|
| 145 |
+
_fingerprint = null;
|
| 146 |
+
return loadFingerprint(configDir);
|
| 147 |
+
}
|
| 148 |
+
|
| 149 |
+
/** Reload both config and fingerprint from disk. */
|
| 150 |
+
export function reloadAllConfigs(configDir?: string): void {
|
| 151 |
+
reloadConfig(configDir);
|
| 152 |
+
reloadFingerprint(configDir);
|
| 153 |
+
console.log("[Config] Hot-reloaded config and fingerprint from disk");
|
| 154 |
+
}
|
|
@@ -29,6 +29,59 @@ function orderHeaders(
|
|
| 29 |
return ordered;
|
| 30 |
}
|
| 31 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 32 |
export function buildHeaders(
|
| 33 |
token: string,
|
| 34 |
accountId?: string | null,
|
|
@@ -44,17 +97,10 @@ export function buildHeaders(
|
|
| 44 |
|
| 45 |
raw["originator"] = config.client.originator;
|
| 46 |
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
raw["User-Agent"] = ua;
|
| 52 |
-
|
| 53 |
-
// Add browser-level default headers (Accept-Encoding, Accept-Language, etc.)
|
| 54 |
-
if (fp.default_headers) {
|
| 55 |
-
for (const [key, value] of Object.entries(fp.default_headers)) {
|
| 56 |
-
raw[key] = value;
|
| 57 |
-
}
|
| 58 |
}
|
| 59 |
|
| 60 |
return orderHeaders(raw, fp.header_order);
|
|
@@ -64,9 +110,25 @@ export function buildHeadersWithContentType(
|
|
| 64 |
token: string,
|
| 65 |
accountId?: string | null,
|
| 66 |
): Record<string, string> {
|
| 67 |
-
const config = getConfig();
|
| 68 |
const fp = getFingerprint();
|
| 69 |
-
const
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 70 |
raw["Content-Type"] = "application/json";
|
|
|
|
|
|
|
| 71 |
return orderHeaders(raw, fp.header_order);
|
| 72 |
}
|
|
|
|
| 29 |
return ordered;
|
| 30 |
}
|
| 31 |
|
| 32 |
+
/**
|
| 33 |
+
* Build the dynamic sec-ch-ua value based on chromium_version from config.
|
| 34 |
+
*/
|
| 35 |
+
function buildSecChUa(): string {
|
| 36 |
+
const cv = getConfig().client.chromium_version;
|
| 37 |
+
return `"Chromium";v="${cv}", "Not:A-Brand";v="24"`;
|
| 38 |
+
}
|
| 39 |
+
|
| 40 |
+
/**
|
| 41 |
+
* Build the User-Agent string from config + fingerprint template.
|
| 42 |
+
*/
|
| 43 |
+
function buildUserAgent(): string {
|
| 44 |
+
const config = getConfig();
|
| 45 |
+
const fp = getFingerprint();
|
| 46 |
+
return fp.user_agent_template
|
| 47 |
+
.replace("{version}", config.client.app_version)
|
| 48 |
+
.replace("{platform}", config.client.platform)
|
| 49 |
+
.replace("{arch}", config.client.arch);
|
| 50 |
+
}
|
| 51 |
+
|
| 52 |
+
/**
|
| 53 |
+
* Build raw headers (unordered) with all fingerprint fields.
|
| 54 |
+
* Does NOT include Authorization, ChatGPT-Account-Id, Content-Type, or Accept.
|
| 55 |
+
*/
|
| 56 |
+
function buildRawDefaultHeaders(): Record<string, string> {
|
| 57 |
+
const fp = getFingerprint();
|
| 58 |
+
const raw: Record<string, string> = {};
|
| 59 |
+
|
| 60 |
+
raw["User-Agent"] = buildUserAgent();
|
| 61 |
+
raw["sec-ch-ua"] = buildSecChUa();
|
| 62 |
+
|
| 63 |
+
// Add static default headers (Accept-Encoding, Accept-Language, sec-fetch-*, etc.)
|
| 64 |
+
if (fp.default_headers) {
|
| 65 |
+
for (const [key, value] of Object.entries(fp.default_headers)) {
|
| 66 |
+
raw[key] = value;
|
| 67 |
+
}
|
| 68 |
+
}
|
| 69 |
+
|
| 70 |
+
return raw;
|
| 71 |
+
}
|
| 72 |
+
|
| 73 |
+
/**
|
| 74 |
+
* Build anonymous headers for non-authenticated requests (OAuth, appcast, etc.).
|
| 75 |
+
* Contains User-Agent, sec-ch-ua, Accept-Encoding, Accept-Language, sec-fetch-*
|
| 76 |
+
* but NOT Authorization, Cookie, or ChatGPT-Account-Id.
|
| 77 |
+
* Headers are ordered per fingerprint config.
|
| 78 |
+
*/
|
| 79 |
+
export function buildAnonymousHeaders(): Record<string, string> {
|
| 80 |
+
const fp = getFingerprint();
|
| 81 |
+
const raw = buildRawDefaultHeaders();
|
| 82 |
+
return orderHeaders(raw, fp.header_order);
|
| 83 |
+
}
|
| 84 |
+
|
| 85 |
export function buildHeaders(
|
| 86 |
token: string,
|
| 87 |
accountId?: string | null,
|
|
|
|
| 97 |
|
| 98 |
raw["originator"] = config.client.originator;
|
| 99 |
|
| 100 |
+
// Merge default headers (User-Agent, sec-ch-ua, Accept-Encoding, etc.)
|
| 101 |
+
const defaults = buildRawDefaultHeaders();
|
| 102 |
+
for (const [key, value] of Object.entries(defaults)) {
|
| 103 |
+
raw[key] = value;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 104 |
}
|
| 105 |
|
| 106 |
return orderHeaders(raw, fp.header_order);
|
|
|
|
| 110 |
  token: string,
  accountId?: string | null,
): Record<string, string> {
  const fp = getFingerprint();
  const config = getConfig();
  const raw: Record<string, string> = {};

  raw["Authorization"] = `Bearer ${token}`;

  // Prefer the explicitly supplied account id; otherwise derive it from the token.
  const acctId = accountId ?? extractChatGptAccountId(token);
  if (acctId) raw["ChatGPT-Account-Id"] = acctId;

  raw["originator"] = config.client.originator;

  // Merge default headers (User-Agent, sec-ch-ua, Accept-Encoding, etc.)
  const defaults = buildRawDefaultHeaders();
  for (const [key, value] of Object.entries(defaults)) {
    raw[key] = value;
  }

  raw["Content-Type"] = "application/json";

  // Single orderHeaders call (no double-sorting)
  return orderHeaders(raw, fp.header_order);
}
|
|
@@ -100,14 +100,28 @@ async function main() {
|
|
| 100 |
port,
|
| 101 |
});
|
| 102 |
|
| 103 |
-
// Graceful shutdown
|
|
|
|
| 104 |
const shutdown = () => {
|
|
|
|
|
|
|
| 105 |
console.log("\n[Shutdown] Cleaning up...");
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
|
| 109 |
-
|
| 110 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 111 |
process.exit(0);
|
| 112 |
};
|
| 113 |
|
|
|
|
| 100 |
port,
|
| 101 |
});
|
| 102 |
|
| 103 |
+
  // Graceful shutdown with timeout protection
  let shutdownCalled = false;
  const shutdown = () => {
    // Idempotent: multiple signals (e.g. SIGINT then SIGTERM) only clean up once.
    if (shutdownCalled) return;
    shutdownCalled = true;
    console.log("\n[Shutdown] Cleaning up...");
    // Watchdog: if cleanup hangs, force-exit after 10s.
    const forceExit = setTimeout(() => {
      console.error("[Shutdown] Timeout after 10s — forcing exit");
      process.exit(1);
    }, 10_000);
    // Don't let the watchdog timer itself keep the event loop alive.
    if (forceExit.unref) forceExit.unref();

    try {
      stopUpdateChecker();
      refreshScheduler.destroy(); // Cancel timers first
      sessionManager.destroy();
      cookieJar.destroy(); // Flush cookies
      accountPool.destroy(); // Flush accounts
    } catch (err) {
      console.error("[Shutdown] Error during cleanup:", err instanceof Error ? err.message : err);
    }
    clearTimeout(forceExit);
    process.exit(0);
  };
|
| 127 |
|
|
@@ -4,12 +4,13 @@ export async function logger(c: Context, next: Next): Promise<void> {
|
|
| 4 |
const start = Date.now();
|
| 5 |
const method = c.req.method;
|
| 6 |
const path = c.req.path;
|
|
|
|
| 7 |
|
| 8 |
-
console.log(`→ ${method} ${path}`);
|
| 9 |
|
| 10 |
await next();
|
| 11 |
|
| 12 |
const ms = Date.now() - start;
|
| 13 |
const status = c.res.status;
|
| 14 |
-
console.log(`← ${method} ${path} ${status} ${ms}ms`);
|
| 15 |
}
|
|
|
|
| 4 |
  const start = Date.now();
  const method = c.req.method;
  const path = c.req.path;
  // Correlation id set by the request-id middleware; "-" when not present.
  const rid = c.get("requestId") ?? "-";

  console.log(`→ ${method} ${path} [${rid}]`);

  await next();

  // Completion log line: status code and wall-clock duration.
  const ms = Date.now() - start;
  const status = c.res.status;
  console.log(`← ${method} ${path} ${status} ${ms}ms [${rid}]`);
}
|
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import type { Context, Next } from "hono";
|
| 2 |
+
import { randomUUID } from "crypto";
|
| 3 |
+
|
| 4 |
+
/**
|
| 5 |
+
* Middleware that generates a unique request ID for each request.
|
| 6 |
+
* Sets X-Request-Id response header and stores it in c.set() for logging.
|
| 7 |
+
*/
|
| 8 |
+
export async function requestId(c: Context, next: Next): Promise<void> {
|
| 9 |
+
const id = c.req.header("x-request-id") ?? randomUUID().slice(0, 8);
|
| 10 |
+
c.set("requestId", id);
|
| 11 |
+
c.header("X-Request-Id", id);
|
| 12 |
+
await next();
|
| 13 |
+
}
|
|
@@ -11,7 +11,7 @@
|
|
| 11 |
|
| 12 |
import { spawn, execFile } from "child_process";
|
| 13 |
import { getConfig } from "../config.js";
|
| 14 |
-
import { resolveCurlBinary, getChromeTlsArgs, getProxyArgs } from "../tls/curl-binary.js";
|
| 15 |
import {
|
| 16 |
buildHeaders,
|
| 17 |
buildHeadersWithContentType,
|
|
@@ -235,10 +235,11 @@ export class CodexApi {
|
|
| 235 |
buildHeaders(this.token, this.accountId),
|
| 236 |
);
|
| 237 |
headers["Accept"] = "application/json";
|
| 238 |
-
//
|
| 239 |
-
//
|
| 240 |
-
|
| 241 |
-
|
|
|
|
| 242 |
|
| 243 |
// Build curl args (Chrome TLS profile + proxy + request params)
|
| 244 |
const args = [...getChromeTlsArgs(), ...getProxyArgs(), "-s", "--compressed", "--max-time", "15"];
|
|
|
|
| 11 |
|
| 12 |
import { spawn, execFile } from "child_process";
|
| 13 |
import { getConfig } from "../config.js";
|
| 14 |
+
import { resolveCurlBinary, getChromeTlsArgs, getProxyArgs, isImpersonate } from "../tls/curl-binary.js";
|
| 15 |
import {
|
| 16 |
buildHeaders,
|
| 17 |
buildHeadersWithContentType,
|
|
|
|
| 235 |
buildHeaders(this.token, this.accountId),
|
| 236 |
);
|
| 237 |
headers["Accept"] = "application/json";
|
| 238 |
+
// When using system curl (not curl-impersonate), downgrade Accept-Encoding
|
| 239 |
+
// to encodings it can always decompress. curl-impersonate supports br/zstd.
|
| 240 |
+
if (!isImpersonate()) {
|
| 241 |
+
headers["Accept-Encoding"] = "gzip, deflate";
|
| 242 |
+
}
|
| 243 |
|
| 244 |
// Build curl args (Chrome TLS profile + proxy + request params)
|
| 245 |
const args = [...getChromeTlsArgs(), ...getProxyArgs(), "-s", "--compressed", "--max-time", "15"];
|
|
@@ -6,6 +6,8 @@
|
|
| 6 |
*
|
| 7 |
* Cookies are auto-captured from every ChatGPT API response's Set-Cookie
|
| 8 |
* headers, and can also be set manually via the management API.
|
|
|
|
|
|
|
| 9 |
*/
|
| 10 |
|
| 11 |
import {
|
|
@@ -19,12 +21,31 @@ import { resolve, dirname } from "path";
|
|
| 19 |
|
| 20 |
const COOKIE_FILE = resolve(process.cwd(), "data", "cookies.json");
|
| 21 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 22 |
export class CookieJar {
|
| 23 |
-
private cookies: Map<string, Record<string,
|
| 24 |
private persistTimer: ReturnType<typeof setTimeout> | null = null;
|
|
|
|
| 25 |
|
| 26 |
constructor() {
|
| 27 |
this.load();
|
|
|
|
|
|
|
|
|
|
|
|
|
| 28 |
}
|
| 29 |
|
| 30 |
/**
|
|
@@ -41,10 +62,12 @@ export class CookieJar {
|
|
| 41 |
if (eq === -1) continue;
|
| 42 |
const name = part.slice(0, eq).trim();
|
| 43 |
const value = part.slice(eq + 1).trim();
|
| 44 |
-
if (name) existing[name] = value;
|
| 45 |
}
|
| 46 |
} else {
|
| 47 |
-
|
|
|
|
|
|
|
| 48 |
}
|
| 49 |
|
| 50 |
this.cookies.set(accountId, existing);
|
|
@@ -58,9 +81,13 @@ export class CookieJar {
|
|
| 58 |
getCookieHeader(accountId: string): string | null {
|
| 59 |
const cookies = this.cookies.get(accountId);
|
| 60 |
if (!cookies || Object.keys(cookies).length === 0) return null;
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 64 |
}
|
| 65 |
|
| 66 |
/**
|
|
@@ -68,36 +95,11 @@ export class CookieJar {
|
|
| 68 |
* Call this after every successful fetch to chatgpt.com.
|
| 69 |
*/
|
| 70 |
capture(accountId: string, response: Response): void {
|
| 71 |
-
// getSetCookie() returns individual Set-Cookie header values
|
| 72 |
const setCookies =
|
| 73 |
typeof response.headers.getSetCookie === "function"
|
| 74 |
? response.headers.getSetCookie()
|
| 75 |
: [];
|
| 76 |
-
|
| 77 |
-
if (setCookies.length === 0) return;
|
| 78 |
-
|
| 79 |
-
const existing = this.cookies.get(accountId) ?? {};
|
| 80 |
-
let changed = false;
|
| 81 |
-
|
| 82 |
-
for (const raw of setCookies) {
|
| 83 |
-
// Format: "name=value; Path=/; Domain=...; ..."
|
| 84 |
-
const semi = raw.indexOf(";");
|
| 85 |
-
const pair = semi === -1 ? raw : raw.slice(0, semi);
|
| 86 |
-
const eq = pair.indexOf("=");
|
| 87 |
-
if (eq === -1) continue;
|
| 88 |
-
|
| 89 |
-
const name = pair.slice(0, eq).trim();
|
| 90 |
-
const value = pair.slice(eq + 1).trim();
|
| 91 |
-
if (name && existing[name] !== value) {
|
| 92 |
-
existing[name] = value;
|
| 93 |
-
changed = true;
|
| 94 |
-
}
|
| 95 |
-
}
|
| 96 |
-
|
| 97 |
-
if (changed) {
|
| 98 |
-
this.cookies.set(accountId, existing);
|
| 99 |
-
this.schedulePersist();
|
| 100 |
-
}
|
| 101 |
}
|
| 102 |
|
| 103 |
/**
|
|
@@ -108,30 +110,65 @@ export class CookieJar {
|
|
| 108 |
|
| 109 |
const existing = this.cookies.get(accountId) ?? {};
|
| 110 |
let changed = false;
|
|
|
|
| 111 |
|
| 112 |
for (const raw of setCookies) {
|
| 113 |
-
const
|
| 114 |
-
const pair =
|
| 115 |
const eq = pair.indexOf("=");
|
| 116 |
if (eq === -1) continue;
|
| 117 |
|
| 118 |
const name = pair.slice(0, eq).trim();
|
| 119 |
const value = pair.slice(eq + 1).trim();
|
| 120 |
-
if (
|
| 121 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 122 |
changed = true;
|
|
|
|
| 123 |
}
|
| 124 |
}
|
| 125 |
|
| 126 |
if (changed) {
|
| 127 |
this.cookies.set(accountId, existing);
|
| 128 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 129 |
}
|
| 130 |
}
|
| 131 |
|
| 132 |
/** Get raw cookie record for an account. */
|
| 133 |
get(accountId: string): Record<string, string> | null {
|
| 134 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 135 |
}
|
| 136 |
|
| 137 |
/** Clear all cookies for an account. */
|
|
@@ -141,6 +178,21 @@ export class CookieJar {
|
|
| 141 |
}
|
| 142 |
}
|
| 143 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 144 |
// ── Persistence ──────────────────────────────────────────────────
|
| 145 |
|
| 146 |
private schedulePersist(): void {
|
|
@@ -159,7 +211,15 @@ export class CookieJar {
|
|
| 159 |
try {
|
| 160 |
const dir = dirname(COOKIE_FILE);
|
| 161 |
if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
|
| 162 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 163 |
const tmpFile = COOKIE_FILE + ".tmp";
|
| 164 |
writeFileSync(tmpFile, JSON.stringify(data, null, 2), "utf-8");
|
| 165 |
renameSync(tmpFile, COOKIE_FILE);
|
|
@@ -172,10 +232,28 @@ export class CookieJar {
|
|
| 172 |
try {
|
| 173 |
if (!existsSync(COOKIE_FILE)) return;
|
| 174 |
const raw = readFileSync(COOKIE_FILE, "utf-8");
|
| 175 |
-
const data = JSON.parse(raw)
|
| 176 |
-
|
| 177 |
-
|
| 178 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 179 |
}
|
| 180 |
}
|
| 181 |
} catch (err) {
|
|
@@ -188,6 +266,7 @@ export class CookieJar {
|
|
| 188 |
clearTimeout(this.persistTimer);
|
| 189 |
this.persistTimer = null;
|
| 190 |
}
|
|
|
|
| 191 |
this.persistNow();
|
| 192 |
}
|
| 193 |
}
|
|
|
|
| 6 |
*
|
| 7 |
* Cookies are auto-captured from every ChatGPT API response's Set-Cookie
|
| 8 |
* headers, and can also be set manually via the management API.
|
| 9 |
+
*
|
| 10 |
+
* Persistence format v2: includes expiry timestamps.
|
| 11 |
*/
|
| 12 |
|
| 13 |
import {
|
|
|
|
| 21 |
|
| 22 |
const COOKIE_FILE = resolve(process.cwd(), "data", "cookies.json");
|
| 23 |
|
| 24 |
+
/** A single stored cookie: its value plus expiry metadata. */
interface StoredCookie {
  value: string;
  expires: number | null; // Unix ms timestamp, null = session cookie
}

/** v2 persistence format: versioned envelope keyed by account id. */
interface CookieFileV2 {
  _version: 2;
  accounts: Record<string, Record<string, { value: string; expires: number | null }>>;
}

/** Critical cookie names that trigger immediate persistence on change */
const CRITICAL_COOKIES = new Set(["cf_clearance", "__cf_bm"]);
|
| 37 |
+
|
| 38 |
export class CookieJar {
|
| 39 |
+
  // accountId → { cookieName → StoredCookie }
  private cookies: Map<string, Record<string, StoredCookie>> = new Map();
  // Debounce handle for batched persistence to disk.
  private persistTimer: ReturnType<typeof setTimeout> | null = null;
  // Periodic sweep that evicts expired cookies.
  private cleanupTimer: ReturnType<typeof setInterval>;

  constructor() {
    this.load();
    this.cleanupExpired();
    // Clean up expired cookies every 5 minutes
    this.cleanupTimer = setInterval(() => this.cleanupExpired(), 5 * 60 * 1000);
    // Don't keep the process alive just for the cleanup sweep.
    if (this.cleanupTimer.unref) this.cleanupTimer.unref();
  }
|
| 50 |
|
| 51 |
/**
|
|
|
|
| 62 |
if (eq === -1) continue;
|
| 63 |
const name = part.slice(0, eq).trim();
|
| 64 |
const value = part.slice(eq + 1).trim();
|
| 65 |
+
if (name) existing[name] = { value, expires: null };
|
| 66 |
}
|
| 67 |
} else {
|
| 68 |
+
for (const [k, v] of Object.entries(cookies)) {
|
| 69 |
+
existing[k] = { value: v, expires: null };
|
| 70 |
+
}
|
| 71 |
}
|
| 72 |
|
| 73 |
this.cookies.set(accountId, existing);
|
|
|
|
| 81 |
getCookieHeader(accountId: string): string | null {
|
| 82 |
const cookies = this.cookies.get(accountId);
|
| 83 |
if (!cookies || Object.keys(cookies).length === 0) return null;
|
| 84 |
+
const now = Date.now();
|
| 85 |
+
const pairs: string[] = [];
|
| 86 |
+
for (const [k, c] of Object.entries(cookies)) {
|
| 87 |
+
if (c.expires !== null && c.expires <= now) continue; // skip expired
|
| 88 |
+
pairs.push(`${k}=${c.value}`);
|
| 89 |
+
}
|
| 90 |
+
return pairs.length > 0 ? pairs.join("; ") : null;
|
| 91 |
}
|
| 92 |
|
| 93 |
/**
|
|
|
|
| 95 |
* Call this after every successful fetch to chatgpt.com.
|
| 96 |
*/
|
| 97 |
capture(accountId: string, response: Response): void {
|
|
|
|
| 98 |
const setCookies =
|
| 99 |
typeof response.headers.getSetCookie === "function"
|
| 100 |
? response.headers.getSetCookie()
|
| 101 |
: [];
|
| 102 |
+
this.captureRaw(accountId, setCookies);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 103 |
}
|
| 104 |
|
| 105 |
/**
|
|
|
|
| 110 |
|
| 111 |
const existing = this.cookies.get(accountId) ?? {};
|
| 112 |
let changed = false;
|
| 113 |
+
let hasCritical = false;
|
| 114 |
|
| 115 |
for (const raw of setCookies) {
|
| 116 |
+
const parts = raw.split(";").map((s) => s.trim());
|
| 117 |
+
const pair = parts[0];
|
| 118 |
const eq = pair.indexOf("=");
|
| 119 |
if (eq === -1) continue;
|
| 120 |
|
| 121 |
const name = pair.slice(0, eq).trim();
|
| 122 |
const value = pair.slice(eq + 1).trim();
|
| 123 |
+
if (!name) continue;
|
| 124 |
+
|
| 125 |
+
// Parse expiry from attributes
|
| 126 |
+
let expires: number | null = null;
|
| 127 |
+
for (let i = 1; i < parts.length; i++) {
|
| 128 |
+
const attr = parts[i];
|
| 129 |
+
const attrLower = attr.toLowerCase();
|
| 130 |
+
if (attrLower.startsWith("max-age=")) {
|
| 131 |
+
const seconds = parseInt(attr.slice(8), 10);
|
| 132 |
+
if (!isNaN(seconds)) {
|
| 133 |
+
expires = seconds <= 0 ? 0 : Date.now() + seconds * 1000;
|
| 134 |
+
}
|
| 135 |
+
break; // Max-Age takes precedence over Expires
|
| 136 |
+
}
|
| 137 |
+
if (attrLower.startsWith("expires=")) {
|
| 138 |
+
const date = new Date(attr.slice(8));
|
| 139 |
+
if (!isNaN(date.getTime())) {
|
| 140 |
+
expires = date.getTime();
|
| 141 |
+
}
|
| 142 |
+
}
|
| 143 |
+
}
|
| 144 |
+
|
| 145 |
+
const prev = existing[name];
|
| 146 |
+
if (!prev || prev.value !== value || prev.expires !== expires) {
|
| 147 |
+
existing[name] = { value, expires };
|
| 148 |
changed = true;
|
| 149 |
+
if (CRITICAL_COOKIES.has(name)) hasCritical = true;
|
| 150 |
}
|
| 151 |
}
|
| 152 |
|
| 153 |
if (changed) {
|
| 154 |
this.cookies.set(accountId, existing);
|
| 155 |
+
if (hasCritical) {
|
| 156 |
+
this.persistNow(); // Critical cookie — persist immediately
|
| 157 |
+
} else {
|
| 158 |
+
this.schedulePersist();
|
| 159 |
+
}
|
| 160 |
}
|
| 161 |
}
|
| 162 |
|
| 163 |
/** Get raw cookie record for an account. */
|
| 164 |
get(accountId: string): Record<string, string> | null {
|
| 165 |
+
const cookies = this.cookies.get(accountId);
|
| 166 |
+
if (!cookies) return null;
|
| 167 |
+
const result: Record<string, string> = {};
|
| 168 |
+
for (const [k, c] of Object.entries(cookies)) {
|
| 169 |
+
result[k] = c.value;
|
| 170 |
+
}
|
| 171 |
+
return result;
|
| 172 |
}
|
| 173 |
|
| 174 |
/** Clear all cookies for an account. */
|
|
|
|
| 178 |
}
|
| 179 |
}
|
| 180 |
|
| 181 |
+
/** Remove expired cookies from all accounts. */
|
| 182 |
+
private cleanupExpired(): void {
|
| 183 |
+
const now = Date.now();
|
| 184 |
+
let changed = false;
|
| 185 |
+
for (const [, cookies] of this.cookies) {
|
| 186 |
+
for (const [name, c] of Object.entries(cookies)) {
|
| 187 |
+
if (c.expires !== null && c.expires <= now) {
|
| 188 |
+
delete cookies[name];
|
| 189 |
+
changed = true;
|
| 190 |
+
}
|
| 191 |
+
}
|
| 192 |
+
}
|
| 193 |
+
if (changed) this.schedulePersist();
|
| 194 |
+
}
|
| 195 |
+
|
| 196 |
// ── Persistence ──────────────────────────────────────────────────
|
| 197 |
|
| 198 |
private schedulePersist(): void {
|
|
|
|
| 211 |
try {
|
| 212 |
const dir = dirname(COOKIE_FILE);
|
| 213 |
if (!existsSync(dir)) mkdirSync(dir, { recursive: true });
|
| 214 |
+
|
| 215 |
+
// Persist v2 format with expiry info
|
| 216 |
+
const data: CookieFileV2 = { _version: 2, accounts: {} };
|
| 217 |
+
for (const [acct, cookies] of this.cookies) {
|
| 218 |
+
data.accounts[acct] = {};
|
| 219 |
+
for (const [k, c] of Object.entries(cookies)) {
|
| 220 |
+
data.accounts[acct][k] = { value: c.value, expires: c.expires };
|
| 221 |
+
}
|
| 222 |
+
}
|
| 223 |
const tmpFile = COOKIE_FILE + ".tmp";
|
| 224 |
writeFileSync(tmpFile, JSON.stringify(data, null, 2), "utf-8");
|
| 225 |
renameSync(tmpFile, COOKIE_FILE);
|
|
|
|
| 232 |
try {
|
| 233 |
if (!existsSync(COOKIE_FILE)) return;
|
| 234 |
const raw = readFileSync(COOKIE_FILE, "utf-8");
|
| 235 |
+
const data = JSON.parse(raw);
|
| 236 |
+
|
| 237 |
+
if (data && data._version === 2 && data.accounts) {
|
| 238 |
+
// v2 format: { _version: 2, accounts: { acct: { name: { value, expires } } } }
|
| 239 |
+
for (const [acct, cookies] of Object.entries(data.accounts as Record<string, Record<string, { value: string; expires: number | null }>>)) {
|
| 240 |
+
const record: Record<string, StoredCookie> = {};
|
| 241 |
+
for (const [k, c] of Object.entries(cookies)) {
|
| 242 |
+
record[k] = { value: c.value, expires: c.expires ?? null };
|
| 243 |
+
}
|
| 244 |
+
this.cookies.set(acct, record);
|
| 245 |
+
}
|
| 246 |
+
} else {
|
| 247 |
+
// v1 format: { acct: { name: "value" } } (no expiry)
|
| 248 |
+
for (const [key, val] of Object.entries(data as Record<string, unknown>)) {
|
| 249 |
+
if (key === "_version") continue;
|
| 250 |
+
if (typeof val === "object" && val !== null) {
|
| 251 |
+
const record: Record<string, StoredCookie> = {};
|
| 252 |
+
for (const [k, v] of Object.entries(val as Record<string, string>)) {
|
| 253 |
+
record[k] = { value: v, expires: null };
|
| 254 |
+
}
|
| 255 |
+
this.cookies.set(key, record);
|
| 256 |
+
}
|
| 257 |
}
|
| 258 |
}
|
| 259 |
} catch (err) {
|
|
|
|
| 266 |
clearTimeout(this.persistTimer);
|
| 267 |
this.persistTimer = null;
|
| 268 |
}
|
| 269 |
+
clearInterval(this.cleanupTimer);
|
| 270 |
this.persistNow();
|
| 271 |
}
|
| 272 |
}
|
|
@@ -95,7 +95,12 @@ export function createAccountRoutes(
|
|
| 95 |
try {
|
| 96 |
const api = makeApi(acct.id, entry.token, entry.accountId);
|
| 97 |
const usage = await api.getUsage();
|
| 98 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 99 |
} catch {
|
| 100 |
return acct; // skip on error — no quota field
|
| 101 |
}
|
|
|
|
| 95 |
try {
|
| 96 |
const api = makeApi(acct.id, entry.token, entry.accountId);
|
| 97 |
const usage = await api.getUsage();
|
| 98 |
+
// Sync rate limit window — auto-reset local counters on window rollover
|
| 99 |
+
const resetAt = usage.rate_limit.primary_window?.reset_at ?? null;
|
| 100 |
+
pool.syncRateLimitWindow(acct.id, resetAt);
|
| 101 |
+
// Re-read usage after potential reset
|
| 102 |
+
const freshAcct = pool.getAccounts().find((a) => a.id === acct.id) ?? acct;
|
| 103 |
+
return { ...freshAcct, quota: toQuota(usage) };
|
| 104 |
} catch {
|
| 105 |
return acct; // skip on error — no quota field
|
| 106 |
}
|
|
@@ -157,6 +157,14 @@ export async function handleProxyRequest(
|
|
| 157 |
);
|
| 158 |
if (err.status === 429) {
|
| 159 |
accountPool.markRateLimited(entryId);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 160 |
c.status(429);
|
| 161 |
return c.json(fmt.format429(err.message));
|
| 162 |
}
|
|
|
|
| 157 |
);
|
| 158 |
if (err.status === 429) {
|
| 159 |
accountPool.markRateLimited(entryId);
|
| 160 |
+
// Note: markRateLimited releases the lock but does not increment
|
| 161 |
+
// request_count. We intentionally count 429s as requests for
|
| 162 |
+
// accurate load tracking across accounts.
|
| 163 |
+
const entry = accountPool.getEntry(entryId);
|
| 164 |
+
if (entry) {
|
| 165 |
+
entry.usage.request_count++;
|
| 166 |
+
entry.usage.last_used = new Date().toISOString();
|
| 167 |
+
}
|
| 168 |
c.status(429);
|
| 169 |
return c.json(fmt.format429(err.message));
|
| 170 |
}
|
|
@@ -1,6 +1,8 @@
|
|
| 1 |
import { createHash } from "crypto";
|
| 2 |
import { getConfig } from "../config.js";
|
| 3 |
|
|
|
|
|
|
|
| 4 |
interface Session {
|
| 5 |
taskId: string;
|
| 6 |
turnId: string;
|
|
@@ -69,6 +71,18 @@ export class SessionManager {
|
|
| 69 |
messages: Array<{ role: string; content: string }>,
|
| 70 |
): void {
|
| 71 |
const hash = this.hashMessages(messages);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 72 |
this.sessions.set(taskId, {
|
| 73 |
taskId,
|
| 74 |
turnId,
|
|
|
|
| 1 |
import { createHash } from "crypto";
|
| 2 |
import { getConfig } from "../config.js";
|
| 3 |
|
| 4 |
+
const MAX_SESSIONS = 10000;
|
| 5 |
+
|
| 6 |
interface Session {
|
| 7 |
taskId: string;
|
| 8 |
turnId: string;
|
|
|
|
| 71 |
messages: Array<{ role: string; content: string }>,
|
| 72 |
): void {
|
| 73 |
const hash = this.hashMessages(messages);
|
| 74 |
+
// Evict oldest session if at capacity
|
| 75 |
+
if (this.sessions.size >= MAX_SESSIONS) {
|
| 76 |
+
let oldestKey: string | null = null;
|
| 77 |
+
let oldestTime = Infinity;
|
| 78 |
+
for (const [key, s] of this.sessions) {
|
| 79 |
+
if (s.createdAt < oldestTime) {
|
| 80 |
+
oldestTime = s.createdAt;
|
| 81 |
+
oldestKey = key;
|
| 82 |
+
}
|
| 83 |
+
}
|
| 84 |
+
if (oldestKey) this.sessions.delete(oldestKey);
|
| 85 |
+
}
|
| 86 |
this.sessions.set(taskId, {
|
| 87 |
taskId,
|
| 88 |
turnId,
|
|
@@ -210,6 +210,15 @@ export function getProxyArgs(): string[] {
|
|
| 210 |
return [];
|
| 211 |
}
|
| 212 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 213 |
/**
|
| 214 |
* Reset the cached binary path (useful for testing).
|
| 215 |
*/
|
|
|
|
| 210 |
return [];
|
| 211 |
}
|
| 212 |
|
| 213 |
+
/**
 * Check if the resolved curl binary is curl-impersonate.
 * When true, it supports br/zstd decompression natively.
 */
export function isImpersonate(): boolean {
  resolveCurlBinary(); // ensure resolved — presumably sets the module-level _isImpersonate flag
  return _isImpersonate;
}
|
| 221 |
+
|
| 222 |
/**
|
| 223 |
* Reset the cached binary path (useful for testing).
|
| 224 |
*/
|
|
@@ -8,7 +8,8 @@
|
|
| 8 |
*/
|
| 9 |
|
| 10 |
import { execFile } from "child_process";
|
| 11 |
-
import { resolveCurlBinary, getChromeTlsArgs } from "./curl-binary.js";
|
|
|
|
| 12 |
|
| 13 |
export interface CurlFetchResponse {
|
| 14 |
status: number;
|
|
@@ -24,12 +25,23 @@ const STATUS_SEPARATOR = "\n__CURL_HTTP_STATUS__";
|
|
| 24 |
export function curlFetchGet(url: string): Promise<CurlFetchResponse> {
|
| 25 |
const args = [
|
| 26 |
...getChromeTlsArgs(),
|
|
|
|
| 27 |
"-s", "-S",
|
| 28 |
"--compressed",
|
| 29 |
"--max-time", "30",
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 30 |
"-w", STATUS_SEPARATOR + "%{http_code}",
|
| 31 |
url,
|
| 32 |
-
|
| 33 |
|
| 34 |
return execCurl(args);
|
| 35 |
}
|
|
@@ -44,15 +56,26 @@ export function curlFetchPost(
|
|
| 44 |
): Promise<CurlFetchResponse> {
|
| 45 |
const args = [
|
| 46 |
...getChromeTlsArgs(),
|
|
|
|
| 47 |
"-s", "-S",
|
| 48 |
"--compressed",
|
| 49 |
"--max-time", "30",
|
| 50 |
"-X", "POST",
|
| 51 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 52 |
"-d", body,
|
| 53 |
"-w", STATUS_SEPARATOR + "%{http_code}",
|
| 54 |
url,
|
| 55 |
-
|
| 56 |
|
| 57 |
return execCurl(args);
|
| 58 |
}
|
|
|
|
| 8 |
*/
|
| 9 |
|
| 10 |
import { execFile } from "child_process";
|
| 11 |
+
import { resolveCurlBinary, getChromeTlsArgs, getProxyArgs } from "./curl-binary.js";
|
| 12 |
+
import { buildAnonymousHeaders } from "../fingerprint/manager.js";
|
| 13 |
|
| 14 |
export interface CurlFetchResponse {
|
| 15 |
status: number;
|
|
|
|
| 25 |
export function curlFetchGet(url: string): Promise<CurlFetchResponse> {
|
| 26 |
const args = [
|
| 27 |
...getChromeTlsArgs(),
|
| 28 |
+
...getProxyArgs(),
|
| 29 |
"-s", "-S",
|
| 30 |
"--compressed",
|
| 31 |
"--max-time", "30",
|
| 32 |
+
];
|
| 33 |
+
|
| 34 |
+
// Inject fingerprint headers (User-Agent, sec-ch-ua, Accept-Encoding, etc.)
|
| 35 |
+
const fpHeaders = buildAnonymousHeaders();
|
| 36 |
+
for (const [key, value] of Object.entries(fpHeaders)) {
|
| 37 |
+
args.push("-H", `${key}: ${value}`);
|
| 38 |
+
}
|
| 39 |
+
args.push("-H", "Expect:");
|
| 40 |
+
|
| 41 |
+
args.push(
|
| 42 |
"-w", STATUS_SEPARATOR + "%{http_code}",
|
| 43 |
url,
|
| 44 |
+
);
|
| 45 |
|
| 46 |
return execCurl(args);
|
| 47 |
}
|
|
|
|
| 56 |
): Promise<CurlFetchResponse> {
|
| 57 |
const args = [
|
| 58 |
...getChromeTlsArgs(),
|
| 59 |
+
...getProxyArgs(),
|
| 60 |
"-s", "-S",
|
| 61 |
"--compressed",
|
| 62 |
"--max-time", "30",
|
| 63 |
"-X", "POST",
|
| 64 |
+
];
|
| 65 |
+
|
| 66 |
+
// Inject fingerprint headers (User-Agent, sec-ch-ua, Accept-Encoding, etc.)
|
| 67 |
+
const fpHeaders = buildAnonymousHeaders();
|
| 68 |
+
for (const [key, value] of Object.entries(fpHeaders)) {
|
| 69 |
+
args.push("-H", `${key}: ${value}`);
|
| 70 |
+
}
|
| 71 |
+
args.push("-H", `Content-Type: ${contentType}`);
|
| 72 |
+
args.push("-H", "Expect:");
|
| 73 |
+
|
| 74 |
+
args.push(
|
| 75 |
"-d", body,
|
| 76 |
"-w", STATUS_SEPARATOR + "%{http_code}",
|
| 77 |
url,
|
| 78 |
+
);
|
| 79 |
|
| 80 |
return execCurl(args);
|
| 81 |
}
|
|
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
|
| 2 |
+
* Shared Codex SSE event data extraction layer.
|
| 3 |
+
*
|
| 4 |
+
* The three translation files (OpenAI, Anthropic, Gemini) all extract
|
| 5 |
+
* the same data from Codex events — this module centralizes that logic.
|
| 6 |
+
*/
|
| 7 |
+
|
| 8 |
+
import type { CodexApi, CodexSSEEvent } from "../proxy/codex-api.js";
|
| 9 |
+
import {
|
| 10 |
+
parseCodexEvent,
|
| 11 |
+
type TypedCodexEvent,
|
| 12 |
+
} from "../types/codex-events.js";
|
| 13 |
+
|
| 14 |
+
export interface UsageInfo {
|
| 15 |
+
input_tokens: number;
|
| 16 |
+
output_tokens: number;
|
| 17 |
+
}
|
| 18 |
+
|
| 19 |
+
export interface ExtractedEvent {
|
| 20 |
+
typed: TypedCodexEvent;
|
| 21 |
+
responseId?: string;
|
| 22 |
+
textDelta?: string;
|
| 23 |
+
usage?: UsageInfo;
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
/**
|
| 27 |
+
* Iterate over a Codex SSE stream, parsing + extracting common fields.
|
| 28 |
+
* Yields ExtractedEvent with pre-extracted responseId, textDelta, and usage.
|
| 29 |
+
*/
|
| 30 |
+
export async function* iterateCodexEvents(
|
| 31 |
+
codexApi: CodexApi,
|
| 32 |
+
rawResponse: Response,
|
| 33 |
+
): AsyncGenerator<ExtractedEvent> {
|
| 34 |
+
for await (const raw of codexApi.parseStream(rawResponse)) {
|
| 35 |
+
const typed = parseCodexEvent(raw);
|
| 36 |
+
const extracted: ExtractedEvent = { typed };
|
| 37 |
+
|
| 38 |
+
switch (typed.type) {
|
| 39 |
+
case "response.created":
|
| 40 |
+
case "response.in_progress":
|
| 41 |
+
if (typed.response.id) extracted.responseId = typed.response.id;
|
| 42 |
+
break;
|
| 43 |
+
|
| 44 |
+
case "response.output_text.delta":
|
| 45 |
+
extracted.textDelta = typed.delta;
|
| 46 |
+
break;
|
| 47 |
+
|
| 48 |
+
case "response.completed":
|
| 49 |
+
if (typed.response.id) extracted.responseId = typed.response.id;
|
| 50 |
+
if (typed.response.usage) extracted.usage = typed.response.usage;
|
| 51 |
+
break;
|
| 52 |
+
}
|
| 53 |
+
|
| 54 |
+
yield extracted;
|
| 55 |
+
}
|
| 56 |
+
}
|
|
@@ -15,6 +15,7 @@ import type {
|
|
| 15 |
AnthropicMessagesResponse,
|
| 16 |
AnthropicUsage,
|
| 17 |
} from "../types/anthropic.js";
|
|
|
|
| 18 |
|
| 19 |
export interface AnthropicUsageInfo {
|
| 20 |
input_tokens: number;
|
|
@@ -64,37 +65,25 @@ export async function* streamCodexToAnthropic(
|
|
| 64 |
});
|
| 65 |
|
| 66 |
// 3. Process Codex stream events
|
| 67 |
-
for await (const evt of
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
switch (evt.event) {
|
| 71 |
-
case "response.created":
|
| 72 |
-
case "response.in_progress": {
|
| 73 |
-
const resp = data.response as Record<string, unknown> | undefined;
|
| 74 |
-
if (resp?.id) {
|
| 75 |
-
onResponseId?.(resp.id as string);
|
| 76 |
-
}
|
| 77 |
-
break;
|
| 78 |
-
}
|
| 79 |
|
|
|
|
| 80 |
case "response.output_text.delta": {
|
| 81 |
-
|
| 82 |
-
if (delta) {
|
| 83 |
yield formatSSE("content_block_delta", {
|
| 84 |
type: "content_block_delta",
|
| 85 |
index: 0,
|
| 86 |
-
delta: { type: "text_delta", text:
|
| 87 |
});
|
| 88 |
}
|
| 89 |
break;
|
| 90 |
}
|
| 91 |
|
| 92 |
case "response.completed": {
|
| 93 |
-
|
| 94 |
-
|
| 95 |
-
|
| 96 |
-
inputTokens = u.input_tokens ?? 0;
|
| 97 |
-
outputTokens = u.output_tokens ?? 0;
|
| 98 |
onUsage?.({ input_tokens: inputTokens, output_tokens: outputTokens });
|
| 99 |
}
|
| 100 |
break;
|
|
@@ -140,32 +129,12 @@ export async function collectCodexToAnthropicResponse(
|
|
| 140 |
let outputTokens = 0;
|
| 141 |
let responseId: string | null = null;
|
| 142 |
|
| 143 |
-
for await (const evt of
|
| 144 |
-
|
| 145 |
-
|
| 146 |
-
|
| 147 |
-
|
| 148 |
-
|
| 149 |
-
const resp = data.response as Record<string, unknown> | undefined;
|
| 150 |
-
if (resp?.id) responseId = resp.id as string;
|
| 151 |
-
break;
|
| 152 |
-
}
|
| 153 |
-
|
| 154 |
-
case "response.output_text.delta": {
|
| 155 |
-
fullText += (data.delta as string) ?? "";
|
| 156 |
-
break;
|
| 157 |
-
}
|
| 158 |
-
|
| 159 |
-
case "response.completed": {
|
| 160 |
-
const resp = data.response as Record<string, unknown> | undefined;
|
| 161 |
-
if (resp?.id) responseId = resp.id as string;
|
| 162 |
-
if (resp?.usage) {
|
| 163 |
-
const u = resp.usage as Record<string, number>;
|
| 164 |
-
inputTokens = u.input_tokens ?? 0;
|
| 165 |
-
outputTokens = u.output_tokens ?? 0;
|
| 166 |
-
}
|
| 167 |
-
break;
|
| 168 |
-
}
|
| 169 |
}
|
| 170 |
}
|
| 171 |
|
|
|
|
| 15 |
AnthropicMessagesResponse,
|
| 16 |
AnthropicUsage,
|
| 17 |
} from "../types/anthropic.js";
|
| 18 |
+
import { iterateCodexEvents } from "./codex-event-extractor.js";
|
| 19 |
|
| 20 |
export interface AnthropicUsageInfo {
|
| 21 |
input_tokens: number;
|
|
|
|
| 65 |
});
|
| 66 |
|
| 67 |
// 3. Process Codex stream events
|
| 68 |
+
for await (const evt of iterateCodexEvents(codexApi, rawResponse)) {
|
| 69 |
+
if (evt.responseId) onResponseId?.(evt.responseId);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 70 |
|
| 71 |
+
switch (evt.typed.type) {
|
| 72 |
case "response.output_text.delta": {
|
| 73 |
+
if (evt.textDelta) {
|
|
|
|
| 74 |
yield formatSSE("content_block_delta", {
|
| 75 |
type: "content_block_delta",
|
| 76 |
index: 0,
|
| 77 |
+
delta: { type: "text_delta", text: evt.textDelta },
|
| 78 |
});
|
| 79 |
}
|
| 80 |
break;
|
| 81 |
}
|
| 82 |
|
| 83 |
case "response.completed": {
|
| 84 |
+
if (evt.usage) {
|
| 85 |
+
inputTokens = evt.usage.input_tokens;
|
| 86 |
+
outputTokens = evt.usage.output_tokens;
|
|
|
|
|
|
|
| 87 |
onUsage?.({ input_tokens: inputTokens, output_tokens: outputTokens });
|
| 88 |
}
|
| 89 |
break;
|
|
|
|
| 129 |
let outputTokens = 0;
|
| 130 |
let responseId: string | null = null;
|
| 131 |
|
| 132 |
+
for await (const evt of iterateCodexEvents(codexApi, rawResponse)) {
|
| 133 |
+
if (evt.responseId) responseId = evt.responseId;
|
| 134 |
+
if (evt.textDelta) fullText += evt.textDelta;
|
| 135 |
+
if (evt.usage) {
|
| 136 |
+
inputTokens = evt.usage.input_tokens;
|
| 137 |
+
outputTokens = evt.usage.output_tokens;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 138 |
}
|
| 139 |
}
|
| 140 |
|
|
@@ -14,6 +14,7 @@ import type {
|
|
| 14 |
GeminiGenerateContentResponse,
|
| 15 |
GeminiUsageMetadata,
|
| 16 |
} from "../types/gemini.js";
|
|
|
|
| 17 |
|
| 18 |
export interface GeminiUsageInfo {
|
| 19 |
input_tokens: number;
|
|
@@ -34,27 +35,17 @@ export async function* streamCodexToGemini(
|
|
| 34 |
let inputTokens = 0;
|
| 35 |
let outputTokens = 0;
|
| 36 |
|
| 37 |
-
for await (const evt of
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
switch (evt.event) {
|
| 41 |
-
case "response.created":
|
| 42 |
-
case "response.in_progress": {
|
| 43 |
-
const resp = data.response as Record<string, unknown> | undefined;
|
| 44 |
-
if (resp?.id) {
|
| 45 |
-
onResponseId?.(resp.id as string);
|
| 46 |
-
}
|
| 47 |
-
break;
|
| 48 |
-
}
|
| 49 |
|
|
|
|
| 50 |
case "response.output_text.delta": {
|
| 51 |
-
|
| 52 |
-
if (delta) {
|
| 53 |
const chunk: GeminiGenerateContentResponse = {
|
| 54 |
candidates: [
|
| 55 |
{
|
| 56 |
content: {
|
| 57 |
-
parts: [{ text:
|
| 58 |
role: "model",
|
| 59 |
},
|
| 60 |
index: 0,
|
|
@@ -68,11 +59,9 @@ export async function* streamCodexToGemini(
|
|
| 68 |
}
|
| 69 |
|
| 70 |
case "response.completed": {
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
| 74 |
-
inputTokens = u.input_tokens ?? 0;
|
| 75 |
-
outputTokens = u.output_tokens ?? 0;
|
| 76 |
onUsage?.({ input_tokens: inputTokens, output_tokens: outputTokens });
|
| 77 |
}
|
| 78 |
|
|
@@ -120,32 +109,12 @@ export async function collectCodexToGeminiResponse(
|
|
| 120 |
let outputTokens = 0;
|
| 121 |
let responseId: string | null = null;
|
| 122 |
|
| 123 |
-
for await (const evt of
|
| 124 |
-
|
| 125 |
-
|
| 126 |
-
|
| 127 |
-
|
| 128 |
-
|
| 129 |
-
const resp = data.response as Record<string, unknown> | undefined;
|
| 130 |
-
if (resp?.id) responseId = resp.id as string;
|
| 131 |
-
break;
|
| 132 |
-
}
|
| 133 |
-
|
| 134 |
-
case "response.output_text.delta": {
|
| 135 |
-
fullText += (data.delta as string) ?? "";
|
| 136 |
-
break;
|
| 137 |
-
}
|
| 138 |
-
|
| 139 |
-
case "response.completed": {
|
| 140 |
-
const resp = data.response as Record<string, unknown> | undefined;
|
| 141 |
-
if (resp?.id) responseId = resp.id as string;
|
| 142 |
-
if (resp?.usage) {
|
| 143 |
-
const u = resp.usage as Record<string, number>;
|
| 144 |
-
inputTokens = u.input_tokens ?? 0;
|
| 145 |
-
outputTokens = u.output_tokens ?? 0;
|
| 146 |
-
}
|
| 147 |
-
break;
|
| 148 |
-
}
|
| 149 |
}
|
| 150 |
}
|
| 151 |
|
|
|
|
| 14 |
GeminiGenerateContentResponse,
|
| 15 |
GeminiUsageMetadata,
|
| 16 |
} from "../types/gemini.js";
|
| 17 |
+
import { iterateCodexEvents } from "./codex-event-extractor.js";
|
| 18 |
|
| 19 |
export interface GeminiUsageInfo {
|
| 20 |
input_tokens: number;
|
|
|
|
| 35 |
let inputTokens = 0;
|
| 36 |
let outputTokens = 0;
|
| 37 |
|
| 38 |
+
for await (const evt of iterateCodexEvents(codexApi, rawResponse)) {
|
| 39 |
+
if (evt.responseId) onResponseId?.(evt.responseId);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 40 |
|
| 41 |
+
switch (evt.typed.type) {
|
| 42 |
case "response.output_text.delta": {
|
| 43 |
+
if (evt.textDelta) {
|
|
|
|
| 44 |
const chunk: GeminiGenerateContentResponse = {
|
| 45 |
candidates: [
|
| 46 |
{
|
| 47 |
content: {
|
| 48 |
+
parts: [{ text: evt.textDelta }],
|
| 49 |
role: "model",
|
| 50 |
},
|
| 51 |
index: 0,
|
|
|
|
| 59 |
}
|
| 60 |
|
| 61 |
case "response.completed": {
|
| 62 |
+
if (evt.usage) {
|
| 63 |
+
inputTokens = evt.usage.input_tokens;
|
| 64 |
+
outputTokens = evt.usage.output_tokens;
|
|
|
|
|
|
|
| 65 |
onUsage?.({ input_tokens: inputTokens, output_tokens: outputTokens });
|
| 66 |
}
|
| 67 |
|
|
|
|
| 109 |
let outputTokens = 0;
|
| 110 |
let responseId: string | null = null;
|
| 111 |
|
| 112 |
+
for await (const evt of iterateCodexEvents(codexApi, rawResponse)) {
|
| 113 |
+
if (evt.responseId) responseId = evt.responseId;
|
| 114 |
+
if (evt.textDelta) fullText += evt.textDelta;
|
| 115 |
+
if (evt.usage) {
|
| 116 |
+
inputTokens = evt.usage.input_tokens;
|
| 117 |
+
outputTokens = evt.usage.output_tokens;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 118 |
}
|
| 119 |
}
|
| 120 |
|
|
@@ -11,16 +11,14 @@
|
|
| 11 |
*/
|
| 12 |
|
| 13 |
import { randomUUID } from "crypto";
|
| 14 |
-
import type {
|
| 15 |
import type {
|
| 16 |
ChatCompletionResponse,
|
| 17 |
ChatCompletionChunk,
|
| 18 |
} from "../types/openai.js";
|
|
|
|
| 19 |
|
| 20 |
-
export
|
| 21 |
-
input_tokens: number;
|
| 22 |
-
output_tokens: number;
|
| 23 |
-
}
|
| 24 |
|
| 25 |
/** Format an SSE chunk for streaming output */
|
| 26 |
function formatSSE(chunk: ChatCompletionChunk): string {
|
|
@@ -41,7 +39,6 @@ export async function* streamCodexToOpenAI(
|
|
| 41 |
): AsyncGenerator<string> {
|
| 42 |
const chunkId = `chatcmpl-${randomUUID().replace(/-/g, "").slice(0, 24)}`;
|
| 43 |
const created = Math.floor(Date.now() / 1000);
|
| 44 |
-
let responseId: string | null = null;
|
| 45 |
|
| 46 |
// Send initial role chunk
|
| 47 |
yield formatSSE({
|
|
@@ -58,25 +55,12 @@ export async function* streamCodexToOpenAI(
|
|
| 58 |
],
|
| 59 |
});
|
| 60 |
|
| 61 |
-
for await (const evt of
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
switch (evt.event) {
|
| 65 |
-
case "response.created":
|
| 66 |
-
case "response.in_progress": {
|
| 67 |
-
// Extract response ID for headers and multi-turn
|
| 68 |
-
const resp = data.response as Record<string, unknown> | undefined;
|
| 69 |
-
if (resp?.id) {
|
| 70 |
-
responseId = resp.id as string;
|
| 71 |
-
onResponseId?.(responseId);
|
| 72 |
-
}
|
| 73 |
-
break;
|
| 74 |
-
}
|
| 75 |
|
|
|
|
| 76 |
case "response.output_text.delta": {
|
| 77 |
-
|
| 78 |
-
const delta = (data.delta as string) ?? "";
|
| 79 |
-
if (delta) {
|
| 80 |
yield formatSSE({
|
| 81 |
id: chunkId,
|
| 82 |
object: "chat.completion.chunk",
|
|
@@ -85,7 +69,7 @@ export async function* streamCodexToOpenAI(
|
|
| 85 |
choices: [
|
| 86 |
{
|
| 87 |
index: 0,
|
| 88 |
-
delta: { content:
|
| 89 |
finish_reason: null,
|
| 90 |
},
|
| 91 |
],
|
|
@@ -95,18 +79,7 @@ export async function* streamCodexToOpenAI(
|
|
| 95 |
}
|
| 96 |
|
| 97 |
case "response.completed": {
|
| 98 |
-
|
| 99 |
-
if (onUsage) {
|
| 100 |
-
const resp = data.response as Record<string, unknown> | undefined;
|
| 101 |
-
if (resp?.usage) {
|
| 102 |
-
const u = resp.usage as Record<string, number>;
|
| 103 |
-
onUsage({
|
| 104 |
-
input_tokens: u.input_tokens ?? 0,
|
| 105 |
-
output_tokens: u.output_tokens ?? 0,
|
| 106 |
-
});
|
| 107 |
-
}
|
| 108 |
-
}
|
| 109 |
-
// Send final chunk with finish_reason
|
| 110 |
yield formatSSE({
|
| 111 |
id: chunkId,
|
| 112 |
object: "chat.completion.chunk",
|
|
@@ -122,8 +95,6 @@ export async function* streamCodexToOpenAI(
|
|
| 122 |
});
|
| 123 |
break;
|
| 124 |
}
|
| 125 |
-
|
| 126 |
-
// Ignore other events (reasoning, content_part, output_item, etc.)
|
| 127 |
}
|
| 128 |
}
|
| 129 |
|
|
@@ -147,33 +118,12 @@ export async function collectCodexResponse(
|
|
| 147 |
let completionTokens = 0;
|
| 148 |
let responseId: string | null = null;
|
| 149 |
|
| 150 |
-
for await (const evt of
|
| 151 |
-
|
| 152 |
-
|
| 153 |
-
|
| 154 |
-
|
| 155 |
-
|
| 156 |
-
const resp = data.response as Record<string, unknown> | undefined;
|
| 157 |
-
if (resp?.id) responseId = resp.id as string;
|
| 158 |
-
break;
|
| 159 |
-
}
|
| 160 |
-
|
| 161 |
-
case "response.output_text.delta": {
|
| 162 |
-
const delta = (data.delta as string) ?? "";
|
| 163 |
-
fullText += delta;
|
| 164 |
-
break;
|
| 165 |
-
}
|
| 166 |
-
|
| 167 |
-
case "response.completed": {
|
| 168 |
-
const resp = data.response as Record<string, unknown> | undefined;
|
| 169 |
-
if (resp?.id) responseId = resp.id as string;
|
| 170 |
-
if (resp?.usage) {
|
| 171 |
-
const usage = resp.usage as Record<string, number>;
|
| 172 |
-
promptTokens = usage.input_tokens ?? 0;
|
| 173 |
-
completionTokens = usage.output_tokens ?? 0;
|
| 174 |
-
}
|
| 175 |
-
break;
|
| 176 |
-
}
|
| 177 |
}
|
| 178 |
}
|
| 179 |
|
|
|
|
| 11 |
*/
|
| 12 |
|
| 13 |
import { randomUUID } from "crypto";
|
| 14 |
+
import type { CodexApi } from "../proxy/codex-api.js";
|
| 15 |
import type {
|
| 16 |
ChatCompletionResponse,
|
| 17 |
ChatCompletionChunk,
|
| 18 |
} from "../types/openai.js";
|
| 19 |
+
import { iterateCodexEvents, type UsageInfo } from "./codex-event-extractor.js";
|
| 20 |
|
| 21 |
+
export type { UsageInfo };
|
|
|
|
|
|
|
|
|
|
| 22 |
|
| 23 |
/** Format an SSE chunk for streaming output */
|
| 24 |
function formatSSE(chunk: ChatCompletionChunk): string {
|
|
|
|
| 39 |
): AsyncGenerator<string> {
|
| 40 |
const chunkId = `chatcmpl-${randomUUID().replace(/-/g, "").slice(0, 24)}`;
|
| 41 |
const created = Math.floor(Date.now() / 1000);
|
|
|
|
| 42 |
|
| 43 |
// Send initial role chunk
|
| 44 |
yield formatSSE({
|
|
|
|
| 55 |
],
|
| 56 |
});
|
| 57 |
|
| 58 |
+
for await (const evt of iterateCodexEvents(codexApi, rawResponse)) {
|
| 59 |
+
if (evt.responseId) onResponseId?.(evt.responseId);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 60 |
|
| 61 |
+
switch (evt.typed.type) {
|
| 62 |
case "response.output_text.delta": {
|
| 63 |
+
if (evt.textDelta) {
|
|
|
|
|
|
|
| 64 |
yield formatSSE({
|
| 65 |
id: chunkId,
|
| 66 |
object: "chat.completion.chunk",
|
|
|
|
| 69 |
choices: [
|
| 70 |
{
|
| 71 |
index: 0,
|
| 72 |
+
delta: { content: evt.textDelta },
|
| 73 |
finish_reason: null,
|
| 74 |
},
|
| 75 |
],
|
|
|
|
| 79 |
}
|
| 80 |
|
| 81 |
case "response.completed": {
|
| 82 |
+
if (evt.usage) onUsage?.(evt.usage);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 83 |
yield formatSSE({
|
| 84 |
id: chunkId,
|
| 85 |
object: "chat.completion.chunk",
|
|
|
|
| 95 |
});
|
| 96 |
break;
|
| 97 |
}
|
|
|
|
|
|
|
| 98 |
}
|
| 99 |
}
|
| 100 |
|
|
|
|
| 118 |
let completionTokens = 0;
|
| 119 |
let responseId: string | null = null;
|
| 120 |
|
| 121 |
+
for await (const evt of iterateCodexEvents(codexApi, rawResponse)) {
|
| 122 |
+
if (evt.responseId) responseId = evt.responseId;
|
| 123 |
+
if (evt.textDelta) fullText += evt.textDelta;
|
| 124 |
+
if (evt.usage) {
|
| 125 |
+
promptTokens = evt.usage.input_tokens;
|
| 126 |
+
completionTokens = evt.usage.output_tokens;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 127 |
}
|
| 128 |
}
|
| 129 |
|
|
@@ -0,0 +1,121 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
|
| 2 |
+
* Type-safe Codex SSE event definitions and type guards.
|
| 3 |
+
*
|
| 4 |
+
* The Codex Responses API sends these SSE events during streaming.
|
| 5 |
+
* Using discriminated unions eliminates unsafe `as` casts in translators.
|
| 6 |
+
*/
|
| 7 |
+
|
| 8 |
+
import type { CodexSSEEvent } from "../proxy/codex-api.js";
|
| 9 |
+
|
| 10 |
+
// ── Event data shapes ────────────────────────────────────────────
|
| 11 |
+
|
| 12 |
+
export interface CodexResponseData {
|
| 13 |
+
id?: string;
|
| 14 |
+
usage?: {
|
| 15 |
+
input_tokens: number;
|
| 16 |
+
output_tokens: number;
|
| 17 |
+
};
|
| 18 |
+
[key: string]: unknown;
|
| 19 |
+
}
|
| 20 |
+
|
| 21 |
+
export interface CodexCreatedEvent {
|
| 22 |
+
type: "response.created";
|
| 23 |
+
response: CodexResponseData;
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
export interface CodexInProgressEvent {
|
| 27 |
+
type: "response.in_progress";
|
| 28 |
+
response: CodexResponseData;
|
| 29 |
+
}
|
| 30 |
+
|
| 31 |
+
export interface CodexTextDeltaEvent {
|
| 32 |
+
type: "response.output_text.delta";
|
| 33 |
+
delta: string;
|
| 34 |
+
}
|
| 35 |
+
|
| 36 |
+
export interface CodexTextDoneEvent {
|
| 37 |
+
type: "response.output_text.done";
|
| 38 |
+
text: string;
|
| 39 |
+
}
|
| 40 |
+
|
| 41 |
+
export interface CodexCompletedEvent {
|
| 42 |
+
type: "response.completed";
|
| 43 |
+
response: CodexResponseData;
|
| 44 |
+
}
|
| 45 |
+
|
| 46 |
+
export interface CodexUnknownEvent {
|
| 47 |
+
type: "unknown";
|
| 48 |
+
raw: unknown;
|
| 49 |
+
}
|
| 50 |
+
|
| 51 |
+
export type TypedCodexEvent =
|
| 52 |
+
| CodexCreatedEvent
|
| 53 |
+
| CodexInProgressEvent
|
| 54 |
+
| CodexTextDeltaEvent
|
| 55 |
+
| CodexTextDoneEvent
|
| 56 |
+
| CodexCompletedEvent
|
| 57 |
+
| CodexUnknownEvent;
|
| 58 |
+
|
| 59 |
+
// ── Type guard / parser ──────────────────────────────────────────
|
| 60 |
+
|
| 61 |
+
function isRecord(v: unknown): v is Record<string, unknown> {
|
| 62 |
+
return typeof v === "object" && v !== null && !Array.isArray(v);
|
| 63 |
+
}
|
| 64 |
+
|
| 65 |
+
function parseResponseData(data: unknown): CodexResponseData | undefined {
|
| 66 |
+
if (!isRecord(data)) return undefined;
|
| 67 |
+
const resp = data.response;
|
| 68 |
+
if (!isRecord(resp)) return undefined;
|
| 69 |
+
const result: CodexResponseData = {};
|
| 70 |
+
if (typeof resp.id === "string") result.id = resp.id;
|
| 71 |
+
if (isRecord(resp.usage)) {
|
| 72 |
+
result.usage = {
|
| 73 |
+
input_tokens: typeof resp.usage.input_tokens === "number" ? resp.usage.input_tokens : 0,
|
| 74 |
+
output_tokens: typeof resp.usage.output_tokens === "number" ? resp.usage.output_tokens : 0,
|
| 75 |
+
};
|
| 76 |
+
}
|
| 77 |
+
return result;
|
| 78 |
+
}
|
| 79 |
+
|
| 80 |
+
/**
|
| 81 |
+
* Parse a raw CodexSSEEvent into a typed event.
|
| 82 |
+
* Safely extracts fields with runtime checks — no `as` casts.
|
| 83 |
+
*/
|
| 84 |
+
export function parseCodexEvent(evt: CodexSSEEvent): TypedCodexEvent {
|
| 85 |
+
const data = evt.data;
|
| 86 |
+
|
| 87 |
+
switch (evt.event) {
|
| 88 |
+
case "response.created": {
|
| 89 |
+
const resp = parseResponseData(data);
|
| 90 |
+
return resp
|
| 91 |
+
? { type: "response.created", response: resp }
|
| 92 |
+
: { type: "unknown", raw: data };
|
| 93 |
+
}
|
| 94 |
+
case "response.in_progress": {
|
| 95 |
+
const resp = parseResponseData(data);
|
| 96 |
+
return resp
|
| 97 |
+
? { type: "response.in_progress", response: resp }
|
| 98 |
+
: { type: "unknown", raw: data };
|
| 99 |
+
}
|
| 100 |
+
case "response.output_text.delta": {
|
| 101 |
+
if (isRecord(data) && typeof data.delta === "string") {
|
| 102 |
+
return { type: "response.output_text.delta", delta: data.delta };
|
| 103 |
+
}
|
| 104 |
+
return { type: "unknown", raw: data };
|
| 105 |
+
}
|
| 106 |
+
case "response.output_text.done": {
|
| 107 |
+
if (isRecord(data) && typeof data.text === "string") {
|
| 108 |
+
return { type: "response.output_text.done", text: data.text };
|
| 109 |
+
}
|
| 110 |
+
return { type: "unknown", raw: data };
|
| 111 |
+
}
|
| 112 |
+
case "response.completed": {
|
| 113 |
+
const resp = parseResponseData(data);
|
| 114 |
+
return resp
|
| 115 |
+
? { type: "response.completed", response: resp }
|
| 116 |
+
: { type: "unknown", raw: data };
|
| 117 |
+
}
|
| 118 |
+
default:
|
| 119 |
+
return { type: "unknown", raw: data };
|
| 120 |
+
}
|
| 121 |
+
}
|
|
@@ -5,10 +5,12 @@
|
|
| 5 |
|
| 6 |
import { readFileSync, writeFileSync, mkdirSync } from "fs";
|
| 7 |
import { resolve } from "path";
|
|
|
|
| 8 |
import yaml from "js-yaml";
|
| 9 |
-
import { mutateClientConfig } from "./config.js";
|
| 10 |
import { jitterInt } from "./utils/jitter.js";
|
| 11 |
import { curlFetchGet } from "./tls/curl-fetch.js";
|
|
|
|
| 12 |
|
| 13 |
const CONFIG_PATH = resolve(process.cwd(), "config/default.yaml");
|
| 14 |
const STATE_PATH = resolve(process.cwd(), "data/update-state.json");
|
|
@@ -27,6 +29,7 @@ export interface UpdateState {
|
|
| 27 |
|
| 28 |
let _currentState: UpdateState | null = null;
|
| 29 |
let _pollTimer: ReturnType<typeof setTimeout> | null = null;
|
|
|
|
| 30 |
|
| 31 |
function loadCurrentConfig(): { app_version: string; build_number: string } {
|
| 32 |
const raw = yaml.load(readFileSync(CONFIG_PATH, "utf-8")) as Record<string, unknown>;
|
|
@@ -61,13 +64,71 @@ function parseAppcast(xml: string): {
|
|
| 61 |
}
|
| 62 |
|
| 63 |
function applyVersionUpdate(version: string, build: string): void {
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
|
| 68 |
mutateClientConfig({ app_version: version, build_number: build });
|
| 69 |
}
|
| 70 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 71 |
export async function checkForUpdate(): Promise<UpdateState> {
|
| 72 |
const current = loadCurrentConfig();
|
| 73 |
const res = await curlFetchGet(APPCAST_URL);
|
|
@@ -109,6 +170,9 @@ export async function checkForUpdate(): Promise<UpdateState> {
|
|
| 109 |
state.current_build = build!;
|
| 110 |
state.update_available = false;
|
| 111 |
console.log(`[UpdateChecker] Auto-applied: v${version} (build ${build})`);
|
|
|
|
|
|
|
|
|
|
| 112 |
}
|
| 113 |
|
| 114 |
return state;
|
|
|
|
| 5 |
|
| 6 |
import { readFileSync, writeFileSync, mkdirSync } from "fs";
|
| 7 |
import { resolve } from "path";
|
| 8 |
+
import { fork } from "child_process";
|
| 9 |
import yaml from "js-yaml";
|
| 10 |
+
import { mutateClientConfig, reloadAllConfigs } from "./config.js";
|
| 11 |
import { jitterInt } from "./utils/jitter.js";
|
| 12 |
import { curlFetchGet } from "./tls/curl-fetch.js";
|
| 13 |
+
import { mutateYaml } from "./utils/yaml-mutate.js";
|
| 14 |
|
| 15 |
const CONFIG_PATH = resolve(process.cwd(), "config/default.yaml");
|
| 16 |
const STATE_PATH = resolve(process.cwd(), "data/update-state.json");
|
|
|
|
| 29 |
|
| 30 |
let _currentState: UpdateState | null = null;
|
| 31 |
let _pollTimer: ReturnType<typeof setTimeout> | null = null;
|
| 32 |
+
let _updateInProgress = false;
|
| 33 |
|
| 34 |
function loadCurrentConfig(): { app_version: string; build_number: string } {
|
| 35 |
const raw = yaml.load(readFileSync(CONFIG_PATH, "utf-8")) as Record<string, unknown>;
|
|
|
|
| 64 |
}
|
| 65 |
|
| 66 |
function applyVersionUpdate(version: string, build: string): void {
|
| 67 |
+
mutateYaml(CONFIG_PATH, (data: any) => {
|
| 68 |
+
data.client.app_version = version;
|
| 69 |
+
data.client.build_number = build;
|
| 70 |
+
});
|
| 71 |
mutateClientConfig({ app_version: version, build_number: build });
|
| 72 |
}
|
| 73 |
|
| 74 |
+
/**
|
| 75 |
+
* Trigger the full-update pipeline in a background child process.
|
| 76 |
+
* Downloads new Codex.app, extracts fingerprint, and applies config updates.
|
| 77 |
+
* Protected by a lock to prevent concurrent runs.
|
| 78 |
+
*/
|
| 79 |
+
function triggerFullUpdate(): void {
|
| 80 |
+
if (_updateInProgress) {
|
| 81 |
+
console.log("[UpdateChecker] Full update already in progress, skipping");
|
| 82 |
+
return;
|
| 83 |
+
}
|
| 84 |
+
_updateInProgress = true;
|
| 85 |
+
console.log("[UpdateChecker] Triggering full-update pipeline...");
|
| 86 |
+
|
| 87 |
+
const child = fork(
|
| 88 |
+
resolve(process.cwd(), "scripts/full-update.ts"),
|
| 89 |
+
["--force"],
|
| 90 |
+
{
|
| 91 |
+
execArgv: ["--import", "tsx"],
|
| 92 |
+
stdio: "pipe",
|
| 93 |
+
cwd: process.cwd(),
|
| 94 |
+
},
|
| 95 |
+
);
|
| 96 |
+
|
| 97 |
+
let output = "";
|
| 98 |
+
child.stdout?.on("data", (chunk: Buffer) => {
|
| 99 |
+
output += chunk.toString();
|
| 100 |
+
});
|
| 101 |
+
child.stderr?.on("data", (chunk: Buffer) => {
|
| 102 |
+
output += chunk.toString();
|
| 103 |
+
});
|
| 104 |
+
|
| 105 |
+
child.on("exit", (code) => {
|
| 106 |
+
_updateInProgress = false;
|
| 107 |
+
if (code === 0) {
|
| 108 |
+
console.log("[UpdateChecker] Full update completed. Reloading config...");
|
| 109 |
+
try {
|
| 110 |
+
reloadAllConfigs();
|
| 111 |
+
} catch (err) {
|
| 112 |
+
console.error("[UpdateChecker] Failed to reload config after update:", err instanceof Error ? err.message : err);
|
| 113 |
+
}
|
| 114 |
+
} else {
|
| 115 |
+
console.warn(`[UpdateChecker] Full update exited with code ${code}`);
|
| 116 |
+
if (output) {
|
| 117 |
+
// Log last few lines for debugging
|
| 118 |
+
const lines = output.trim().split("\n").slice(-5);
|
| 119 |
+
for (const line of lines) {
|
| 120 |
+
console.warn(`[UpdateChecker] ${line}`);
|
| 121 |
+
}
|
| 122 |
+
}
|
| 123 |
+
}
|
| 124 |
+
});
|
| 125 |
+
|
| 126 |
+
child.on("error", (err) => {
|
| 127 |
+
_updateInProgress = false;
|
| 128 |
+
console.error("[UpdateChecker] Failed to spawn full-update:", err.message);
|
| 129 |
+
});
|
| 130 |
+
}
|
| 131 |
+
|
| 132 |
export async function checkForUpdate(): Promise<UpdateState> {
|
| 133 |
const current = loadCurrentConfig();
|
| 134 |
const res = await curlFetchGet(APPCAST_URL);
|
|
|
|
| 170 |
state.current_build = build!;
|
| 171 |
state.update_available = false;
|
| 172 |
console.log(`[UpdateChecker] Auto-applied: v${version} (build ${build})`);
|
| 173 |
+
|
| 174 |
+
// Trigger full-update pipeline in background (download + fingerprint extraction)
|
| 175 |
+
triggerFullUpdate();
|
| 176 |
}
|
| 177 |
|
| 178 |
return state;
|
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
/**
|
| 2 |
+
* Structured YAML file mutation — parse, mutate, write back.
|
| 3 |
+
*
|
| 4 |
+
* Avoids fragile regex-based replacements.
|
| 5 |
+
* Note: js-yaml.dump() does not preserve comments.
|
| 6 |
+
*/
|
| 7 |
+
|
| 8 |
+
import { readFileSync, writeFileSync, renameSync } from "fs";
|
| 9 |
+
import yaml from "js-yaml";
|
| 10 |
+
|
| 11 |
+
/**
|
| 12 |
+
* Load a YAML file, apply a mutator function, and atomically write it back.
|
| 13 |
+
* Uses tmp-file + rename for crash safety.
|
| 14 |
+
*/
|
| 15 |
+
export function mutateYaml(filePath: string, mutator: (data: any) => void): void {
|
| 16 |
+
const raw = readFileSync(filePath, "utf-8");
|
| 17 |
+
const data = yaml.load(raw);
|
| 18 |
+
mutator(data);
|
| 19 |
+
const tmp = filePath + ".tmp";
|
| 20 |
+
writeFileSync(tmp, yaml.dump(data, { lineWidth: -1, quotingType: '"' }), "utf-8");
|
| 21 |
+
renameSync(tmp, filePath);
|
| 22 |
+
}
|