icebear0828 Claude Opus 4.6 committed on
Commit
bc98e69
·
1 Parent(s): 8e4f44b

fix: preserve model name suffixes (-fast, -high, etc.) in API responses

Browse files

Response model name was losing suffixes because it used the stripped base
model ID from parseModelName() instead of reconstructing with suffixes.
Also adds `none` to EFFORT_SUFFIXES (gpt-5.4 supports a `none` reasoning effort).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>

src/models/model-store.ts CHANGED
@@ -195,7 +195,7 @@ export interface ParsedModelName {
195
  }
196
 
197
  const SERVICE_TIER_SUFFIXES = new Set(["fast", "flex"]);
198
- const EFFORT_SUFFIXES = new Set(["minimal", "low", "medium", "high", "xhigh"]);
199
 
200
  /**
201
  * Parse a model name that may contain embedded suffixes for service_tier and reasoning_effort.
@@ -243,6 +243,14 @@ export function parseModelName(input: string): ParsedModelName {
243
  return { modelId, serviceTier, reasoningEffort };
244
  }
245
 
 
 
 
 
 
 
 
 
246
  // ── Getters ────────────────────────────────────────────────────────
247
 
248
  /**
 
195
  }
196
 
197
  const SERVICE_TIER_SUFFIXES = new Set(["fast", "flex"]);
198
+ const EFFORT_SUFFIXES = new Set(["none", "minimal", "low", "medium", "high", "xhigh"]);
199
 
200
  /**
201
  * Parse a model name that may contain embedded suffixes for service_tier and reasoning_effort.
 
243
  return { modelId, serviceTier, reasoningEffort };
244
  }
245
 
246
+ /** Reconstruct display model name: resolved modelId + any parsed suffixes. */
247
+ export function buildDisplayModelName(parsed: ParsedModelName): string {
248
+ let name = parsed.modelId;
249
+ if (parsed.reasoningEffort) name += `-${parsed.reasoningEffort}`;
250
+ if (parsed.serviceTier) name += `-${parsed.serviceTier}`;
251
+ return name;
252
+ }
253
+
254
  // ── Getters ────────────────────────────────────────────────────────
255
 
256
  /**
src/routes/chat.ts CHANGED
@@ -9,6 +9,7 @@ import {
9
  collectCodexResponse,
10
  } from "../translation/codex-to-openai.js";
11
  import { getConfig } from "../config.js";
 
12
  import {
13
  handleProxyRequest,
14
  type FormatAdapter,
@@ -122,6 +123,7 @@ export function createChatRoutes(
122
  const req = parsed.data;
123
 
124
  const codexRequest = translateToCodexRequest(req);
 
125
  const wantReasoning = !!req.reasoning_effort;
126
 
127
  return handleProxyRequest(
@@ -130,7 +132,7 @@ export function createChatRoutes(
130
  cookieJar,
131
  {
132
  codexRequest,
133
- model: codexRequest.model,
134
  isStreaming: req.stream,
135
  },
136
  makeOpenAIFormat(wantReasoning),
 
9
  collectCodexResponse,
10
  } from "../translation/codex-to-openai.js";
11
  import { getConfig } from "../config.js";
12
+ import { parseModelName, buildDisplayModelName } from "../models/model-store.js";
13
  import {
14
  handleProxyRequest,
15
  type FormatAdapter,
 
123
  const req = parsed.data;
124
 
125
  const codexRequest = translateToCodexRequest(req);
126
+ const displayModel = buildDisplayModelName(parseModelName(req.model));
127
  const wantReasoning = !!req.reasoning_effort;
128
 
129
  return handleProxyRequest(
 
132
  cookieJar,
133
  {
134
  codexRequest,
135
+ model: displayModel,
136
  isStreaming: req.stream,
137
  },
138
  makeOpenAIFormat(wantReasoning),
src/routes/messages.ts CHANGED
@@ -16,6 +16,7 @@ import {
16
  collectCodexToAnthropicResponse,
17
  } from "../translation/codex-to-anthropic.js";
18
  import { getConfig } from "../config.js";
 
19
  import {
20
  handleProxyRequest,
21
  type FormatAdapter,
@@ -104,7 +105,7 @@ export function createMessagesRoutes(
104
  cookieJar,
105
  {
106
  codexRequest,
107
- model: req.model,
108
  isStreaming: req.stream,
109
  },
110
  makeAnthropicFormat(wantThinking),
 
16
  collectCodexToAnthropicResponse,
17
  } from "../translation/codex-to-anthropic.js";
18
  import { getConfig } from "../config.js";
19
+ import { parseModelName, buildDisplayModelName } from "../models/model-store.js";
20
  import {
21
  handleProxyRequest,
22
  type FormatAdapter,
 
105
  cookieJar,
106
  {
107
  codexRequest,
108
+ model: buildDisplayModelName(parseModelName(req.model)),
109
  isStreaming: req.stream,
110
  },
111
  makeAnthropicFormat(wantThinking),
src/routes/responses.ts CHANGED
@@ -12,7 +12,7 @@ import type { CookieJar } from "../proxy/cookie-jar.js";
12
  import type { ProxyPool } from "../proxy/proxy-pool.js";
13
  import type { CodexResponsesRequest, CodexInputItem, CodexApi } from "../proxy/codex-api.js";
14
  import { getConfig } from "../config.js";
15
- import { parseModelName, resolveModelId, getModelInfo } from "../models/model-store.js";
16
  import { EmptyResponseError } from "../translation/codex-event-extractor.js";
17
  import {
18
  handleProxyRequest,
@@ -215,6 +215,7 @@ export function createResponsesRoutes(
215
  const rawModel = typeof body.model === "string" ? body.model : "codex";
216
  const parsed = parseModelName(rawModel);
217
  const modelId = resolveModelId(parsed.modelId);
 
218
  const modelInfo = getModelInfo(modelId);
219
 
220
  // Build CodexResponsesRequest
@@ -269,7 +270,7 @@ export function createResponsesRoutes(
269
  cookieJar,
270
  {
271
  codexRequest,
272
- model: modelId,
273
  isStreaming: clientWantsStream,
274
  },
275
  PASSTHROUGH_FORMAT,
 
12
  import type { ProxyPool } from "../proxy/proxy-pool.js";
13
  import type { CodexResponsesRequest, CodexInputItem, CodexApi } from "../proxy/codex-api.js";
14
  import { getConfig } from "../config.js";
15
+ import { parseModelName, resolveModelId, getModelInfo, buildDisplayModelName } from "../models/model-store.js";
16
  import { EmptyResponseError } from "../translation/codex-event-extractor.js";
17
  import {
18
  handleProxyRequest,
 
215
  const rawModel = typeof body.model === "string" ? body.model : "codex";
216
  const parsed = parseModelName(rawModel);
217
  const modelId = resolveModelId(parsed.modelId);
218
+ const displayModel = buildDisplayModelName(parsed);
219
  const modelInfo = getModelInfo(modelId);
220
 
221
  // Build CodexResponsesRequest
 
270
  cookieJar,
271
  {
272
  codexRequest,
273
+ model: displayModel,
274
  isStreaming: clientWantsStream,
275
  },
276
  PASSTHROUGH_FORMAT,