|
|
import { Request } from "express"; |
|
|
import { |
|
|
API_REQUEST_VALIDATORS, |
|
|
API_REQUEST_TRANSFORMERS, |
|
|
} from "../../../../shared/api-schemas"; |
|
|
import { BadRequestError } from "../../../../shared/errors"; |
|
|
import { fixMistralPrompt, isMistralVisionModel } from "../../../../shared/api-schemas/mistral-ai"; |
|
|
import { |
|
|
isImageGenerationRequest, |
|
|
isTextGenerationRequest, |
|
|
} from "../../common"; |
|
|
import { RequestPreprocessor } from "../index"; |
|
|
|
|
|
|
|
|
export const transformOutboundPayload: RequestPreprocessor = async (req) => { |
|
|
const alreadyTransformed = req.retryCount > 0; |
|
|
const notTransformable = |
|
|
!isTextGenerationRequest(req) && !isImageGenerationRequest(req); |
|
|
|
|
|
if (alreadyTransformed) { |
|
|
return; |
|
|
} else if (notTransformable) { |
|
|
|
|
|
const { inboundApi, outboundApi, method, path } = req; |
|
|
req.log.warn( |
|
|
{ inboundApi, outboundApi, method, path }, |
|
|
"`transformOutboundPayload` called on a non-transformable request." |
|
|
); |
|
|
return; |
|
|
} |
|
|
|
|
|
applyMistralPromptFixes(req); |
|
|
applyGoogleAIKeyTransforms(req); |
|
|
applyOpenAIResponsesTransform(req); |
|
|
|
|
|
|
|
|
|
|
|
const isNativePrompt = req.inboundApi === req.outboundApi; |
|
|
if (isNativePrompt) { |
|
|
const result = API_REQUEST_VALIDATORS[req.inboundApi].parse(req.body); |
|
|
req.body = result; |
|
|
return; |
|
|
} |
|
|
|
|
|
|
|
|
const transformation = `${req.inboundApi}->${req.outboundApi}` as const; |
|
|
const transFn = API_REQUEST_TRANSFORMERS[transformation]; |
|
|
|
|
|
if (transFn) { |
|
|
req.log.info({ transformation }, "Transforming request..."); |
|
|
req.body = await transFn(req); |
|
|
return; |
|
|
} |
|
|
|
|
|
throw new BadRequestError( |
|
|
`${transformation} proxying is not supported. Make sure your client is configured to send requests in the correct format and to the correct endpoint.` |
|
|
); |
|
|
}; |
|
|
|
|
|
|
|
|
function applyOpenAIResponsesTransform(req: Request): void { |
|
|
if (req.outboundApi === "openai-responses") { |
|
|
req.log.info("Transforming request to OpenAI Responses API format"); |
|
|
|
|
|
|
|
|
const originalBody = { ...req.body }; |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if (req.body.messages && !req.body.input) { |
|
|
req.body.input = { |
|
|
messages: req.body.messages |
|
|
}; |
|
|
delete req.body.messages; |
|
|
} |
|
|
|
|
|
|
|
|
|
|
|
if (!req.body.previousResponseId && req.body.conversation_id) { |
|
|
req.body.previousResponseId = req.body.conversation_id; |
|
|
delete req.body.conversation_id; |
|
|
} |
|
|
|
|
|
|
|
|
if (req.body.max_tokens && !req.body.max_output_tokens) { |
|
|
req.body.max_output_tokens = req.body.max_tokens; |
|
|
delete req.body.max_tokens; |
|
|
} |
|
|
|
|
|
|
|
|
if (req.body.tools) { |
|
|
|
|
|
if (!req.body.tools.some((tool: any) => tool.type === "function" || tool.type === "web_search")) { |
|
|
req.body.tools = req.body.tools.map((tool: any) => ({ |
|
|
...tool, |
|
|
type: tool.type || "function" |
|
|
})); |
|
|
} |
|
|
} |
|
|
|
|
|
req.log.info({ |
|
|
originalModel: originalBody.model, |
|
|
newFormat: "openai-responses" |
|
|
}, "Successfully transformed request to Responses API format"); |
|
|
} |
|
|
} |
|
|
|
|
|
|
|
|
/**
 * Applies Mistral-specific fixups to an inbound "mistral-ai" chat request.
 *
 * - Validates the body against the mistral-ai schema.
 * - Normalizes object-form `image_url` entries ({ url } / { data }) inside
 *   message content arrays down to plain strings.
 * - Runs `fixMistralPrompt` over the parsed messages and writes the result
 *   back onto req.body.
 * - For AWS-hosted requests whose prompt ends with an assistant message,
 *   marks that message as a prefix and switches the outbound API to
 *   "mistral-text" (requests containing image content are exempt and stay
 *   in chat format).
 *
 * No-op for non-Mistral inbound requests. Mutates req.body and may mutate
 * req.outboundApi.
 */
function applyMistralPromptFixes(req: Request): void {
  if (req.inboundApi === "mistral-ai") {
    // Parsed, schema-shaped copy of the body; mutated in place below.
    const result = API_REQUEST_VALIDATORS["mistral-ai"].parse(req.body);

    // Only used for logging; the fix logic keys off hasImageContent instead.
    const isVisionModel = isMistralVisionModel(req.body.model);

    // True when any message carries array content containing an image part.
    const hasImageContent = result.messages?.some((msg: {content: string | any[]}) =>
      Array.isArray(msg.content) &&
      msg.content.some((item: any) => item.type === "image_url")
    );

    if (hasImageContent && Array.isArray(result.messages)) {
      // Clients may send OpenAI-style object-form image_url; flatten each
      // to the bare string form before the prompt fix runs.
      result.messages.forEach((msg: any) => {
        if (Array.isArray(msg.content)) {
          msg.content.forEach((item: any) => {
            if (item.type === "image_url") {
              if (typeof item.image_url === "object") {
                if (item.image_url.url) {
                  item.image_url = item.image_url.url;
                } else if (item.image_url.data) {
                  item.image_url = item.image_url.data;
                }
                // NOTE(review): this log also fires when the object had
                // neither `url` nor `data` (nothing was normalized) —
                // confirm that is intended.
                req.log.info(
                  { model: req.body.model },
                  "Normalized object-format image_url to string format"
                );
              }
            }
          });
        }
      });
    }

    // Write the fixed messages back onto the live request body.
    req.body.messages = fixMistralPrompt(result.messages);
    req.log.info(
      {
        n: req.body.messages.length,
        prev: result.messages.length,
        isVisionModel,
        hasImageContent
      },
      "Applied Mistral chat prompt fixes."
    );

    // Vision requests must remain in chat format; skip the text-completions
    // downgrade below.
    if (hasImageContent) {
      req.log.info(
        { model: req.body.model },
        "Detected Mistral vision request with image content. Keeping as chat format."
      );
      return;
    }

    // A trailing assistant message is a prefix-continuation prompt. On the
    // AWS deployment this is handled via the text completions API, so flag
    // the message and switch the outbound API accordingly.
    const { messages } = req.body;
    const lastMessage = messages && messages[messages.length - 1];
    if (lastMessage?.role === "assistant" && req.service === "aws") {
      lastMessage.prefix = true;
      req.outboundApi = "mistral-text";
      req.log.info(
        "Native Mistral chat prompt relies on assistant message prefix. Converting to text completions request."
      );
    }
  }
}
|
|
|
|
|
function toCamelCase(str: string): string { |
|
|
return str.replace(/_([a-z])/g, (_, letter) => letter.toUpperCase()); |
|
|
} |
|
|
|
|
|
function transformKeysToCamelCase(obj: any, hasTransformed = { value: false }): any { |
|
|
if (Array.isArray(obj)) { |
|
|
return obj.map(item => transformKeysToCamelCase(item, hasTransformed)); |
|
|
} |
|
|
|
|
|
if (obj !== null && typeof obj === 'object') { |
|
|
return Object.fromEntries( |
|
|
Object.entries(obj).map(([key, value]) => { |
|
|
const camelKey = toCamelCase(key); |
|
|
if (camelKey !== key) { |
|
|
hasTransformed.value = true; |
|
|
} |
|
|
return [ |
|
|
camelKey, |
|
|
transformKeysToCamelCase(value, hasTransformed) |
|
|
]; |
|
|
}) |
|
|
); |
|
|
} |
|
|
|
|
|
return obj; |
|
|
} |
|
|
|
|
|
function applyGoogleAIKeyTransforms(req: Request): void { |
|
|
|
|
|
|
|
|
|
|
|
if (req.outboundApi === "google-ai") { |
|
|
const hasTransformed = { value: false }; |
|
|
req.body = transformKeysToCamelCase(req.body, hasTransformed); |
|
|
if (hasTransformed.value) { |
|
|
req.log.info("Applied Gemini camelCase -> snake_case transform"); |
|
|
} |
|
|
} |
|
|
} |
|
|
|