// Pro2a-26 / Temp — bunnybun07 — commit cfe09c2 (verified)
// NOTE(review): the line above is web-page residue from the file's origin,
// preserved here as a comment so the file remains valid JavaScript.
/**
 * Convert OpenAI-style request parameters into the parameter shape
 * expected by PromptPlayer for the given provider.
 *
 * @param {object} openaiParams - Parameters in OpenAI chat-completions format.
 * @param {string} provider - Provider key ("google", "anthropic",
 *   "anthropic.bedrock", "mistral", "cohere", "openai", "openai.azure", ...).
 *   Unknown providers fall through to the OpenAI mapping.
 * @param {string} model - Model name; drives per-model defaults
 *   (Anthropic max_tokens caps, "-thinking" models, "o4-mini").
 * @returns {object} Provider-specific parameters with null/undefined
 *   entries removed.
 */
function convertOpenAIParamsToPromptPlayer(openaiParams, provider, model) {
  let params = {}
  switch (provider) {
    case "google":
      params = {
        response_format: openaiParams.response_format || null,
        // OpenAI's `n` maps to Gemini's candidateCount; accept either spelling
        // (candidateCount wins for backward compatibility).
        candidateCount: openaiParams.candidateCount ?? openaiParams.n ?? null,
        stopSequences: openaiParams.stop || null,
        maxOutputTokens: openaiParams.max_tokens ?? 50000,
        temperature: openaiParams.temperature ?? 1,
        topP: openaiParams.top_p ?? null,
        topK: openaiParams.top_k ?? null
      };
      break;
    case "anthropic":
    case "anthropic.bedrock":
      params = {
        // Per-family output caps: sonnet 64k, opus 32k, otherwise 64k.
        max_tokens: openaiParams.max_tokens ?? (model.includes("sonnet") ? 64000 : model.includes("opus") ? 32000 : 64000),
        temperature: openaiParams.temperature ?? 1,
        // `??` (was `||`), so an explicit top_k of 0 is preserved instead of
        // being silently dropped; also consistent with the google branch.
        top_k: openaiParams.top_k ?? null,
        top_p: openaiParams.top_p ?? null
      };
      // Enable extended thinking when the model name carries the "-thinking"
      // suffix or the caller passed a thinking object.
      if (model.includes("-thinking") || openaiParams.thinking) {
        params.thinking = {
          type: "enabled",
          // Default budget: half of the output-token ceiling.
          budget_tokens: openaiParams.thinking?.budget_tokens ?? (params.max_tokens ? Math.floor(params.max_tokens / 2) : null)
        };
      }
      break;
    case "mistral":
      params = {
        temperature: openaiParams.temperature ?? 0.3,
        top_p: openaiParams.top_p ?? 1,
        max_tokens: openaiParams.max_tokens ?? null,
        // `??` keeps an explicit safe_prompt=false instead of dropping it.
        safe_prompt: openaiParams.safe_prompt ?? null,
        // `??` (was `||`): seed 0 is a valid seed and must be preserved.
        random_seed: openaiParams.seed ?? null
      };
      break;
    case "cohere":
      params = {
        max_tokens: openaiParams.max_tokens ?? null,
        temperature: openaiParams.temperature ?? 0.3,
        k: openaiParams.top_k ?? 0,
        p: openaiParams.top_p ?? 0,
        presence_penalty: openaiParams.presence_penalty ?? 0,
        frequency_penalty: openaiParams.frequency_penalty ?? 0
      };
      break;
    case "openai":
    case "openai.azure":
    default:
      params = {
        temperature: openaiParams.temperature ?? 1,
        // `??` (was `||`): seed 0 is valid and was being stripped.
        seed: openaiParams.seed ?? null,
        response_format: openaiParams.response_format || null,
        top_p: openaiParams.top_p ?? 1,
        // `??` keeps explicit 0 penalties (0 is also the API default, so
        // sending it explicitly is harmless).
        frequency_penalty: openaiParams.frequency_penalty ?? null,
        presence_penalty: openaiParams.presence_penalty ?? null,
        max_completion_tokens: openaiParams.max_tokens ?? null,
        reasoning_effort: openaiParams.reasoning_effort || null
      };
      // Special case: o4-mini always runs at high reasoning effort.
      if (model.includes("o4-mini")) {
        params.reasoning_effort = "high";
      }
      break;
  }
  // Strip parameters that resolved to null/undefined so they are never sent.
  // `== null` intentionally matches both null and undefined.
  for (const key of Object.keys(params)) {
    if (params[key] == null) {
      delete params[key];
    }
  }
  return params;
}