追加op模型的适配,同时支持思考和非思考
Browse files- src/api/client.js +12 -5
- src/utils/utils.js +42 -26
src/api/client.js
CHANGED
|
@@ -205,15 +205,22 @@ export async function getAvailableModels() {
|
|
| 205 |
}
|
| 206 |
data = await response.json();
|
| 207 |
}
|
| 208 |
-
|
| 209 |
-
return {
|
| 210 |
-
object: 'list',
|
| 211 |
-
data: Object.keys(data.models).map(id => ({
|
| 212 |
id,
|
| 213 |
object: 'model',
|
| 214 |
created: Math.floor(Date.now() / 1000),
|
| 215 |
owned_by: 'google'
|
| 216 |
-
}))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 217 |
};
|
| 218 |
} catch (error) {
|
| 219 |
await handleApiError(error, token);
|
|
|
|
| 205 |
}
|
| 206 |
data = await response.json();
|
| 207 |
}
|
| 208 |
+
const modelList = Object.keys(data.models).map(id => ({
|
|
|
|
|
|
|
|
|
|
| 209 |
id,
|
| 210 |
object: 'model',
|
| 211 |
created: Math.floor(Date.now() / 1000),
|
| 212 |
owned_by: 'google'
|
| 213 |
+
}));
|
| 214 |
+
modelList.push({
|
| 215 |
+
id: "claude-opus-4-5",
|
| 216 |
+
object: 'model',
|
| 217 |
+
created: Math.floor(Date.now() / 1000),
|
| 218 |
+
owned_by: 'google'
|
| 219 |
+
})
|
| 220 |
+
|
| 221 |
+
return {
|
| 222 |
+
object: 'list',
|
| 223 |
+
data: modelList
|
| 224 |
};
|
| 225 |
} catch (error) {
|
| 226 |
await handleApiError(error, token);
|
src/utils/utils.js
CHANGED
|
@@ -39,7 +39,7 @@ function extractImagesFromContent(content) {
|
|
| 39 |
|
| 40 |
return result;
|
| 41 |
}
|
| 42 |
-
function handleUserMessage(extracted, antigravityMessages)
|
| 43 |
antigravityMessages.push({
|
| 44 |
role: "user",
|
| 45 |
parts: [
|
|
@@ -50,11 +50,11 @@ function handleUserMessage(extracted, antigravityMessages) {
|
|
| 50 |
]
|
| 51 |
})
|
| 52 |
}
|
| 53 |
-
function handleAssistantMessage(message, antigravityMessages)
|
| 54 |
const lastMessage = antigravityMessages[antigravityMessages.length - 1];
|
| 55 |
const hasToolCalls = message.tool_calls && message.tool_calls.length > 0;
|
| 56 |
const hasContent = message.content && message.content.trim() !== '';
|
| 57 |
-
|
| 58 |
const antigravityTools = hasToolCalls ? message.tool_calls.map(toolCall => ({
|
| 59 |
functionCall: {
|
| 60 |
id: toolCall.id,
|
|
@@ -64,21 +64,21 @@ function handleAssistantMessage(message, antigravityMessages) {
|
|
| 64 |
}
|
| 65 |
}
|
| 66 |
})) : [];
|
| 67 |
-
|
| 68 |
-
if (lastMessage?.role === "model" && hasToolCalls && !hasContent)
|
| 69 |
lastMessage.parts.push(...antigravityTools)
|
| 70 |
-
}
|
| 71 |
const parts = [];
|
| 72 |
if (hasContent) parts.push({ text: message.content });
|
| 73 |
parts.push(...antigravityTools);
|
| 74 |
-
|
| 75 |
antigravityMessages.push({
|
| 76 |
role: "model",
|
| 77 |
parts
|
| 78 |
})
|
| 79 |
}
|
| 80 |
}
|
| 81 |
-
function handleToolCall(message, antigravityMessages)
|
| 82 |
// 从之前的 model 消息中找到对应的 functionCall name
|
| 83 |
let functionName = '';
|
| 84 |
for (let i = antigravityMessages.length - 1; i >= 0; i--) {
|
|
@@ -93,7 +93,7 @@ function handleToolCall(message, antigravityMessages) {
|
|
| 93 |
if (functionName) break;
|
| 94 |
}
|
| 95 |
}
|
| 96 |
-
|
| 97 |
const lastMessage = antigravityMessages[antigravityMessages.length - 1];
|
| 98 |
const functionResponse = {
|
| 99 |
functionResponse: {
|
|
@@ -104,7 +104,7 @@ function handleToolCall(message, antigravityMessages) {
|
|
| 104 |
}
|
| 105 |
}
|
| 106 |
};
|
| 107 |
-
|
| 108 |
// 如果上一条消息是 user 且包含 functionResponse,则合并
|
| 109 |
if (lastMessage?.role === "user" && lastMessage.parts.some(p => p.functionResponse)) {
|
| 110 |
lastMessage.parts.push(functionResponse);
|
|
@@ -115,7 +115,7 @@ function handleToolCall(message, antigravityMessages) {
|
|
| 115 |
});
|
| 116 |
}
|
| 117 |
}
|
| 118 |
-
function openaiMessageToAntigravity(openaiMessages)
|
| 119 |
const antigravityMessages = [];
|
| 120 |
for (const message of openaiMessages) {
|
| 121 |
if (message.role === "user" || message.role === "system") {
|
|
@@ -127,10 +127,10 @@ function openaiMessageToAntigravity(openaiMessages) {
|
|
| 127 |
handleToolCall(message, antigravityMessages);
|
| 128 |
}
|
| 129 |
}
|
| 130 |
-
|
| 131 |
return antigravityMessages;
|
| 132 |
}
|
| 133 |
-
function generateGenerationConfig(parameters, enableThinking, actualModelName)
|
| 134 |
const generationConfig = {
|
| 135 |
topP: parameters.top_p ?? config.defaults.top_p,
|
| 136 |
topK: parameters.top_k ?? config.defaults.top_k,
|
|
@@ -149,14 +149,14 @@ function generateGenerationConfig(parameters, enableThinking, actualModelName) {
|
|
| 149 |
thinkingBudget: enableThinking ? 1024 : 0
|
| 150 |
}
|
| 151 |
}
|
| 152 |
-
if (enableThinking && actualModelName.includes("claude"))
|
| 153 |
delete generationConfig.topP;
|
| 154 |
}
|
| 155 |
return generationConfig
|
| 156 |
}
|
| 157 |
-
function convertOpenAIToolsToAntigravity(openaiTools)
|
| 158 |
if (!openaiTools || openaiTools.length === 0) return [];
|
| 159 |
-
return openaiTools.map((tool)
|
| 160 |
delete tool.function.parameters.$schema;
|
| 161 |
return {
|
| 162 |
functionDeclarations: [
|
|
@@ -169,16 +169,32 @@ function convertOpenAIToolsToAntigravity(openaiTools) {
|
|
| 169 |
}
|
| 170 |
})
|
| 171 |
}
|
| 172 |
-
function generateRequestBody(openaiMessages, modelName, parameters, openaiTools, token) {
|
| 173 |
|
| 174 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 175 |
modelName === 'gemini-2.5-pro' ||
|
| 176 |
modelName.startsWith('gemini-3-pro-') ||
|
| 177 |
modelName === "rev19-uic3-1p" ||
|
| 178 |
modelName === "gpt-oss-120b-medium"
|
| 179 |
-
|
| 180 |
|
| 181 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 182 |
project: token.projectId,
|
| 183 |
requestId: generateRequestId(),
|
| 184 |
request: {
|
|
@@ -200,12 +216,12 @@ function generateRequestBody(openaiMessages, modelName, parameters, openaiTools,
|
|
| 200 |
userAgent: "antigravity"
|
| 201 |
}
|
| 202 |
}
|
| 203 |
-
function getDefaultIp()
|
| 204 |
const interfaces = os.networkInterfaces();
|
| 205 |
-
if (interfaces.WLAN)
|
| 206 |
-
for (const inter of interfaces.WLAN)
|
| 207 |
-
if (inter.family === 'IPv4' && !inter.internal)
|
| 208 |
-
|
| 209 |
}
|
| 210 |
}
|
| 211 |
} else if (interfaces.wlan2) {
|
|
@@ -217,7 +233,7 @@ function getDefaultIp() {
|
|
| 217 |
}
|
| 218 |
return '127.0.0.1';
|
| 219 |
}
|
| 220 |
-
export
|
| 221 |
generateRequestId,
|
| 222 |
generateRequestBody,
|
| 223 |
getDefaultIp
|
|
|
|
| 39 |
|
| 40 |
return result;
|
| 41 |
}
|
| 42 |
+
function handleUserMessage(extracted, antigravityMessages){
|
| 43 |
antigravityMessages.push({
|
| 44 |
role: "user",
|
| 45 |
parts: [
|
|
|
|
| 50 |
]
|
| 51 |
})
|
| 52 |
}
|
| 53 |
+
function handleAssistantMessage(message, antigravityMessages){
|
| 54 |
const lastMessage = antigravityMessages[antigravityMessages.length - 1];
|
| 55 |
const hasToolCalls = message.tool_calls && message.tool_calls.length > 0;
|
| 56 |
const hasContent = message.content && message.content.trim() !== '';
|
| 57 |
+
|
| 58 |
const antigravityTools = hasToolCalls ? message.tool_calls.map(toolCall => ({
|
| 59 |
functionCall: {
|
| 60 |
id: toolCall.id,
|
|
|
|
| 64 |
}
|
| 65 |
}
|
| 66 |
})) : [];
|
| 67 |
+
|
| 68 |
+
if (lastMessage?.role === "model" && hasToolCalls && !hasContent){
|
| 69 |
lastMessage.parts.push(...antigravityTools)
|
| 70 |
+
}else{
|
| 71 |
const parts = [];
|
| 72 |
if (hasContent) parts.push({ text: message.content });
|
| 73 |
parts.push(...antigravityTools);
|
| 74 |
+
|
| 75 |
antigravityMessages.push({
|
| 76 |
role: "model",
|
| 77 |
parts
|
| 78 |
})
|
| 79 |
}
|
| 80 |
}
|
| 81 |
+
function handleToolCall(message, antigravityMessages){
|
| 82 |
// 从之前的 model 消息中找到对应的 functionCall name
|
| 83 |
let functionName = '';
|
| 84 |
for (let i = antigravityMessages.length - 1; i >= 0; i--) {
|
|
|
|
| 93 |
if (functionName) break;
|
| 94 |
}
|
| 95 |
}
|
| 96 |
+
|
| 97 |
const lastMessage = antigravityMessages[antigravityMessages.length - 1];
|
| 98 |
const functionResponse = {
|
| 99 |
functionResponse: {
|
|
|
|
| 104 |
}
|
| 105 |
}
|
| 106 |
};
|
| 107 |
+
|
| 108 |
// 如果上一条消息是 user 且包含 functionResponse,则合并
|
| 109 |
if (lastMessage?.role === "user" && lastMessage.parts.some(p => p.functionResponse)) {
|
| 110 |
lastMessage.parts.push(functionResponse);
|
|
|
|
| 115 |
});
|
| 116 |
}
|
| 117 |
}
|
| 118 |
+
function openaiMessageToAntigravity(openaiMessages){
|
| 119 |
const antigravityMessages = [];
|
| 120 |
for (const message of openaiMessages) {
|
| 121 |
if (message.role === "user" || message.role === "system") {
|
|
|
|
| 127 |
handleToolCall(message, antigravityMessages);
|
| 128 |
}
|
| 129 |
}
|
| 130 |
+
|
| 131 |
return antigravityMessages;
|
| 132 |
}
|
| 133 |
+
function generateGenerationConfig(parameters, enableThinking, actualModelName){
|
| 134 |
const generationConfig = {
|
| 135 |
topP: parameters.top_p ?? config.defaults.top_p,
|
| 136 |
topK: parameters.top_k ?? config.defaults.top_k,
|
|
|
|
| 149 |
thinkingBudget: enableThinking ? 1024 : 0
|
| 150 |
}
|
| 151 |
}
|
| 152 |
+
if (enableThinking && actualModelName.includes("claude")){
|
| 153 |
delete generationConfig.topP;
|
| 154 |
}
|
| 155 |
return generationConfig
|
| 156 |
}
|
| 157 |
+
function convertOpenAIToolsToAntigravity(openaiTools){
|
| 158 |
if (!openaiTools || openaiTools.length === 0) return [];
|
| 159 |
+
return openaiTools.map((tool)=>{
|
| 160 |
delete tool.function.parameters.$schema;
|
| 161 |
return {
|
| 162 |
functionDeclarations: [
|
|
|
|
| 169 |
}
|
| 170 |
})
|
| 171 |
}
|
|
|
|
| 172 |
|
| 173 |
+
/**
 * Translate a client-facing model id to the backend model name.
 *
 * The alias table covers the models whose public name differs from the
 * upstream name (e.g. the opus non-thinking alias maps to the `-thinking`
 * backend model; thinking is then disabled via the generation config).
 *
 * @param {string} modelName - model id as requested by the client
 * @returns {string} the backend model name (unchanged if no alias exists)
 */
function modelMapping(modelName){
    const aliases = new Map([
        ["claude-sonnet-4-5-thinking", "claude-sonnet-4-5"],
        ["claude-opus-4-5", "claude-opus-4-5-thinking"],
        ["gemini-2.5-flash-thinking", "gemini-2.5-flash"],
    ]);
    // Fall through to the requested name when there is no alias entry.
    return aliases.get(modelName) ?? modelName;
}
|
| 183 |
+
|
| 184 |
+
/**
 * Decide whether thinking (reasoning) should be enabled for a model.
 *
 * True for any explicit `-thinking` variant, for `gemini-3-pro-*` models,
 * and for a fixed set of models that always run with thinking on.
 *
 * @param {string} modelName - client-facing model id
 * @returns {boolean} true when thinking should be enabled
 */
function isEnableThinking(modelName){
    // Models that always think regardless of naming convention.
    const alwaysThinking = new Set([
        'gemini-2.5-pro',
        'rev19-uic3-1p',
        'gpt-oss-120b-medium',
    ]);
    if (modelName.endsWith('-thinking')) return true;
    if (modelName.startsWith('gemini-3-pro-')) return true;
    return alwaysThinking.has(modelName);
}
|
| 191 |
|
| 192 |
+
function generateRequestBody(openaiMessages,modelName,parameters,openaiTools,token){
|
| 193 |
+
|
| 194 |
+
const enableThinking = isEnableThinking(modelName);
|
| 195 |
+
const actualModelName = modelMapping(modelName);
|
| 196 |
+
|
| 197 |
+
return{
|
| 198 |
project: token.projectId,
|
| 199 |
requestId: generateRequestId(),
|
| 200 |
request: {
|
|
|
|
| 216 |
userAgent: "antigravity"
|
| 217 |
}
|
| 218 |
}
|
| 219 |
+
function getDefaultIp(){
|
| 220 |
const interfaces = os.networkInterfaces();
|
| 221 |
+
if (interfaces.WLAN){
|
| 222 |
+
for (const inter of interfaces.WLAN){
|
| 223 |
+
if (inter.family === 'IPv4' && !inter.internal){
|
| 224 |
+
return inter.address;
|
| 225 |
}
|
| 226 |
}
|
| 227 |
} else if (interfaces.wlan2) {
|
|
|
|
| 233 |
}
|
| 234 |
return '127.0.0.1';
|
| 235 |
}
|
| 236 |
+
export{
|
| 237 |
generateRequestId,
|
| 238 |
generateRequestBody,
|
| 239 |
getDefaultIp
|