| |
| use serde_json::{json, Value}; |
|
|
| |
| pub fn wrap_request( |
| body: &Value, |
| project_id: &str, |
| mapped_model: &str, |
| account_id: Option<&str>, |
| session_id: Option<&str>, |
| token: Option<&crate::proxy::token_manager::ProxyToken>, |
| ) -> Value { |
| |
| let original_model = body |
| .get("model") |
| .and_then(|v| v.as_str()) |
| .unwrap_or(mapped_model); |
|
|
| |
| let final_model_name = if !mapped_model.is_empty() { |
| mapped_model |
| } else { |
| original_model |
| }; |
|
|
| |
| let message_count = body.get("contents") |
| .and_then(|c| c.as_array()) |
| .map(|a| a.len()) |
| .unwrap_or(1); |
|
|
| |
| let mut inner_request = body.clone(); |
|
|
| |
| crate::proxy::mappers::common_utils::deep_clean_undefined(&mut inner_request, 0); |
|
|
| |
| |
| |
| let is_target_claude = final_model_name.to_lowercase().contains("claude"); |
|
|
| if let Some(contents) = inner_request |
| .get_mut("contents") |
| .and_then(|c| c.as_array_mut()) |
| { |
| for content in contents { |
| |
| let mut name_counters: std::collections::HashMap<String, usize> = |
| std::collections::HashMap::new(); |
|
|
| if let Some(parts) = content.get_mut("parts").and_then(|p| p.as_array_mut()) { |
| for part in parts { |
| if let Some(obj) = part.as_object_mut() { |
| |
| if let Some(fc) = obj.get_mut("functionCall") { |
| if fc.get("id").is_none() && is_target_claude { |
| let name = |
| fc.get("name").and_then(|n| n.as_str()).unwrap_or("unknown"); |
| let count = name_counters.entry(name.to_string()).or_insert(0); |
| let call_id = format!("call_{}_{}", name, count); |
| *count += 1; |
|
|
| fc.as_object_mut() |
| .unwrap() |
| .insert("id".to_string(), json!(call_id)); |
| tracing::debug!("[Gemini-Wrap] Request stage: Injected missing call_id '{}' for Claude model", call_id); |
| } |
| } |
|
|
| |
| if let Some(fr) = obj.get_mut("functionResponse") { |
| if fr.get("id").is_none() && is_target_claude { |
| |
| |
| let name = |
| fr.get("name").and_then(|n| n.as_str()).unwrap_or("unknown"); |
| let count = name_counters.entry(name.to_string()).or_insert(0); |
| let call_id = format!("call_{}_{}", name, count); |
| *count += 1; |
|
|
| fr.as_object_mut() |
| .unwrap() |
| .insert("id".to_string(), json!(call_id)); |
| tracing::debug!("[Gemini-Wrap] Request stage: Injected synced response_id '{}' for Claude model", call_id); |
| } |
| } |
|
|
| |
| if obj.contains_key("functionCall") && obj.get("thoughtSignature").is_none() |
| { |
| if let Some(s_id) = session_id { |
| if let Some(sig) = crate::proxy::SignatureCache::global() |
| .get_session_signature(s_id) |
| { |
| obj.insert("thoughtSignature".to_string(), json!(sig)); |
| tracing::debug!("[Gemini-Wrap] Injected signature (len: {}) for session: {}", sig.len(), s_id); |
| } else { |
| |
| |
| let is_flash = final_model_name.to_lowercase().contains("gemini-3-flash") |
| || final_model_name.to_lowercase().contains("gemini-3.1-flash"); |
| if is_flash { |
| obj.insert("thoughtSignature".to_string(), json!("skip_thought_signature_validator")); |
| tracing::debug!("[Gemini-Wrap] [FIX #2167] Injected sentinel signature for flash model (no session cache)"); |
| } |
| } |
| } |
| } |
| } |
| } |
| } |
| } |
| } |
|
|
| |
| |
| |
| |
| let lower_model = final_model_name.to_lowercase(); |
| if lower_model.contains("flash") |
| || lower_model.contains("pro") |
| || lower_model.contains("thinking") |
| { |
| |
| let req_max_tokens = inner_request.get("max_tokens").and_then(|v| v.as_u64()); |
|
|
| |
| let is_claude = lower_model.contains("claude"); |
| let is_preview = lower_model.contains("preview"); |
| let should_inject = lower_model.contains("thinking") |
| || (lower_model.contains("gemini-2.0-pro") && !is_preview) |
| || (lower_model.contains("gemini-3-pro") && !is_preview) |
| || (lower_model.contains("gemini-3.1-pro") && !is_preview); |
|
|
| if should_inject { |
| |
| let mut has_thinking = false; |
| if is_claude { |
| has_thinking = inner_request.get("thinking").is_some(); |
| } else { |
| if let Some(gc) = inner_request.get("generationConfig").and_then(|v| v.as_object()) { |
| has_thinking = gc.get("thinkingConfig").is_some(); |
| } |
| } |
|
|
| if !has_thinking { |
| tracing::debug!( |
| "[Gemini-Wrap] Auto-injecting default thinking for {}", |
| final_model_name |
| ); |
|
|
| |
| |
| let default_budget = crate::proxy::model_specs::get_thinking_budget(final_model_name, token); |
| |
| let gen_config = inner_request |
| .as_object_mut() |
| .unwrap() |
| .entry("generationConfig") |
| .or_insert(json!({})) |
| .as_object_mut() |
| .unwrap(); |
| |
| gen_config.insert( |
| "thinkingConfig".to_string(), |
| json!({ |
| "includeThoughts": true, |
| "thinkingBudget": default_budget |
| }), |
| ); |
| } |
| } |
|
|
| |
| let gen_config = inner_request |
| .as_object_mut() |
| .unwrap() |
| .entry("generationConfig") |
| .or_insert(json!({})) |
| .as_object_mut() |
| .unwrap(); |
|
|
| |
| if !gen_config.contains_key("topK") { |
| gen_config.insert("topK".to_string(), json!(40)); |
| } |
| if !gen_config.contains_key("topP") { |
| gen_config.insert("topP".to_string(), json!(1.0)); |
| } |
|
|
| |
| |
| |
| if let Some(thinking_config) = gen_config.get_mut("thinkingConfig") { |
| if let Some(level) = thinking_config.get("thinkingLevel").and_then(|v| v.as_str()).map(|s| s.to_uppercase()) { |
| let thinking_budget_cap = crate::proxy::model_specs::get_thinking_budget(final_model_name, token); |
| let budget: i64 = match level.as_str() { |
| "NONE" => 0, |
| "LOW" => (thinking_budget_cap / 4).max(4096) as i64, |
| "MEDIUM" => (thinking_budget_cap / 2).max(8192) as i64, |
| "HIGH" => thinking_budget_cap as i64, |
| _ => (thinking_budget_cap / 2).max(8192) as i64, |
| }; |
| tracing::info!( |
| "[Gemini-Wrap] Converting thinkingLevel '{}' to thinkingBudget {}", |
| level, budget |
| ); |
| if let Some(tc) = thinking_config.as_object_mut() { |
| tc.remove("thinkingLevel"); |
| tc.insert("thinkingBudget".to_string(), json!(budget)); |
| } |
| } |
| } |
|
|
| if let Some(thinking_config) = gen_config.get_mut("thinkingConfig") { |
| if let Some(budget_val) = thinking_config.get("thinkingBudget") { |
| if let Some(budget_i64) = budget_val.as_i64() { |
| |
| if budget_i64 != -1 { |
| let budget = budget_i64 as u64; |
| let thinking_budget_cap = crate::proxy::model_specs::get_thinking_budget(final_model_name, token); |
| let tb_config = crate::proxy::config::get_thinking_budget_config(); |
| let final_budget = match tb_config.mode { |
| crate::proxy::config::ThinkingBudgetMode::Passthrough => budget, |
| crate::proxy::config::ThinkingBudgetMode::Custom => { |
| let val = tb_config.custom_value as u64; |
| let is_limited = (final_model_name.contains("gemini") |
| || final_model_name.contains("thinking")) |
| && !final_model_name.contains("-image"); |
|
|
| if is_limited && val > thinking_budget_cap { |
| thinking_budget_cap |
| } else { |
| val |
| } |
| } |
| crate::proxy::config::ThinkingBudgetMode::Auto => { |
| let is_limited = (final_model_name.contains("gemini") |
| || final_model_name.contains("thinking")) |
| && !final_model_name.contains("-image"); |
|
|
| if is_limited && budget > thinking_budget_cap { |
| thinking_budget_cap |
| } else { |
| budget |
| } |
| } |
| crate::proxy::config::ThinkingBudgetMode::Adaptive => budget, |
| }; |
|
|
| if final_budget != budget { |
| thinking_config["thinkingBudget"] = json!(final_budget); |
| } |
| } |
| } |
| } |
| } |
|
|
| |
| |
| |
| let thinking_config_opt = gen_config.get("thinkingConfig"); |
| let is_adaptive = thinking_config_opt.map_or(false, |t| { |
| t.get("thinkingLevel").is_some() || t.get("thinkingBudget").and_then(|v| v.as_i64()) == Some(-1) |
| }) || (thinking_config_opt.and_then(|t| t.get("thinkingBudget").and_then(|v| v.as_u64())) == Some(32768) && is_claude); |
|
|
| if let Some(thinking_config) = gen_config.get("thinkingConfig") { |
| let budget_opt = thinking_config.get("thinkingBudget").and_then(|v| v.as_i64()); |
| |
| |
| |
| let current_max = gen_config |
| .get("maxOutputTokens") |
| .and_then(|v| v.as_u64()) |
| .or(req_max_tokens); |
|
|
| if is_adaptive { |
| if current_max.map_or(true, |m| m < 131072) { |
| gen_config.insert("maxOutputTokens".to_string(), json!(131072)); |
| } |
| } else if let Some(budget_i64) = budget_opt { |
| if budget_i64 > 0 { |
| let budget = budget_i64 as u64; |
| let min_required_max = budget + 8192; |
| if current_max.map_or(true, |m| m <= budget) { |
| tracing::info!( |
| "[Gemini-Wrap] Bumping maxOutputTokens from {:?} to {} to satisfy thinkingBudget ({})", |
| current_max, min_required_max, budget |
| ); |
| gen_config.insert("maxOutputTokens".to_string(), json!(min_required_max)); |
| } |
| } |
| } |
| } |
| } |
|
|
| |
| |
| { |
| let final_cap = crate::proxy::model_specs::get_max_output_tokens(final_model_name, token); |
| let gen_config = inner_request |
| .as_object_mut() |
| .unwrap() |
| .entry("generationConfig") |
| .or_insert(serde_json::json!({})) |
| .as_object_mut() |
| .unwrap(); |
| if let Some(current) = gen_config.get("maxOutputTokens").and_then(|v| v.as_u64()) { |
| if current > final_cap { |
| tracing::debug!( |
| "[Gemini-Wrap] Capped maxOutputTokens from {} to {} for model {}", |
| current, final_cap, final_model_name |
| ); |
| gen_config.insert("maxOutputTokens".to_string(), serde_json::json!(final_cap)); |
| } |
| } |
| } |
|
|
| |
| |
|
|
| |
| let tools_val: Option<Vec<Value>> = inner_request |
| .get("tools") |
| .and_then(|t| t.as_array()) |
| .map(|arr| arr.clone()); |
|
|
| |
| let size = body.get("size").and_then(|v| v.as_str()); |
| let quality = body.get("quality").and_then(|v| v.as_str()); |
| let image_size = body.get("imageSize").and_then(|v| v.as_str()); |
|
|
| |
| let config = crate::proxy::mappers::common_utils::resolve_request_config( |
| original_model, |
| final_model_name, |
| &tools_val, |
| size, |
| quality, |
| image_size, |
| Some(body), |
| ); |
|
|
| |
| if let Some(tools) = inner_request.get_mut("tools") { |
| if let Some(tools_arr) = tools.as_array_mut() { |
| for tool in tools_arr { |
| if let Some(decls) = tool.get_mut("functionDeclarations") { |
| if let Some(decls_arr) = decls.as_array_mut() { |
| |
| decls_arr.retain(|decl| { |
| if let Some(name) = decl.get("name").and_then(|v| v.as_str()) { |
| if name == "web_search" || name == "google_search" { |
| return false; |
| } |
| } |
| true |
| }); |
|
|
| |
| |
| |
| for decl in decls_arr { |
| |
| if let Some(decl_obj) = decl.as_object_mut() { |
| |
| if let Some(params_json_schema) = |
| decl_obj.remove("parametersJsonSchema") |
| { |
| let mut params = params_json_schema; |
| crate::proxy::common::json_schema::clean_json_schema( |
| &mut params, |
| ); |
| decl_obj.insert("parameters".to_string(), params); |
| } else if let Some(params) = decl_obj.get_mut("parameters") { |
| |
| crate::proxy::common::json_schema::clean_json_schema(params); |
| } |
| } |
| } |
| } |
| } |
| } |
| } |
| } |
|
|
| tracing::debug!( |
| "[Debug] Gemini Wrap: original='{}', mapped='{}', final='{}', type='{}'", |
| original_model, |
| final_model_name, |
| config.final_model, |
| config.request_type |
| ); |
|
|
| |
| if config.inject_google_search { |
| |
| if config.request_type == "web_search" { |
| if let Some(obj) = inner_request.as_object_mut() { |
| let tools_entry = obj.entry("tools").or_insert_with(|| json!([])); |
| if let Some(tools_arr) = tools_entry.as_array_mut() { |
| tools_arr.push(json!({ |
| "googleSearch": { |
| "enhancedContent": { |
| "imageSearch": { |
| "maxResultCount": 5 |
| } |
| } |
| } |
| })); |
| } |
| } |
| } else { |
| crate::proxy::mappers::common_utils::inject_google_search_tool(&mut inner_request, Some(&config.final_model)); |
| } |
| } |
|
|
| |
| if let Some(image_config) = config.image_config { |
| if let Some(obj) = inner_request.as_object_mut() { |
| |
| obj.remove("tools"); |
|
|
| |
| obj.remove("systemInstruction"); |
|
|
| |
| if let Some(contents) = obj.get_mut("contents").and_then(|c| c.as_array_mut()) { |
| for content in contents { |
| if let Some(c_obj) = content.as_object_mut() { |
| if !c_obj.contains_key("role") { |
| c_obj.insert("role".to_string(), json!("user")); |
| } |
| } |
| } |
| } |
|
|
| |
| let gen_config = obj.entry("generationConfig").or_insert_with(|| json!({})); |
| if let Some(gen_obj) = gen_config.as_object_mut() { |
| |
| let image_thinking_mode = crate::proxy::config::get_image_thinking_mode(); |
| tracing::debug!("[Gemini-Wrap] Image thinking mode: {}", image_thinking_mode); |
| |
| if image_thinking_mode == "disabled" { |
| |
| |
| gen_obj.insert("thinkingConfig".to_string(), json!({ |
| "includeThoughts": false |
| })); |
| tracing::debug!("[Gemini-Wrap] Image thinking mode disabled: set includeThoughts=false"); |
| } |
| |
| gen_obj.remove("responseMimeType"); |
| gen_obj.remove("responseModalities"); |
| gen_obj.insert("imageConfig".to_string(), image_config); |
| } |
| } |
| } else { |
| |
| let antigravity_identity = if config.request_type == "web_search" { |
| "You are a search engine bot. You will be given a query from a user. Your task is to search the web for relevant information that will help the user. You MUST perform a web search. Do not respond or interact with the user, please respond as if they typed the query into a search bar." |
| } else { |
| "You are Antigravity, a powerful agentic AI coding assistant designed by the Google Deepmind team working on Advanced Agentic Coding.\n\ |
| You are pair programming with a USER to solve their coding task. The task may require creating a new codebase, modifying or debugging an existing codebase, or simply answering a question.\n\ |
| **Absolute paths only**\n\ |
| **Proactiveness**" |
| }; |
|
|
| |
| if let Some(system_instruction) = inner_request.get_mut("systemInstruction") { |
| |
| if let Some(obj) = system_instruction.as_object_mut() { |
| if !obj.contains_key("role") { |
| obj.insert("role".to_string(), json!("user")); |
| } |
| } |
|
|
| if let Some(parts) = system_instruction.get_mut("parts") { |
| if let Some(parts_array) = parts.as_array_mut() { |
| |
| let has_antigravity = parts_array |
| .get(0) |
| .and_then(|p| p.get("text")) |
| .and_then(|t| t.as_str()) |
| .map(|s| s.contains("You are Antigravity")) |
| .unwrap_or(false); |
|
|
| if !has_antigravity { |
| |
| parts_array.insert(0, json!({"text": antigravity_identity})); |
| } |
|
|
| |
| let global_prompt_config = crate::proxy::config::get_global_system_prompt(); |
| if global_prompt_config.enabled |
| && !global_prompt_config.content.trim().is_empty() |
| { |
| |
| let insert_pos = if has_antigravity { 1 } else { 1 }; |
| if insert_pos <= parts_array.len() { |
| parts_array |
| .insert(insert_pos, json!({"text": global_prompt_config.content})); |
| } else { |
| parts_array.push(json!({"text": global_prompt_config.content})); |
| } |
| } |
| } |
| } |
| } else { |
| |
| let mut parts = vec![json!({"text": antigravity_identity})]; |
| |
| let global_prompt_config = crate::proxy::config::get_global_system_prompt(); |
| if global_prompt_config.enabled && !global_prompt_config.content.trim().is_empty() { |
| parts.push(json!({"text": global_prompt_config.content})); |
| } |
| inner_request["systemInstruction"] = json!({ |
| "role": "user", |
| "parts": parts |
| }); |
| } |
| } |
|
|
| |
| if inner_request.get("tools").is_some() && !inner_request.get("toolConfig").is_some() { |
| inner_request["toolConfig"] = json!({ |
| "functionCallingConfig": { "mode": "VALIDATED" } |
| }); |
| } |
|
|
| |
| if let Some(account_id_str) = account_id { |
| inner_request["sessionId"] = json!(crate::proxy::common::session::derive_session_id(account_id_str)); |
| } |
|
|
| let sid = session_id.unwrap_or("default"); |
| |
| |
| |
| let timestamp_ms = chrono::Utc::now().timestamp_millis(); |
| let random_hex = &uuid::Uuid::new_v4().simple().to_string()[..8]; |
| let official_request_id = format!("agent/{}/{}", timestamp_ms, random_hex); |
|
|
| |
| |
| let is_enterprise = if let Some(t) = token { |
| !t.email.ends_with("@gmail.com") && !t.email.ends_with("@googlemail.com") |
| } else { |
| false |
| }; |
| |
| |
| let official_ide_type = if is_enterprise { "JETSKI" } else { "ANTIGRAVITY" }; |
| let official_user_agent = if is_enterprise { "jetski" } else { "antigravity" }; |
|
|
| |
| if final_model_name == "loadCodeAssist" || inner_request.get("metadata").is_some() { |
| let metadata = inner_request.as_object_mut().unwrap().entry("metadata").or_insert(json!({})); |
| if let Some(m_obj) = metadata.as_object_mut() { |
| if m_obj.get("ideType").is_none() { |
| m_obj.insert("ideType".to_string(), json!(official_ide_type)); |
| } |
| } |
| } |
|
|
| |
| |
| |
| |
| let is_agent_request = config.request_type != "image_gen"; |
| |
| let mut final_request_obj = json!({ |
| "project": project_id, |
| "requestId": official_request_id, |
| "request": inner_request, |
| "model": config.final_model, |
| "userAgent": official_user_agent, |
| "requestType": if is_agent_request { "agent" } else { "image_gen" } |
| }); |
|
|
| if is_agent_request { |
| if let Some(obj) = final_request_obj.as_object_mut() { |
| |
| obj.insert("enabledCreditTypes".to_string(), json!(["GOOGLE_ONE_AI"])); |
| } |
| } |
|
|
| final_request_obj |
| } |
|
|
#[cfg(test)]
mod test_fixes {
    use super::*;
    use serde_json::json;

    /// A signature cached for the session must be injected as
    /// `thoughtSignature` on functionCall parts during wrapping.
    #[test]
    fn test_wrap_request_with_signature() {
        let session_id = "test-session-sig";
        let signature = "test-signature-must-be-longer-than-fifty-characters-to-be-cached-by-signature-cache-12345";
        crate::proxy::SignatureCache::global().cache_session_signature(
            session_id,
            signature.to_string(),
            1,
        );

        let request_body = json!({
            "model": "gemini-pro",
            "contents": [{
                "role": "user",
                "parts": [{
                    "functionCall": {
                        "name": "get_weather",
                        "args": {"location": "London"}
                    }
                }]
            }]
        });

        let wrapped =
            wrap_request(&request_body, "proj", "gemini-pro", None, Some(session_id), None);
        let first_part = &wrapped["request"]["contents"][0]["parts"][0];
        assert_eq!(first_part["thoughtSignature"].as_str().unwrap(), signature);
    }
}
|
|
| |
| pub fn unwrap_response(response: &Value) -> Value { |
| response.get("response").unwrap_or(response).clone() |
| } |
|
|
| |
| |
| |
| |
| pub fn inject_ids_to_response(response: &mut Value, model_name: &str) { |
| if !model_name.to_lowercase().contains("claude") { |
| return; |
| } |
|
|
| if let Some(candidates) = response |
| .get_mut("candidates") |
| .and_then(|c| c.as_array_mut()) |
| { |
| for candidate in candidates { |
| if let Some(parts) = candidate |
| .get_mut("content") |
| .and_then(|c| c.get_mut("parts")) |
| .and_then(|p| p.as_array_mut()) |
| { |
| let mut name_counters: std::collections::HashMap<String, usize> = |
| std::collections::HashMap::new(); |
| for part in parts { |
| if let Some(fc) = part.get_mut("functionCall").and_then(|f| f.as_object_mut()) { |
| if fc.get("id").is_none() { |
| let name = fc.get("name").and_then(|n| n.as_str()).unwrap_or("unknown"); |
| let count = name_counters.entry(name.to_string()).or_insert(0); |
| let call_id = format!("call_{}_{}", name, count); |
| *count += 1; |
|
|
| fc.insert("id".to_string(), json!(call_id)); |
| tracing::debug!("[Gemini-Wrap] Response stage: Injected synthetic call_id '{}' for client", call_id); |
| } |
| } |
| } |
| } |
| } |
| } |
| } |
|
|
#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    // Envelope basics: project / model / requestId format.
    #[test]
    fn test_wrap_request() {
        let body = json!({
            "model": "gemini-2.5-flash",
            "contents": [{"role": "user", "parts": [{"text": "Hi"}]}]
        });

        let result = wrap_request(&body, "test-project", "gemini-2.5-flash", None, None, None);
        assert_eq!(result["project"], "test-project");
        assert_eq!(result["model"], "gemini-2.5-flash");
        assert!(result["requestId"].as_str().unwrap().starts_with("agent/"));
    }

    // Unwrapping lifts the inner `response` object out of the envelope.
    #[test]
    fn test_unwrap_response() {
        let wrapped = json!({
            "response": {
                "candidates": [{"content": {"parts": [{"text": "Hello"}]}}]
            }
        });

        let result = unwrap_response(&wrapped);
        assert!(result.get("candidates").is_some());
        assert!(result.get("response").is_none());
    }

    // FIX: this test previously bound `sys` without asserting anything, so
    // it could never fail. It now verifies the synthesized instruction.
    #[test]
    fn test_antigravity_identity_injection_with_role() {
        let body = json!({
            "model": "gemini-pro",
            "messages": []
        });

        let result = wrap_request(&body, "test-proj", "gemini-pro", None, None, None);

        // With no incoming systemInstruction, one must be synthesized with
        // the user role and the Antigravity identity as the first part.
        let sys = result
            .get("request")
            .unwrap()
            .get("systemInstruction")
            .unwrap();
        assert_eq!(sys["role"], "user");
        assert!(sys["parts"][0]["text"]
            .as_str()
            .unwrap()
            .contains("You are Antigravity"));
    }

    // Explicit budgets above the model cap must be clamped in Auto mode.
    #[test]
    fn test_gemini_flash_thinking_budget_capping() {
        crate::proxy::config::update_thinking_budget_config(
            crate::proxy::config::ThinkingBudgetConfig::default(),
        );

        let body = json!({
            "model": "gemini-2.0-flash-thinking-exp",
            "generationConfig": {
                "thinkingConfig": {
                    "includeThoughts": true,
                    "thinkingBudget": 32000
                }
            }
        });

        let result =
            wrap_request(&body, "test-proj", "gemini-2.0-flash-thinking-exp", None, None, None);
        let req = result.get("request").unwrap();
        let gen_config = req.get("generationConfig").unwrap();
        let budget = gen_config["thinkingConfig"]["thinkingBudget"]
            .as_u64()
            .unwrap();

        assert_eq!(budget, 24576);

        // Same clamping applies to the pro family.
        let body_pro = json!({
            "model": "gemini-2.0-pro-exp",
            "generationConfig": {
                "thinkingConfig": {
                    "includeThoughts": true,
                    "thinkingBudget": 32000
                }
            }
        });
        let result_pro =
            wrap_request(&body_pro, "test-proj", "gemini-2.0-pro-exp", None, None, None);
        let budget_pro = result_pro["request"]["generationConfig"]["thinkingConfig"]
            ["thinkingBudget"]
            .as_u64()
            .unwrap();

        assert_eq!(budget_pro, 24576);
    }

    // "disabled" image thinking mode forces includeThoughts=false.
    #[test]
    fn test_image_thinking_mode_disabled() {
        crate::proxy::config::update_image_thinking_mode(Some("disabled".to_string()));

        let body = json!({
            "model": "gemini-3-pro-image-2k",
            "contents": [{"role": "user", "parts": [{"text": "Draw a cat"}]}]
        });

        let result = wrap_request(&body, "test-proj", "gemini-3-pro-image-2k", None, None, None);
        let req = result.get("request").unwrap();
        let gen_config = req.get("generationConfig").unwrap();

        let thinking_config = gen_config.get("thinkingConfig").unwrap();
        assert_eq!(thinking_config["includeThoughts"], false);

        // Restore global state for other tests.
        crate::proxy::config::update_image_thinking_mode(Some("enabled".to_string()));
    }

    // A user-supplied systemInstruction is kept after the injected identity.
    #[test]
    fn test_user_instruction_preservation() {
        let body = json!({
            "model": "gemini-pro",
            "systemInstruction": {
                "role": "user",
                "parts": [{"text": "User custom prompt"}]
            }
        });

        let result = wrap_request(&body, "test-proj", "gemini-pro", None, None, None);
        let sys = result
            .get("request")
            .unwrap()
            .get("systemInstruction")
            .unwrap();
        let parts = sys.get("parts").unwrap().as_array().unwrap();

        assert_eq!(parts.len(), 2);
        assert!(parts[0]
            .get("text")
            .unwrap()
            .as_str()
            .unwrap()
            .contains("You are Antigravity"));
        assert_eq!(
            parts[1].get("text").unwrap().as_str().unwrap(),
            "User custom prompt"
        );
    }

    // An existing Antigravity identity must not be injected twice.
    #[test]
    fn test_duplicate_prevention() {
        let body = json!({
            "model": "gemini-pro",
            "systemInstruction": {
                "parts": [{"text": "You are Antigravity..."}]
            }
        });

        let result = wrap_request(&body, "test-proj", "gemini-pro", None, None, None);
        let sys = result
            .get("request")
            .unwrap()
            .get("systemInstruction")
            .unwrap();
        let parts = sys.get("parts").unwrap().as_array().unwrap();

        assert_eq!(parts.len(), 1);
    }

    // Reference images (inlineData parts) must survive image wrapping.
    #[test]
    fn test_image_generation_with_reference_images() {
        let mut parts = Vec::new();
        parts.push(json!({"text": "Generate a variation"}));

        for _ in 0..14 {
            parts.push(json!({
                "inlineData": {
                    "mimeType": "image/jpeg",
                    "data": "base64data..."
                }
            }));
        }

        let body = json!({
            "model": "gemini-3-pro-image",
            "contents": [{"parts": parts}]
        });

        let result = wrap_request(&body, "test-proj", "gemini-3-pro-image", None, None, None);

        let request = result.get("request").unwrap();
        let contents = request.get("contents").unwrap().as_array().unwrap();
        let result_parts = contents[0].get("parts").unwrap().as_array().unwrap();

        // 1 text part + 14 reference images.
        assert_eq!(result_parts.len(), 15);
    }

    // Custom budget mode overrides an explicit request budget.
    #[test]
    fn test_gemini_pro_thinking_budget_processing() {
        use crate::proxy::config::{
            update_thinking_budget_config, ThinkingBudgetConfig, ThinkingBudgetMode,
        };

        update_thinking_budget_config(ThinkingBudgetConfig {
            mode: ThinkingBudgetMode::Custom,
            custom_value: 1024,
            effort: None,
        });

        let body = json!({
            "model": "gemini-3-pro-preview",
            "generationConfig": {
                "thinkingConfig": {
                    "includeThoughts": true,
                    "thinkingBudget": 32000
                }
            }
        });

        let result = wrap_request(&body, "test-proj", "gemini-3-pro-preview", None, None, None);
        let req = result.get("request").unwrap();
        let gen_config = req.get("generationConfig").unwrap();

        let budget = gen_config["thinkingConfig"]["thinkingBudget"]
            .as_u64()
            .unwrap();

        assert_eq!(
            budget, 1024,
            "Budget should be overridden to 1024 by custom config, proving logic execution"
        );

        // Restore global state for other tests.
        update_thinking_budget_config(ThinkingBudgetConfig::default());
    }

    #[cfg(test)]
    mod test_v4_fixes {
        use super::*;
        use serde_json::json;

        // Claude thinking defaults go under generationConfig, never at root.
        #[test]
        fn test_claude_no_root_thinking_injection() {
            crate::proxy::config::update_thinking_budget_config(
                crate::proxy::config::ThinkingBudgetConfig {
                    mode: crate::proxy::config::ThinkingBudgetMode::Auto,
                    custom_value: 0,
                    effort: None,
                },
            );

            let body = json!({
                "model": "claude-3-7-sonnet-thinking",
                "messages": [{"role": "user", "content": "hi"}]
            });

            let result =
                wrap_request(&body, "proj", "claude-3-7-sonnet-thinking", None, None, None);
            let req = result.get("request").unwrap();

            assert!(
                req.get("thinking").is_none(),
                "Root level 'thinking' should NOT be present"
            );

            let gen_config = req
                .get("generationConfig")
                .expect("generationConfig should be present");
            let thinking_config = gen_config
                .get("thinkingConfig")
                .expect("thinkingConfig should be injected");

            let budget = thinking_config["thinkingBudget"]
                .as_u64()
                .expect("thinkingBudget should be a number");
            assert_eq!(budget, 16000, "Claude default thinking budget should be 16000");
        }

        // Gemini thinking models get a default budget when none is given.
        #[test]
        fn test_gemini_thinking_injection_default() {
            let body = json!({
                "model": "gemini-2.0-flash-thinking-exp",
                "contents": [{"role": "user", "parts": [{"text": "hi"}]}]
            });

            let result =
                wrap_request(&body, "proj", "gemini-2.0-flash-thinking-exp", None, None, None);
            let req = result.get("request").unwrap();
            let gen_config = req.get("generationConfig").unwrap();
            let thinking_config = gen_config.get("thinkingConfig").unwrap();

            let budget = thinking_config["thinkingBudget"].as_u64().unwrap();
            assert_eq!(budget, 24576, "Gemini default thinking budget should be 24576");
        }
    }

    // Preview pro models must NOT receive auto-injected thinkingConfig
    // (upstream rejects it with a 400); non-preview pro models must.
    #[test]
    fn test_gemini_pro_auto_inject_thinking() {
        crate::proxy::config::update_thinking_budget_config(
            crate::proxy::config::ThinkingBudgetConfig {
                mode: crate::proxy::config::ThinkingBudgetMode::Auto,
                custom_value: 24576,
                effort: None,
            },
        );

        let body = json!({
            "model": "gemini-3-pro-preview",
            "generationConfig": {}
        });

        let result = wrap_request(&body, "test-proj", "gemini-3-pro-preview", None, None, None);
        let req = result.get("request").unwrap();
        let gen_config = req.get("generationConfig").unwrap();

        assert!(
            gen_config.get("thinkingConfig").is_none(),
            "Should NOT auto-inject thinkingConfig for gemini-3-pro-preview to avoid 400 error"
        );

        let body_std = json!({
            "model": "gemini-3-pro",
            "generationConfig": {}
        });
        let result_std = wrap_request(&body_std, "test-proj", "gemini-3-pro", None, None, None);
        let gen_config_std = result_std
            .get("request")
            .unwrap()
            .get("generationConfig")
            .unwrap();

        assert!(
            gen_config_std.get("thinkingConfig").is_some(),
            "Should still auto-inject thinkingConfig for standard gemini-3-pro"
        );
    }

    // OpenAI-style size/quality map onto aspectRatio/imageSize.
    #[test]
    fn test_openai_image_params_support() {
        let body_1 = json!({
            "model": "gemini-3-pro-image",
            "size": "1920x1080",
            "quality": "hd",
            "prompt": "Test"
        });

        let result_1 = wrap_request(&body_1, "test-proj", "gemini-3-pro-image", None, None, None);
        let req_1 = result_1.get("request").unwrap();
        let gen_config_1 = req_1.get("generationConfig").unwrap();
        let image_config_1 = gen_config_1.get("imageConfig").unwrap();

        assert_eq!(image_config_1["aspectRatio"], "16:9");
        assert_eq!(image_config_1["imageSize"], "4K");

        let body_2 = json!({
            "model": "gemini-3-pro-image",
            "size": "1:1",
            "quality": "standard",
            "prompt": "Test"
        });

        let result_2 = wrap_request(&body_2, "test-proj", "gemini-3-pro-image", None, None, None);
        let req_2 = result_2.get("request").unwrap();
        let image_config_2 = req_2["generationConfig"]["imageConfig"]
            .as_object()
            .unwrap();

        assert_eq!(image_config_2["aspectRatio"], "1:1");
        assert_eq!(image_config_2["imageSize"], "1K");
    }

    // Gemini 2.0+ supports mixing functionDeclarations with googleSearch.
    #[test]
    fn test_mixed_tools_injection_gemini_native() {
        let body = json!({
            "contents": [{"parts": [{"text": "Hello"}]}],
            "tools": [{"functionDeclarations": [{"name": "get_weather", "parameters": {"type": "OBJECT", "properties": {"location": {"type": "STRING"}}}}]}],
            "generationConfig": {}
        });

        use crate::proxy::mappers::common_utils::resolve_request_config;
        let _config =
            resolve_request_config("-online", "gemini-2.0-flash", &None, None, None, None, None);

        let mut inner_request = body.clone();
        crate::proxy::mappers::common_utils::inject_google_search_tool(
            &mut inner_request,
            Some("gemini-2.0-flash"),
        );

        let tools = inner_request["tools"].as_array().expect("Should have tools");
        let has_functions = tools.iter().any(|t| t.get("functionDeclarations").is_some());
        let has_google_search = tools.iter().any(|t| t.get("googleSearch").is_some());

        assert!(has_functions, "Should contain functionDeclarations");
        assert!(
            has_google_search,
            "Should contain googleSearch (Gemini 2.0+ supports mixed tools)"
        );
    }
}
|
|