diff --git a/src-tauri/src/proxy/forwarder.rs b/src-tauri/src/proxy/forwarder.rs index c4172ea3d..6fa7f43d0 100644 --- a/src-tauri/src/proxy/forwarder.rs +++ b/src-tauri/src/proxy/forwarder.rs @@ -9,7 +9,10 @@ use super::{ failover_switch::FailoverSwitchManager, log_codes::fwd as log_fwd, provider_router::ProviderRouter, - providers::{get_adapter, AuthInfo, AuthStrategy, ProviderAdapter, ProviderType}, + providers::{ + gemini_shadow::GeminiShadowStore, get_adapter, AuthInfo, AuthStrategy, ProviderAdapter, + ProviderType, + }, thinking_budget_rectifier::{rectify_thinking_budget, should_rectify_thinking_budget}, thinking_rectifier::{ normalize_thinking_type, rectify_anthropic_request, should_rectify_thinking_signature, @@ -43,12 +46,15 @@ pub struct RequestForwarder { router: Arc, status: Arc>, current_providers: Arc>>, + gemini_shadow: Arc, /// 故障转移切换管理器 failover_manager: Arc, /// AppHandle,用于发射事件和更新托盘 app_handle: Option, /// 请求开始时的"当前供应商 ID"(用于判断是否需要同步 UI/托盘) current_provider_id_at_start: String, + /// 代理会话 ID(用于 Gemini Native shadow replay) + session_id: String, /// 整流器配置 rectifier_config: RectifierConfig, /// 优化器配置 @@ -66,9 +72,11 @@ impl RequestForwarder { non_streaming_timeout: u64, status: Arc>, current_providers: Arc>>, + gemini_shadow: Arc, failover_manager: Arc, app_handle: Option, current_provider_id_at_start: String, + session_id: String, _streaming_first_byte_timeout: u64, _streaming_idle_timeout: u64, rectifier_config: RectifierConfig, @@ -79,9 +87,11 @@ impl RequestForwarder { router, status, current_providers, + gemini_shadow, failover_manager, app_handle, current_provider_id_at_start, + session_id, rectifier_config, optimizer_config, copilot_optimizer_config, @@ -878,7 +888,7 @@ impl RequestForwarder { let api_format = resolved_claude_api_format .as_deref() .unwrap_or_else(|| super::providers::get_claude_api_format(provider)); - rewrite_claude_transform_endpoint(endpoint, api_format, is_copilot) + rewrite_claude_transform_endpoint(endpoint, 
api_format, is_copilot, &mapped_body) } else { ( endpoint.to_string(), @@ -888,7 +898,13 @@ impl RequestForwarder { ) }; - let url = if is_full_url { + let url = if matches!(resolved_claude_api_format.as_deref(), Some("gemini_native")) { + super::gemini_url::resolve_gemini_native_url( + &base_url, + &effective_endpoint, + is_full_url, + ) + } else if is_full_url { append_query_to_full_url(&base_url, passthrough_query.as_deref()) } else { adapter.build_url(&base_url, &effective_endpoint) @@ -904,6 +920,8 @@ impl RequestForwarder { mapped_body, provider, api_format, + Some(&self.session_id), + Some(self.gemini_shadow.as_ref()), )? } else { adapter.transform_request(mapped_body, provider)? @@ -1083,8 +1101,11 @@ impl RequestForwarder { .ok() .and_then(|u| u.authority().map(|a| a.to_string())); + let should_send_anthropic_headers = adapter.name() == "Claude" + && matches!(resolved_claude_api_format.as_deref(), Some("anthropic")); + // 预计算 anthropic-beta 值(仅 Claude) - let anthropic_beta_value = if adapter.name() == "Claude" { + let anthropic_beta_value = if should_send_anthropic_headers { const CLAUDE_CODE_BETA: &str = "claude-code-20250219"; Some(if let Some(beta) = headers.get("anthropic-beta") { if let Ok(beta_str) = beta.to_str() { @@ -1204,8 +1225,10 @@ impl RequestForwarder { // --- anthropic-version — 透传客户端值 --- if key_str.eq_ignore_ascii_case("anthropic-version") { - saw_anthropic_version = true; - ordered_headers.append(key.clone(), value.clone()); + if should_send_anthropic_headers { + saw_anthropic_version = true; + ordered_headers.append(key.clone(), value.clone()); + } continue; } @@ -1246,7 +1269,7 @@ impl RequestForwarder { } // anthropic-version:仅在缺失时补充默认值 - if adapter.name() == "Claude" && !saw_anthropic_version { + if should_send_anthropic_headers && !saw_anthropic_version { ordered_headers.append( "anthropic-version", http::HeaderValue::from_static("2023-06-01"), @@ -1594,6 +1617,7 @@ fn rewrite_claude_transform_endpoint( endpoint: &str, api_format: 
&str, is_copilot: bool, + body: &Value, ) -> (String, Option) { let (path, query) = split_endpoint_and_query(endpoint); let passthrough_query = if is_claude_messages_path(path) { @@ -1606,6 +1630,32 @@ fn rewrite_claude_transform_endpoint( return (endpoint.to_string(), passthrough_query); } + if api_format == "gemini_native" { + let model = + super::providers::transform_gemini::extract_gemini_model(body).unwrap_or("unknown"); + let is_stream = body + .get("stream") + .and_then(|value| value.as_bool()) + .unwrap_or(false); + let target_path = if is_stream { + format!("/v1beta/models/{model}:streamGenerateContent") + } else { + format!("/v1beta/models/{model}:generateContent") + }; + + let rewritten_query = merge_query_params( + passthrough_query.as_deref(), + if is_stream { Some("alt=sse") } else { None }, + ); + + let rewritten = match rewritten_query.as_deref() { + Some(query) if !query.is_empty() => format!("{target_path}?{query}"), + _ => target_path, + }; + + return (rewritten, rewritten_query); + } + let target_path = if is_copilot && api_format == "openai_responses" { "/v1/responses" } else if is_copilot { @@ -1624,6 +1674,26 @@ fn rewrite_claude_transform_endpoint( (rewritten, passthrough_query) } +fn merge_query_params(base_query: Option<&str>, extra_param: Option<&str>) -> Option { + let mut params: Vec = base_query + .into_iter() + .flat_map(|query| query.split('&')) + .filter(|pair| !pair.is_empty()) + .filter(|pair| !pair.starts_with("alt=")) + .map(ToString::to_string) + .collect(); + + if let Some(extra_param) = extra_param { + params.push(extra_param.to_string()); + } + + if params.is_empty() { + None + } else { + Some(params.join("&")) + } +} + fn append_query_to_full_url(base_url: &str, query: Option<&str>) -> String { match query { Some(query) if !query.is_empty() => { @@ -1752,6 +1822,7 @@ mod tests { "/v1/messages?beta=true&foo=bar", "openai_chat", false, + &json!({ "model": "gpt-5.4" }), ); assert_eq!(endpoint, "/v1/chat/completions?foo=bar"); 
@@ -1764,6 +1835,7 @@ mod tests { "/claude/v1/messages?beta=true&x-id=1", "openai_responses", false, + &json!({ "model": "gpt-5.4" }), ); assert_eq!(endpoint, "/v1/responses?x-id=1"); @@ -1772,8 +1844,12 @@ mod tests { #[test] fn rewrite_claude_transform_endpoint_uses_copilot_path() { - let (endpoint, passthrough_query) = - rewrite_claude_transform_endpoint("/v1/messages?beta=true&x-id=1", "anthropic", true); + let (endpoint, passthrough_query) = rewrite_claude_transform_endpoint( + "/v1/messages?beta=true&x-id=1", + "anthropic", + true, + &json!({ "model": "claude-sonnet-4-6" }), + ); assert_eq!(endpoint, "/chat/completions?x-id=1"); assert_eq!(passthrough_query.as_deref(), Some("x-id=1")); @@ -1785,12 +1861,45 @@ mod tests { "/v1/messages?beta=true&x-id=1", "openai_responses", true, + &json!({ "model": "gpt-5.4" }), ); assert_eq!(endpoint, "/v1/responses?x-id=1"); assert_eq!(passthrough_query.as_deref(), Some("x-id=1")); } + #[test] + fn rewrite_claude_transform_endpoint_maps_gemini_generate_content() { + let (endpoint, passthrough_query) = rewrite_claude_transform_endpoint( + "/v1/messages?beta=true&x-id=1", + "gemini_native", + false, + &json!({ "model": "gemini-2.5-pro" }), + ); + + assert_eq!( + endpoint, + "/v1beta/models/gemini-2.5-pro:generateContent?x-id=1" + ); + assert_eq!(passthrough_query.as_deref(), Some("x-id=1")); + } + + #[test] + fn rewrite_claude_transform_endpoint_maps_gemini_streaming() { + let (endpoint, passthrough_query) = rewrite_claude_transform_endpoint( + "/v1/messages?beta=true", + "gemini_native", + false, + &json!({ "model": "gemini-2.5-flash", "stream": true }), + ); + + assert_eq!( + endpoint, + "/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse" + ); + assert_eq!(passthrough_query.as_deref(), Some("alt=sse")); + } + #[test] fn append_query_to_full_url_preserves_existing_query_string() { let url = append_query_to_full_url("https://relay.example/api?foo=bar", Some("x-id=1")); @@ -1798,6 +1907,43 @@ mod tests { 
assert_eq!(url, "https://relay.example/api?foo=bar&x-id=1"); } + #[test] + fn build_gemini_native_url_uses_origin_when_base_ends_with_v1beta() { + let url = crate::proxy::gemini_url::build_gemini_native_url( + "https://generativelanguage.googleapis.com/v1beta", + "/v1beta/models/gemini-2.5-pro:generateContent", + ); + + assert_eq!( + url, + "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-pro:generateContent" + ); + } + + #[test] + fn build_gemini_native_url_uses_origin_when_base_already_contains_models_prefix() { + let url = crate::proxy::gemini_url::build_gemini_native_url( + "https://generativelanguage.googleapis.com/v1beta/models", + "/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse", + ); + + assert_eq!( + url, + "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse" + ); + } + + #[test] + fn resolve_gemini_native_url_keeps_opaque_full_url_as_is() { + let url = crate::proxy::gemini_url::resolve_gemini_native_url( + "https://relay.example/custom/generate-content", + "/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse", + true, + ); + + assert_eq!(url, "https://relay.example/custom/generate-content?alt=sse"); + } + #[test] fn force_identity_for_stream_flag_requests() { let headers = HeaderMap::new(); diff --git a/src-tauri/src/proxy/gemini_url.rs b/src-tauri/src/proxy/gemini_url.rs new file mode 100644 index 000000000..f84fe095d --- /dev/null +++ b/src-tauri/src/proxy/gemini_url.rs @@ -0,0 +1,268 @@ +//! Gemini Native URL helpers. +//! +//! Normalizes legacy Gemini/OpenAI-compatible base URLs into the canonical +//! Gemini Native `models/*:generateContent` endpoints. 
+ +pub fn resolve_gemini_native_url(base_url: &str, endpoint: &str, is_full_url: bool) -> String { + if !is_full_url || should_normalize_gemini_full_url(base_url) { + return build_gemini_native_url(base_url, endpoint); + } + + let base_url = base_url + .split_once('#') + .map_or(base_url, |(base, _)| base) + .trim_end_matches('/'); + let (base_without_query, base_query) = split_query(base_url); + let (_, endpoint_query) = split_query(endpoint); + + let mut url = base_without_query.to_string(); + if let Some(query) = merge_queries(base_query, endpoint_query) { + url.push('?'); + url.push_str(&query); + } + + url +} + +pub fn build_gemini_native_url(base_url: &str, endpoint: &str) -> String { + let base_url = base_url + .split_once('#') + .map_or(base_url, |(base, _)| base) + .trim_end_matches('/'); + let (base_without_query, base_query) = split_query(base_url); + let (endpoint_without_query, endpoint_query) = split_query(endpoint); + + let endpoint_path = format!("/{}", endpoint_without_query.trim_start_matches('/')); + let (origin, raw_path) = split_origin_and_path(base_without_query); + let prefix_path = normalize_gemini_base_path(raw_path); + + let mut url = if prefix_path.is_empty() { + format!("{origin}{endpoint_path}") + } else { + format!("{origin}{prefix_path}{endpoint_path}") + }; + + if let Some(query) = merge_queries(base_query, endpoint_query) { + url.push('?'); + url.push_str(&query); + } + + url +} + +fn should_normalize_gemini_full_url(base_url: &str) -> bool { + let base_url = base_url + .split_once('#') + .map_or(base_url, |(base, _)| base) + .trim_end_matches('/'); + let (base_without_query, _) = split_query(base_url); + let (_, path) = split_origin_and_path(base_without_query); + + if path.is_empty() || path == "/" { + return true; + } + + let path = path.trim_end_matches('/'); + path.contains("/v1beta/models/") + || path.contains("/v1/models/") + || matches_bare_gemini_models_path(path) + || path.ends_with("/v1beta") + || path.ends_with("/v1") + 
|| path.ends_with("/v1beta/models") + || path.ends_with("/v1/models") + || path.ends_with("/models") + || path.ends_with("/v1beta/openai") + || path.ends_with("/v1/openai") + || path.ends_with("/openai") + || path.ends_with("/v1beta/openai/chat/completions") + || path.ends_with("/v1/openai/chat/completions") + || path.ends_with("/openai/chat/completions") + || path.ends_with("/v1beta/openai/responses") + || path.ends_with("/v1/openai/responses") + || path.ends_with("/openai/responses") + || path.contains(":generateContent") + || path.contains(":streamGenerateContent") +} + +fn split_query(input: &str) -> (&str, Option<&str>) { + input + .split_once('?') + .map_or((input, None), |(path, query)| (path, Some(query))) +} + +fn split_origin_and_path(base_url: &str) -> (&str, &str) { + let Some(scheme_sep) = base_url.find("://") else { + return (base_url, ""); + }; + let authority_start = scheme_sep + 3; + let Some(path_start_rel) = base_url[authority_start..].find('/') else { + return (base_url, ""); + }; + let path_start = authority_start + path_start_rel; + (&base_url[..path_start], &base_url[path_start..]) +} + +fn normalize_gemini_base_path(path: &str) -> String { + let path = path.trim_end_matches('/'); + if path.is_empty() || path == "/" { + return String::new(); + } + + for marker in ["/v1beta/models/", "/v1/models/", "/models/"] { + if let Some(index) = path.find(marker) { + return normalize_prefix(&path[..index]); + } + } + + for suffix in [ + "/v1beta/openai/chat/completions", + "/v1/openai/chat/completions", + "/openai/chat/completions", + "/v1beta/openai/responses", + "/v1/openai/responses", + "/openai/responses", + "/v1beta/openai", + "/v1/openai", + "/openai", + "/v1beta/models", + "/v1/models", + "/models", + "/v1beta", + "/v1", + ] { + if path == suffix { + return String::new(); + } + if let Some(prefix) = path.strip_suffix(suffix) { + return normalize_prefix(prefix); + } + } + + path.to_string() +} + +fn normalize_prefix(prefix: &str) -> String { + let 
prefix = prefix.trim_end_matches('/'); + if prefix.is_empty() || prefix == "/" { + String::new() + } else { + prefix.to_string() + } +} + +fn matches_bare_gemini_models_path(path: &str) -> bool { + if let Some(idx) = path.find("/models/") { + let after = &path[idx + "/models/".len()..]; + after.contains(":generateContent") || after.contains(":streamGenerateContent") + } else { + false + } +} + +fn merge_queries(base_query: Option<&str>, endpoint_query: Option<&str>) -> Option { + let parts: Vec<&str> = [base_query, endpoint_query] + .into_iter() + .flatten() + .flat_map(|query| query.split('&')) + .filter(|part| !part.is_empty()) + .collect(); + + if parts.is_empty() { + None + } else { + Some(parts.join("&")) + } +} + +#[cfg(test)] +mod tests { + use super::{build_gemini_native_url, resolve_gemini_native_url}; + + #[test] + fn strips_version_root_for_official_base() { + let url = build_gemini_native_url( + "https://generativelanguage.googleapis.com/v1beta", + "/v1beta/models/gemini-2.5-pro:generateContent", + ); + + assert_eq!( + url, + "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-pro:generateContent" + ); + } + + #[test] + fn strips_openai_compat_path_for_official_base() { + let url = build_gemini_native_url( + "https://generativelanguage.googleapis.com/v1beta/openai/chat/completions", + "/v1beta/models/gemini-2.5-pro:generateContent", + ); + + assert_eq!( + url, + "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-pro:generateContent" + ); + } + + #[test] + fn preserves_custom_proxy_prefix_while_stripping_openai_suffix() { + let url = build_gemini_native_url( + "https://proxy.example.com/google/v1beta/openai/chat/completions", + "/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse", + ); + + assert_eq!( + url, + "https://proxy.example.com/google/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse" + ); + } + + #[test] + fn strips_model_method_path_from_full_url_base() { + let url = 
build_gemini_native_url( + "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-pro:generateContent", + "/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse", + ); + + assert_eq!( + url, + "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse" + ); + } + + #[test] + fn resolves_structured_full_url_by_normalizing_to_requested_method() { + let url = resolve_gemini_native_url( + "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-pro:generateContent", + "/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse", + true, + ); + + assert_eq!( + url, + "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse" + ); + } + + #[test] + fn resolves_opaque_full_url_without_appending_gemini_models_path() { + let url = resolve_gemini_native_url( + "https://relay.example/custom/generate-content", + "/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse", + true, + ); + + assert_eq!(url, "https://relay.example/custom/generate-content?alt=sse"); + } + + #[test] + fn preserves_opaque_full_url_containing_models_segment() { + let url = resolve_gemini_native_url( + "https://relay.example/custom/models/invoke", + "/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse", + true, + ); + + assert_eq!(url, "https://relay.example/custom/models/invoke?alt=sse"); + } +} diff --git a/src-tauri/src/proxy/handler_context.rs b/src-tauri/src/proxy/handler_context.rs index bad855a91..da2413ec4 100644 --- a/src-tauri/src/proxy/handler_context.rs +++ b/src-tauri/src/proxy/handler_context.rs @@ -218,9 +218,11 @@ impl RequestContext { non_streaming_timeout, state.status.clone(), state.current_providers.clone(), + state.gemini_shadow.clone(), state.failover_manager.clone(), state.app_handle.clone(), self.current_provider_id.clone(), + self.session_id.clone(), first_byte_timeout, idle_timeout, self.rectifier_config.clone(), diff --git 
a/src-tauri/src/proxy/handlers.rs b/src-tauri/src/proxy/handlers.rs index 08816246a..b8a233e52 100644 --- a/src-tauri/src/proxy/handlers.rs +++ b/src-tauri/src/proxy/handlers.rs @@ -15,8 +15,9 @@ use super::{ handler_context::RequestContext, providers::{ get_adapter, get_claude_api_format, streaming::create_anthropic_sse_stream, + streaming_gemini::create_anthropic_sse_stream_from_gemini, streaming_responses::create_anthropic_sse_stream_from_responses, transform, - transform_responses, + transform_gemini, transform_responses, }, response_processor::{ create_logged_passthrough_stream, process_response, read_decoded_body, @@ -144,11 +145,13 @@ async fn handle_claude_transform( response: super::hyper_client::ProxyResponse, ctx: &RequestContext, state: &ProxyState, - _original_body: &Value, + original_body: &Value, is_stream: bool, api_format: &str, ) -> Result { let status = response.status(); + let tool_schema_hints = transform_gemini::extract_anthropic_tool_schema_hints(original_body); + let tool_schema_hints = (!tool_schema_hints.is_empty()).then_some(tool_schema_hints); if is_stream { // 根据 api_format 选择流式转换器 @@ -157,6 +160,14 @@ async fn handle_claude_transform( dyn futures::Stream> + Send + Unpin, > = if api_format == "openai_responses" { Box::new(Box::pin(create_anthropic_sse_stream_from_responses(stream))) + } else if api_format == "gemini_native" { + Box::new(Box::pin(create_anthropic_sse_stream_from_gemini( + stream, + Some(state.gemini_shadow.clone()), + Some(ctx.provider.id.clone()), + Some(ctx.session_id.clone()), + tool_schema_hints.clone(), + ))) } else { Box::new(Box::pin(create_anthropic_sse_stream(stream))) }; @@ -245,6 +256,14 @@ async fn handle_claude_transform( // 根据 api_format 选择非流式转换器 let anthropic_response = if api_format == "openai_responses" { transform_responses::responses_to_anthropic(upstream_response) + } else if api_format == "gemini_native" { + transform_gemini::gemini_to_anthropic_with_shadow_and_hints( + upstream_response, + 
Some(state.gemini_shadow.as_ref()), + Some(&ctx.provider.id), + Some(&ctx.session_id), + tool_schema_hints.as_ref(), + ) } else { transform::openai_to_anthropic(upstream_response) } diff --git a/src-tauri/src/proxy/mod.rs b/src-tauri/src/proxy/mod.rs index a14818c5d..48cec6b38 100644 --- a/src-tauri/src/proxy/mod.rs +++ b/src-tauri/src/proxy/mod.rs @@ -10,6 +10,7 @@ pub mod error; pub mod error_mapper; pub(crate) mod failover_switch; mod forwarder; +pub mod gemini_url; pub mod handler_config; pub mod handler_context; mod handlers; diff --git a/src-tauri/src/proxy/providers/claude.rs b/src-tauri/src/proxy/providers/claude.rs index 52689d3b5..3f35f4ac5 100644 --- a/src-tauri/src/proxy/providers/claude.rs +++ b/src-tauri/src/proxy/providers/claude.rs @@ -6,6 +6,7 @@ //! - **anthropic** (默认): Anthropic Messages API 格式,直接透传 //! - **openai_chat**: OpenAI Chat Completions 格式,需要 Anthropic ↔ OpenAI 转换 //! - **openai_responses**: OpenAI Responses API 格式,需要 Anthropic ↔ Responses 转换 +//! - **gemini_native**: Google Gemini Native generateContent 格式,需要 Anthropic ↔ Gemini 转换 //! //! ## 认证模式 //! 
- **Claude**: Anthropic 官方 API (x-api-key + anthropic-version) @@ -35,6 +36,7 @@ pub fn get_claude_api_format(provider: &Provider) -> &'static str { return match api_format { "openai_chat" => "openai_chat", "openai_responses" => "openai_responses", + "gemini_native" => "gemini_native", _ => "anthropic", }; } @@ -49,6 +51,7 @@ pub fn get_claude_api_format(provider: &Provider) -> &'static str { return match api_format { "openai_chat" => "openai_chat", "openai_responses" => "openai_responses", + "gemini_native" => "gemini_native", _ => "anthropic", }; } @@ -73,13 +76,18 @@ pub fn get_claude_api_format(provider: &Provider) -> &'static str { } pub fn claude_api_format_needs_transform(api_format: &str) -> bool { - matches!(api_format, "openai_chat" | "openai_responses") + matches!( + api_format, + "openai_chat" | "openai_responses" | "gemini_native" + ) } pub fn transform_claude_request_for_api_format( body: serde_json::Value, provider: &Provider, api_format: &str, + session_id: Option<&str>, + shadow_store: Option<&super::gemini_shadow::GeminiShadowStore>, ) -> Result { let cache_key = provider .meta @@ -103,6 +111,12 @@ pub fn transform_claude_request_for_api_format( ) } "openai_chat" => super::transform::anthropic_to_openai(body, Some(cache_key)), + "gemini_native" => super::transform_gemini::anthropic_to_gemini_with_shadow( + body, + shadow_store, + Some(&provider.id), + session_id, + ), _ => Ok(body), } } @@ -124,6 +138,16 @@ impl ClaudeAdapter { /// - ClaudeAuth: auth_mode 为 bearer_only /// - Claude: 默认 Anthropic 官方 pub fn provider_type(&self, provider: &Provider) -> ProviderType { + // 检测 Gemini Native 格式 + if self.get_api_format(provider) == "gemini_native" { + return match self.extract_key(provider) { + Some(key) if key.starts_with("ya29.") || key.starts_with('{') => { + ProviderType::GeminiCli + } + _ => ProviderType::Gemini, + }; + } + // 检测 Codex OAuth (ChatGPT Plus/Pro) if self.is_codex_oauth(provider) { return ProviderType::CodexOAuth; @@ -352,14 +376,23 @@ 
impl ProviderAdapter for ClaudeAdapter { )); } - let strategy = match provider_type { - ProviderType::OpenRouter => AuthStrategy::Bearer, - ProviderType::ClaudeAuth => AuthStrategy::ClaudeAuth, - _ => AuthStrategy::Anthropic, - }; + let key = self.extract_key(provider)?; - self.extract_key(provider) - .map(|key| AuthInfo::new(key, strategy)) + match provider_type { + ProviderType::GeminiCli => { + if let Some(creds) = + super::gemini::GeminiAdapter::new().parse_oauth_credentials(&key) + { + Some(AuthInfo::with_access_token(key, creds.access_token)) + } else { + Some(AuthInfo::new(key, AuthStrategy::GoogleOAuth)) + } + } + ProviderType::Gemini => Some(AuthInfo::new(key, AuthStrategy::Google)), + ProviderType::OpenRouter => Some(AuthInfo::new(key, AuthStrategy::Bearer)), + ProviderType::ClaudeAuth => Some(AuthInfo::new(key, AuthStrategy::ClaudeAuth)), + _ => Some(AuthInfo::new(key, AuthStrategy::Anthropic)), + } } fn build_url(&self, base_url: &str, endpoint: &str) -> String { @@ -401,6 +434,23 @@ impl ProviderAdapter for ClaudeAdapter { HeaderValue::from_str(&bearer).unwrap(), )] } + AuthStrategy::Google => vec![( + HeaderName::from_static("x-goog-api-key"), + HeaderValue::from_str(&auth.api_key).unwrap(), + )], + AuthStrategy::GoogleOAuth => { + let token = auth.access_token.as_ref().unwrap_or(&auth.api_key); + vec![ + ( + HeaderName::from_static("authorization"), + HeaderValue::from_str(&format!("Bearer {token}")).unwrap(), + ), + ( + HeaderName::from_static("x-goog-api-client"), + HeaderValue::from_static("GeminiCLI/1.0"), + ), + ] + } AuthStrategy::CodexOAuth => { // 注意:bearer token 由 forwarder 动态注入到 auth.api_key // ChatGPT-Account-Id 由 forwarder 注入额外 header @@ -470,7 +520,6 @@ impl ProviderAdapter for ClaudeAdapter { ), ] } - _ => vec![], } } @@ -491,7 +540,7 @@ impl ProviderAdapter for ClaudeAdapter { // - "openai_responses": 需要 Anthropic ↔ OpenAI Responses API 格式转换 matches!( self.get_api_format(provider), - "openai_chat" | "openai_responses" + "openai_chat" | 
"openai_responses" | "gemini_native" ) } @@ -500,7 +549,13 @@ impl ProviderAdapter for ClaudeAdapter { body: serde_json::Value, provider: &Provider, ) -> Result { - transform_claude_request_for_api_format(body, provider, self.get_api_format(provider)) + transform_claude_request_for_api_format( + body, + provider, + self.get_api_format(provider), + None, + None, + ) } fn transform_response(&self, body: serde_json::Value) -> Result { @@ -509,7 +564,9 @@ impl ProviderAdapter for ClaudeAdapter { // config, so we can't check api_format here. Instead we rely on the fact that // Responses API always returns "output" while Chat Completions returns "choices". // This is safe because the two formats are structurally disjoint. - if body.get("output").is_some() { + if body.get("candidates").is_some() || body.get("promptFeedback").is_some() { + super::transform_gemini::gemini_to_anthropic(body) + } else if body.get("output").is_some() { super::transform_responses::responses_to_anthropic(body) } else { super::transform::openai_to_anthropic(body) @@ -813,6 +870,24 @@ mod tests { ); assert!(adapter.needs_transform(&openai_responses_provider)); + let gemini_native_provider = create_provider_with_meta( + json!({ + "env": { + "ANTHROPIC_BASE_URL": "https://generativelanguage.googleapis.com", + "ANTHROPIC_API_KEY": "test-key" + } + }), + ProviderMeta { + api_format: Some("gemini_native".to_string()), + ..Default::default() + }, + ); + assert!(adapter.needs_transform(&gemini_native_provider)); + assert_eq!( + adapter.provider_type(&gemini_native_provider), + ProviderType::Gemini + ); + // meta takes precedence over legacy settings_config fields let meta_precedence_over_settings = create_provider_with_meta( json!({ @@ -920,11 +995,106 @@ mod tests { "max_tokens": 128 }); - let transformed = - transform_claude_request_for_api_format(body, &provider, "openai_responses").unwrap(); + let transformed = transform_claude_request_for_api_format( + body, + &provider, + "openai_responses", + 
None, + None, + ) + .unwrap(); assert_eq!(transformed["model"], "gpt-5.4"); assert!(transformed.get("input").is_some()); assert!(transformed.get("max_output_tokens").is_some()); } + + #[test] + fn test_transform_claude_request_for_api_format_gemini_native() { + let provider = create_provider_with_meta( + json!({ + "env": { + "ANTHROPIC_BASE_URL": "https://generativelanguage.googleapis.com", + "ANTHROPIC_API_KEY": "test-key" + } + }), + ProviderMeta { + api_format: Some("gemini_native".to_string()), + ..Default::default() + }, + ); + let body = json!({ + "model": "gemini-2.5-pro", + "system": "You are helpful.", + "messages": [{ "role": "user", "content": "hello" }], + "max_tokens": 64 + }); + + let transformed = + transform_claude_request_for_api_format(body, &provider, "gemini_native", None, None) + .unwrap(); + + assert!(transformed.get("contents").is_some()); + assert_eq!( + transformed["systemInstruction"]["parts"][0]["text"], + "You are helpful." + ); + assert_eq!(transformed["generationConfig"]["maxOutputTokens"], 64); + } + + #[test] + fn test_transform_claude_request_for_api_format_openai_chat_skips_prompt_cache_key_by_default() + { + let provider = create_provider_with_meta( + json!({ + "env": { + "ANTHROPIC_BASE_URL": "https://api.example.com", + "ANTHROPIC_API_KEY": "test-key" + } + }), + ProviderMeta { + api_format: Some("openai_chat".to_string()), + ..Default::default() + }, + ); + let body = json!({ + "model": "gpt-5.4", + "messages": [{ "role": "user", "content": "hello" }], + "max_tokens": 64 + }); + + let transformed = + transform_claude_request_for_api_format(body, &provider, "openai_chat", None, None) + .unwrap(); + + assert!(transformed.get("prompt_cache_key").is_none()); + } + + #[test] + fn test_transform_claude_request_for_api_format_openai_chat_keeps_explicit_prompt_cache_key() { + let provider = create_provider_with_meta( + json!({ + "env": { + "ANTHROPIC_BASE_URL": "https://api.example.com", + "ANTHROPIC_API_KEY": "test-key" + } + }), + 
ProviderMeta { + api_format: Some("openai_chat".to_string()), + prompt_cache_key: Some("claude-cache-route".to_string()), + ..Default::default() + }, + ); + let body = json!({ + "model": "gpt-5.4", + "messages": [{ "role": "user", "content": "hello" }], + "max_tokens": 64 + }); + + let transformed = + transform_claude_request_for_api_format(body, &provider, "openai_chat", None, None) + .unwrap(); + + assert_eq!(transformed["prompt_cache_key"], "claude-cache-route"); + } } diff --git a/src-tauri/src/proxy/providers/gemini_schema.rs b/src-tauri/src/proxy/providers/gemini_schema.rs new file mode 100644 index 000000000..dc07b09b0 --- /dev/null +++ b/src-tauri/src/proxy/providers/gemini_schema.rs @@ -0,0 +1,278 @@ +//! Gemini tool schema helpers. +//! +//! Gemini `FunctionDeclaration` supports two schema channels: +//! - `parameters`: a restricted `Schema` subset +//! - `parametersJsonSchema`: richer JSON Schema via arbitrary JSON `Value` +//! +//! Anthropic tool schemas are closer to JSON Schema, so we choose the richer +//! channel when unsupported `Schema` fields are present. 
+ +use serde_json::{json, Map, Value}; + +#[derive(Debug, Clone, PartialEq)] +pub enum GeminiFunctionParameters { + Schema(Value), + JsonSchema(Value), +} + +pub fn build_gemini_function_parameters(input_schema: Value) -> GeminiFunctionParameters { + let schema = normalize_json_schema(input_schema); + + if requires_parameters_json_schema(&schema) { + GeminiFunctionParameters::JsonSchema(schema) + } else { + GeminiFunctionParameters::Schema(to_gemini_schema(schema)) + } +} + +fn normalize_json_schema(schema: Value) -> Value { + match schema { + Value::Object(mut obj) => { + obj.remove("$schema"); + obj.remove("$id"); + + if let Some(properties) = obj + .get_mut("properties") + .and_then(|value| value.as_object_mut()) + { + for value in properties.values_mut() { + *value = normalize_json_schema(value.clone()); + } + } + + if let Some(items) = obj.get_mut("items") { + *items = normalize_json_schema(items.clone()); + } + + for key in ["anyOf", "oneOf", "allOf", "prefixItems"] { + if let Some(values) = obj.get_mut(key).and_then(|value| value.as_array_mut()) { + for value in values.iter_mut() { + *value = normalize_json_schema(value.clone()); + } + } + } + + for key in ["not", "if", "then", "else", "additionalProperties"] { + if let Some(value) = obj.get_mut(key) { + *value = normalize_json_schema(value.clone()); + } + } + + Value::Object(obj) + } + Value::Array(values) => { + Value::Array(values.into_iter().map(normalize_json_schema).collect()) + } + other => other, + } +} + +fn requires_parameters_json_schema(schema: &Value) -> bool { + match schema { + Value::Object(obj) => object_requires_parameters_json_schema(obj), + Value::Array(values) => values.iter().any(requires_parameters_json_schema), + _ => false, + } +} + +fn object_requires_parameters_json_schema(obj: &Map) -> bool { + for (key, value) in obj { + match key.as_str() { + "type" => { + if value.is_array() { + return true; + } + } + "format" | "title" | "description" | "nullable" | "enum" | "maxItems" | 
"minItems" + | "required" | "minProperties" | "maxProperties" | "minLength" | "maxLength" + | "pattern" | "example" | "propertyOrdering" | "default" | "minimum" | "maximum" => {} + "properties" => { + let Some(properties) = value.as_object() else { + return true; + }; + if properties.values().any(requires_parameters_json_schema) { + return true; + } + } + "items" => { + if !value.is_object() || requires_parameters_json_schema(value) { + return true; + } + } + "anyOf" => { + let Some(values) = value.as_array() else { + return true; + }; + if values.iter().any(requires_parameters_json_schema) { + return true; + } + } + // JSON Schema keywords that Gemini `parameters` does not accept. + "$ref" + | "$defs" + | "definitions" + | "additionalProperties" + | "unevaluatedProperties" + | "patternProperties" + | "oneOf" + | "allOf" + | "const" + | "not" + | "if" + | "then" + | "else" + | "dependentRequired" + | "dependentSchemas" + | "contains" + | "minContains" + | "maxContains" + | "prefixItems" + | "exclusiveMinimum" + | "exclusiveMaximum" + | "multipleOf" + | "examples" => return true, + // Be conservative for unknown keywords. 
+ _ => return true, + } + } + + false +} + +fn to_gemini_schema(schema: Value) -> Value { + match schema { + Value::Object(obj) => { + let mut result = Map::new(); + + for (key, value) in obj { + match key.as_str() { + "type" | "format" | "title" | "description" | "nullable" | "enum" + | "maxItems" | "minItems" | "required" | "minProperties" | "maxProperties" + | "minLength" | "maxLength" | "pattern" | "example" | "propertyOrdering" + | "default" | "minimum" | "maximum" => { + result.insert(key, value); + } + "properties" => { + if let Some(properties) = value.as_object() { + let converted = properties + .iter() + .map(|(name, property_schema)| { + (name.clone(), to_gemini_schema(property_schema.clone())) + }) + .collect(); + result.insert("properties".to_string(), Value::Object(converted)); + } + } + "items" => { + if value.is_object() { + result.insert("items".to_string(), to_gemini_schema(value)); + } + } + "anyOf" => { + if let Some(values) = value.as_array() { + result.insert( + "anyOf".to_string(), + Value::Array( + values + .iter() + .map(|value| to_gemini_schema(value.clone())) + .collect(), + ), + ); + } + } + _ => {} + } + } + + Value::Object(result) + } + other => other, + } +} + +pub fn build_gemini_function_declaration( + name: &str, + description: Option<&str>, + input_schema: Value, +) -> Value { + let mut declaration = Map::new(); + declaration.insert("name".to_string(), json!(name)); + declaration.insert("description".to_string(), json!(description.unwrap_or(""))); + + match build_gemini_function_parameters(input_schema) { + GeminiFunctionParameters::Schema(schema) => { + declaration.insert("parameters".to_string(), schema); + } + GeminiFunctionParameters::JsonSchema(schema) => { + declaration.insert("parametersJsonSchema".to_string(), schema); + } + } + + Value::Object(declaration) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn uses_schema_for_simple_openapi_subset() { + let schema = json!({ + "type": "object", + "properties": { 
+ "city": { "type": "string", "description": "Target city" } + }, + "required": ["city"] + }); + + let result = build_gemini_function_declaration("weather", Some("Weather lookup"), schema); + + assert!(result.get("parameters").is_some()); + assert!(result.get("parametersJsonSchema").is_none()); + assert_eq!(result["parameters"]["properties"]["city"]["type"], "string"); + } + + #[test] + fn uses_parameters_json_schema_for_additional_properties() { + let schema = json!({ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "city": { "type": "string" } + }, + "required": ["city"], + "additionalProperties": false + }); + + let result = build_gemini_function_declaration("weather", Some("Weather lookup"), schema); + + assert!(result.get("parameters").is_none()); + assert!(result.get("parametersJsonSchema").is_some()); + assert!(result["parametersJsonSchema"].get("$schema").is_none()); + assert_eq!( + result["parametersJsonSchema"]["additionalProperties"], + false + ); + } + + #[test] + fn uses_parameters_json_schema_for_one_of() { + let schema = json!({ + "type": "object", + "properties": { + "target": { + "oneOf": [ + { "type": "string" }, + { "type": "integer" } + ] + } + } + }); + + let result = build_gemini_function_declaration("search", Some("Search"), schema); + + assert!(result.get("parameters").is_none()); + assert!(result.get("parametersJsonSchema").is_some()); + } +} diff --git a/src-tauri/src/proxy/providers/gemini_shadow.rs b/src-tauri/src/proxy/providers/gemini_shadow.rs new file mode 100644 index 000000000..f1f5d6d39 --- /dev/null +++ b/src-tauri/src/proxy/providers/gemini_shadow.rs @@ -0,0 +1,389 @@ +//! Gemini Native shadow state +//! +//! Keeps provider/session-scoped assistant content snapshots and tool call metadata +//! so Gemini thought signatures and tool turns can be replayed without bloating +//! the main proxy files. 
+ +use serde_json::Value; +use std::collections::{HashMap, VecDeque}; +use std::sync::RwLock; + +/// Composite key for a Gemini shadow session. +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct GeminiShadowKey { + pub provider_id: String, + pub session_id: String, +} + +impl GeminiShadowKey { + pub fn new(provider_id: impl Into, session_id: impl Into) -> Self { + Self { + provider_id: provider_id.into(), + session_id: session_id.into(), + } + } +} + +/// Gemini function call metadata captured from an assistant turn. +#[derive(Debug, Clone, PartialEq)] +pub struct GeminiToolCallMeta { + pub id: Option, + pub name: String, + pub args: Value, + pub thought_signature: Option, +} + +impl GeminiToolCallMeta { + pub fn new( + id: Option>, + name: impl Into, + args: Value, + thought_signature: Option>, + ) -> Self { + Self { + id: id.map(Into::into), + name: name.into(), + args, + thought_signature: thought_signature.map(Into::into), + } + } +} + +/// Stored assistant turn snapshot. +#[derive(Debug, Clone, PartialEq)] +pub struct GeminiAssistantTurn { + pub assistant_content: Value, + pub tool_calls: Vec, +} + +impl GeminiAssistantTurn { + pub fn new(assistant_content: Value, tool_calls: Vec) -> Self { + Self { + assistant_content, + tool_calls, + } + } +} + +/// Session snapshot returned by read APIs. +#[derive(Debug, Clone, PartialEq)] +pub struct GeminiShadowSessionSnapshot { + pub provider_id: String, + pub session_id: String, + pub turns: Vec, +} + +#[derive(Debug, Clone)] +struct GeminiShadowSession { + turns: VecDeque, +} + +impl GeminiShadowSession { + fn new() -> Self { + Self { + turns: VecDeque::new(), + } + } +} + +#[derive(Debug, Clone)] +struct GeminiShadowInner { + sessions: HashMap, + session_order: VecDeque, +} + +impl GeminiShadowInner { + fn new() -> Self { + Self { + sessions: HashMap::new(), + session_order: VecDeque::new(), + } + } +} + +/// Thread-safe shadow store for Gemini Native replay state. 
+/// +/// The store is intentionally small and explicit: +/// - sessions are keyed by `(provider_id, session_id)` +/// - each session keeps only a bounded number of recent assistant turns +/// - the oldest session is evicted first when the store is full +#[derive(Debug)] +pub struct GeminiShadowStore { + max_sessions: usize, + max_turns_per_session: usize, + inner: RwLock, +} + +impl Default for GeminiShadowStore { + fn default() -> Self { + Self::with_limits(200, 64) + } +} + +impl GeminiShadowStore { + #[allow(dead_code)] + pub fn new() -> Self { + Self::default() + } + + pub fn with_limits(max_sessions: usize, max_turns_per_session: usize) -> Self { + Self { + max_sessions: max_sessions.max(1), + max_turns_per_session: max_turns_per_session.max(1), + inner: RwLock::new(GeminiShadowInner::new()), + } + } + + /// Record a Gemini assistant turn for later replay. + pub fn record_assistant_turn( + &self, + provider_id: impl Into, + session_id: impl Into, + assistant_content: Value, + tool_calls: Vec, + ) -> GeminiShadowSessionSnapshot { + let key = GeminiShadowKey::new(provider_id, session_id); + let turn = GeminiAssistantTurn::new(assistant_content, tool_calls); + + let mut inner = self.inner.write().expect("gemini shadow lock poisoned"); + Self::touch_session_order(&mut inner.session_order, &key); + + let snapshot = { + let session = inner + .sessions + .entry(key.clone()) + .or_insert_with(GeminiShadowSession::new); + session.turns.push_back(turn); + while session.turns.len() > self.max_turns_per_session { + session.turns.pop_front(); + } + Self::snapshot_session(&key, session) + }; + Self::prune_sessions(&mut inner, self.max_sessions); + snapshot + } + + /// Get the latest assistant content for a provider/session pair. 
+ #[allow(dead_code)] + pub fn latest_assistant_content(&self, provider_id: &str, session_id: &str) -> Option { + self.get_session(provider_id, session_id) + .and_then(|snapshot| { + snapshot + .turns + .last() + .map(|turn| turn.assistant_content.clone()) + }) + } + + /// Get the latest tool calls for a provider/session pair. + #[allow(dead_code)] + pub fn latest_tool_calls( + &self, + provider_id: &str, + session_id: &str, + ) -> Option> { + self.get_session(provider_id, session_id) + .and_then(|snapshot| snapshot.turns.last().map(|turn| turn.tool_calls.clone())) + } + + /// Read a full session snapshot. + pub fn get_session( + &self, + provider_id: &str, + session_id: &str, + ) -> Option { + let key = GeminiShadowKey::new(provider_id, session_id); + let mut inner = self.inner.write().expect("gemini shadow lock poisoned"); + let snapshot = inner + .sessions + .get(&key) + .map(|session| Self::snapshot_session(&key, session)); + if snapshot.is_some() { + Self::touch_session_order(&mut inner.session_order, &key); + } + snapshot + } + + /// Remove a single session from the store. + #[allow(dead_code)] + pub fn clear_session(&self, provider_id: &str, session_id: &str) -> bool { + let key = GeminiShadowKey::new(provider_id, session_id); + let mut inner = self.inner.write().expect("gemini shadow lock poisoned"); + let removed = inner.sessions.remove(&key).is_some(); + if removed { + Self::remove_key_from_order(&mut inner.session_order, &key); + } + removed + } + + /// Remove all sessions for a provider. + #[allow(dead_code)] + pub fn clear_provider(&self, provider_id: &str) -> usize { + let mut inner = self.inner.write().expect("gemini shadow lock poisoned"); + let keys: Vec<_> = inner + .sessions + .keys() + .filter(|key| key.provider_id == provider_id) + .cloned() + .collect(); + for key in &keys { + inner.sessions.remove(key); + Self::remove_key_from_order(&mut inner.session_order, key); + } + keys.len() + } + + /// Number of tracked sessions. 
+ #[allow(dead_code)] + pub fn session_count(&self) -> usize { + self.inner + .read() + .expect("gemini shadow lock poisoned") + .sessions + .len() + } + + fn snapshot_session( + key: &GeminiShadowKey, + session: &GeminiShadowSession, + ) -> GeminiShadowSessionSnapshot { + GeminiShadowSessionSnapshot { + provider_id: key.provider_id.clone(), + session_id: key.session_id.clone(), + turns: session.turns.iter().cloned().collect(), + } + } + + fn touch_session_order(order: &mut VecDeque, key: &GeminiShadowKey) { + if let Some(pos) = order.iter().position(|existing| existing == key) { + order.remove(pos); + } + order.push_back(key.clone()); + } + + #[allow(dead_code)] + fn remove_key_from_order(order: &mut VecDeque, key: &GeminiShadowKey) { + if let Some(pos) = order.iter().position(|existing| existing == key) { + order.remove(pos); + } + } + + fn prune_sessions(inner: &mut GeminiShadowInner, max_sessions: usize) { + while inner.sessions.len() > max_sessions { + let Some(evicted_key) = inner.session_order.pop_front() else { + break; + }; + inner.sessions.remove(&evicted_key); + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + #[test] + fn record_and_read_latest_turn() { + let store = GeminiShadowStore::with_limits(8, 4); + let snapshot = store.record_assistant_turn( + "provider-a", + "session-1", + json!({"parts": [{"text": "hello", "thoughtSignature": "sig-1"}]}), + vec![GeminiToolCallMeta::new( + Some("call-1"), + "get_weather", + json!({"location": "Tokyo"}), + Some("sig-1"), + )], + ); + + assert_eq!(snapshot.provider_id, "provider-a"); + assert_eq!(snapshot.session_id, "session-1"); + assert_eq!(snapshot.turns.len(), 1); + + let content = store + .latest_assistant_content("provider-a", "session-1") + .expect("content"); + assert_eq!(content["parts"][0]["text"], "hello"); + assert_eq!(content["parts"][0]["thoughtSignature"], "sig-1"); + + let tool_calls = store + .latest_tool_calls("provider-a", "session-1") + .expect("tool calls"); 
+ assert_eq!(tool_calls.len(), 1); + assert_eq!(tool_calls[0].id.as_deref(), Some("call-1")); + assert_eq!(tool_calls[0].name, "get_weather"); + assert_eq!(tool_calls[0].args["location"], "Tokyo"); + assert_eq!(tool_calls[0].thought_signature.as_deref(), Some("sig-1")); + } + + #[test] + fn sessions_are_isolated_by_provider_and_session_id() { + let store = GeminiShadowStore::with_limits(8, 4); + + store.record_assistant_turn("provider-a", "session-1", json!({"text": "a"}), vec![]); + store.record_assistant_turn("provider-b", "session-1", json!({"text": "b"}), vec![]); + store.record_assistant_turn("provider-a", "session-2", json!({"text": "c"}), vec![]); + + assert_eq!(store.session_count(), 3); + assert_eq!( + store.latest_assistant_content("provider-a", "session-1"), + Some(json!({"text": "a"})) + ); + assert_eq!( + store.latest_assistant_content("provider-b", "session-1"), + Some(json!({"text": "b"})) + ); + assert_eq!( + store.latest_assistant_content("provider-a", "session-2"), + Some(json!({"text": "c"})) + ); + } + + #[test] + fn retains_only_latest_turns_per_session() { + let store = GeminiShadowStore::with_limits(8, 2); + + store.record_assistant_turn("provider-a", "session-1", json!({"idx": 1}), vec![]); + store.record_assistant_turn("provider-a", "session-1", json!({"idx": 2}), vec![]); + store.record_assistant_turn("provider-a", "session-1", json!({"idx": 3}), vec![]); + + let snapshot = store + .get_session("provider-a", "session-1") + .expect("snapshot"); + assert_eq!(snapshot.turns.len(), 2); + assert_eq!(snapshot.turns[0].assistant_content, json!({"idx": 2})); + assert_eq!(snapshot.turns[1].assistant_content, json!({"idx": 3})); + } + + #[test] + fn evicts_oldest_session_when_capacity_is_exceeded() { + let store = GeminiShadowStore::with_limits(2, 2); + + store.record_assistant_turn("provider-a", "session-1", json!({"idx": 1}), vec![]); + store.record_assistant_turn("provider-a", "session-2", json!({"idx": 2}), vec![]); + 
store.record_assistant_turn("provider-a", "session-3", json!({"idx": 3}), vec![]); + + assert!(store.get_session("provider-a", "session-1").is_none()); + assert!(store.get_session("provider-a", "session-2").is_some()); + assert!(store.get_session("provider-a", "session-3").is_some()); + } + + #[test] + fn clear_session_and_provider_work() { + let store = GeminiShadowStore::with_limits(8, 4); + + store.record_assistant_turn("provider-a", "session-1", json!({"idx": 1}), vec![]); + store.record_assistant_turn("provider-a", "session-2", json!({"idx": 2}), vec![]); + store.record_assistant_turn("provider-b", "session-3", json!({"idx": 3}), vec![]); + + assert!(store.clear_session("provider-a", "session-1")); + assert!(store.get_session("provider-a", "session-1").is_none()); + + let removed = store.clear_provider("provider-a"); + assert_eq!(removed, 1); + assert!(store.get_session("provider-a", "session-2").is_none()); + assert!(store.get_session("provider-b", "session-3").is_some()); + } +} diff --git a/src-tauri/src/proxy/providers/mod.rs b/src-tauri/src/proxy/providers/mod.rs index b6b8bb643..7e6238ec1 100644 --- a/src-tauri/src/proxy/providers/mod.rs +++ b/src-tauri/src/proxy/providers/mod.rs @@ -18,10 +18,14 @@ mod codex; pub mod codex_oauth_auth; pub mod copilot_auth; mod gemini; +pub(crate) mod gemini_schema; +pub mod gemini_shadow; pub mod models; pub mod streaming; +pub mod streaming_gemini; pub mod streaming_responses; pub mod transform; +pub mod transform_gemini; pub mod transform_responses; use crate::app_config::AppType; @@ -101,6 +105,14 @@ impl ProviderType { pub fn from_app_type_and_config(app_type: &AppType, provider: &Provider) -> Self { match app_type { AppType::Claude => { + if get_claude_api_format(provider) == "gemini_native" { + let adapter = ClaudeAdapter::new(); + return match adapter.extract_auth(provider).map(|auth| auth.strategy) { + Some(AuthStrategy::GoogleOAuth) => ProviderType::GeminiCli, + _ => ProviderType::Gemini, + }; + } + // 检测是否为 
GitHub Copilot if let Some(meta) = provider.meta.as_ref() { if meta.provider_type.as_deref() == Some("github_copilot") { diff --git a/src-tauri/src/proxy/providers/streaming.rs b/src-tauri/src/proxy/providers/streaming.rs index d553ba6b9..306165a31 100644 --- a/src-tauri/src/proxy/providers/streaming.rs +++ b/src-tauri/src/proxy/providers/streaming.rs @@ -2,7 +2,7 @@ //! //! 实现 OpenAI SSE → Anthropic SSE 格式转换 -use crate::proxy::sse::strip_sse_field; +use crate::proxy::sse::{strip_sse_field, take_sse_block}; use bytes::Bytes; use futures::stream::{Stream, StreamExt}; use serde::{Deserialize, Serialize}; @@ -110,10 +110,7 @@ pub fn create_anthropic_sse_stream( Ok(bytes) => { crate::proxy::sse::append_utf8_safe(&mut buffer, &mut utf8_remainder, &bytes); - while let Some(pos) = buffer.find("\n\n") { - let line = buffer[..pos].to_string(); - buffer = buffer[pos + 2..].to_string(); - + while let Some(line) = take_sse_block(&mut buffer) { if line.trim().is_empty() { continue; } diff --git a/src-tauri/src/proxy/providers/streaming_gemini.rs b/src-tauri/src/proxy/providers/streaming_gemini.rs new file mode 100644 index 000000000..2a22bc237 --- /dev/null +++ b/src-tauri/src/proxy/providers/streaming_gemini.rs @@ -0,0 +1,734 @@ +//! Gemini Native streaming conversion module. +//! +//! Converts Gemini `streamGenerateContent?alt=sse` chunks into Anthropic-style +//! SSE events for Claude-compatible clients. 
+ +use super::gemini_shadow::{GeminiShadowStore, GeminiToolCallMeta}; +use super::transform_gemini::{rectify_tool_call_parts, AnthropicToolSchemaHints}; +use crate::proxy::sse::{strip_sse_field, take_sse_block}; +use bytes::Bytes; +use futures::stream::{Stream, StreamExt}; +use serde_json::{json, Value}; +use std::collections::HashSet; +use std::sync::Arc; + +fn anthropic_usage_from_gemini(usage: Option<&Value>) -> Value { + let Some(usage) = usage else { + return json!({ + "input_tokens": 0, + "output_tokens": 0 + }); + }; + + let input_tokens = usage + .get("promptTokenCount") + .and_then(|value| value.as_u64()) + .unwrap_or(0); + let total_tokens = usage + .get("totalTokenCount") + .and_then(|value| value.as_u64()) + .unwrap_or(0); + let output_tokens = total_tokens.saturating_sub(input_tokens); + + let mut result = json!({ + "input_tokens": input_tokens, + "output_tokens": output_tokens + }); + + if let Some(cached) = usage + .get("cachedContentTokenCount") + .and_then(|value| value.as_u64()) + { + result["cache_read_input_tokens"] = json!(cached); + } + + result +} + +fn map_finish_reason(reason: Option<&str>, has_tool_use: bool, blocked: bool) -> &'static str { + if blocked { + return "refusal"; + } + + match reason { + Some("MAX_TOKENS") => "max_tokens", + Some("SAFETY") + | Some("RECITATION") + | Some("SPII") + | Some("BLOCKLIST") + | Some("PROHIBITED_CONTENT") => "refusal", + _ if has_tool_use => "tool_use", + _ => "end_turn", + } +} + +fn extract_visible_text(parts: &[Value]) -> String { + parts + .iter() + .filter(|part| part.get("thought").and_then(|value| value.as_bool()) != Some(true)) + .filter_map(|part| part.get("text").and_then(|value| value.as_str())) + .collect::() +} + +fn extract_tool_calls( + parts: &[Value], + tool_schema_hints: Option<&AnthropicToolSchemaHints>, +) -> Vec { + let mut rectified_parts = parts.to_vec(); + rectify_tool_call_parts(&mut rectified_parts, tool_schema_hints); + + rectified_parts + .iter() + .filter_map(|part| { + 
let function_call = part.get("functionCall")?; + Some(GeminiToolCallMeta::new( + function_call.get("id").and_then(|value| value.as_str()), + function_call + .get("name") + .and_then(|value| value.as_str()) + .unwrap_or(""), + function_call + .get("args") + .cloned() + .unwrap_or_else(|| json!({})), + part.get("thoughtSignature") + .or_else(|| part.get("thought_signature")) + .and_then(|value| value.as_str()), + )) + }) + .collect() +} + +fn extract_text_thought_signature(parts: &[Value]) -> Option { + parts + .iter() + .filter(|part| part.get("text").is_some() && part.get("functionCall").is_none()) + .filter_map(|part| { + part.get("thoughtSignature") + .or_else(|| part.get("thought_signature")) + .and_then(|value| value.as_str()) + }) + .next_back() + .map(ToString::to_string) +} + +fn merge_tool_call_snapshots( + tool_call_snapshots: &mut Vec, + incoming: Vec, +) { + for tool_call in incoming { + let existing_index = + tool_call_snapshots + .iter() + .position(|existing| match (&existing.id, &tool_call.id) { + (Some(existing_id), Some(incoming_id)) => existing_id == incoming_id, + _ => existing.name == tool_call.name, + }); + + if let Some(index) = existing_index { + tool_call_snapshots[index] = tool_call; + } else { + tool_call_snapshots.push(tool_call); + } + } +} + +fn build_shadow_assistant_parts( + text: Option<&str>, + text_thought_signature: Option<&str>, + tool_calls: &[GeminiToolCallMeta], +) -> Vec { + let mut parts = Vec::new(); + + if text.filter(|text| !text.is_empty()).is_some() || text_thought_signature.is_some() { + let mut part = json!({ + "text": text.unwrap_or("") + }); + if let Some(signature) = text_thought_signature { + part["thoughtSignature"] = json!(signature); + } + parts.push(part); + } + + for tool_call in tool_calls { + let mut part = json!({ + "functionCall": { + "id": tool_call.id.clone().unwrap_or_default(), + "name": tool_call.name, + "args": tool_call.args + } + }); + + if let Some(signature) = &tool_call.thought_signature { + 
part["thoughtSignature"] = json!(signature); + } + + parts.push(part); + } + + parts +} + +fn encode_sse(event_name: &str, payload: &Value) -> Bytes { + Bytes::from(format!( + "event: {event_name}\ndata: {}\n\n", + serde_json::to_string(payload).unwrap_or_default() + )) +} + +pub fn create_anthropic_sse_stream_from_gemini( + stream: impl Stream> + Send + 'static, + shadow_store: Option>, + provider_id: Option, + session_id: Option, + tool_schema_hints: Option, +) -> impl Stream> + Send { + async_stream::stream! { + let mut buffer = String::new(); + let mut message_id: Option = None; + let mut current_model: Option = None; + let mut has_sent_message_start = false; + let mut accumulated_text = String::new(); + let mut text_block_index: Option = None; + let mut next_content_index: u32 = 0; + let mut open_indices: HashSet = HashSet::new(); + let mut tool_call_snapshots: Vec = Vec::new(); + let mut text_thought_signature: Option = None; + let mut latest_usage: Option = None; + let mut latest_finish_reason: Option = None; + let mut blocked_text: Option = None; + tokio::pin!(stream); + + while let Some(chunk) = stream.next().await { + match chunk { + Ok(bytes) => { + let text = String::from_utf8_lossy(&bytes); + buffer.push_str(&text); + + while let Some(block) = take_sse_block(&mut buffer) { + if block.trim().is_empty() { + continue; + } + + let mut data_lines: Vec = Vec::new(); + for line in block.lines() { + if let Some(data) = strip_sse_field(line, "data") { + data_lines.push(data.to_string()); + } + } + + if data_lines.is_empty() { + continue; + } + + let data = data_lines.join("\n"); + if data.trim() == "[DONE]" { + break; + } + + let chunk_json: Value = match serde_json::from_str(&data) { + Ok(value) => value, + Err(_) => continue, + }; + + if message_id.is_none() { + message_id = chunk_json + .get("responseId") + .and_then(|value| value.as_str()) + .map(ToString::to_string); + } + if current_model.is_none() { + current_model = chunk_json + .get("modelVersion") + 
.and_then(|value| value.as_str()) + .map(ToString::to_string); + } + if latest_usage.is_none() { + latest_usage = chunk_json.get("usageMetadata").cloned(); + } + + if !has_sent_message_start { + let event = json!({ + "type": "message_start", + "message": { + "id": message_id.clone().unwrap_or_default(), + "type": "message", + "role": "assistant", + "model": current_model.clone().unwrap_or_default(), + "usage": anthropic_usage_from_gemini(chunk_json.get("usageMetadata")) + } + }); + yield Ok(encode_sse("message_start", &event)); + has_sent_message_start = true; + } + + if let Some(reason) = chunk_json + .get("promptFeedback") + .and_then(|value| value.get("blockReason")) + .and_then(|value| value.as_str()) + { + blocked_text = Some(format!("Request blocked by Gemini safety filters: {reason}")); + } + + if let Some(candidate) = chunk_json + .get("candidates") + .and_then(|value| value.as_array()) + .and_then(|value| value.first()) + { + if let Some(reason) = candidate.get("finishReason").and_then(|value| value.as_str()) { + latest_finish_reason = Some(reason.to_string()); + } + if let Some(usage) = chunk_json.get("usageMetadata") { + latest_usage = Some(usage.clone()); + } + if let Some(parts) = candidate + .get("content") + .and_then(|value| value.get("parts")) + .and_then(|value| value.as_array()) + { + let mut rectified_parts = parts.clone(); + rectify_tool_call_parts(&mut rectified_parts, tool_schema_hints.as_ref()); + if let Some(signature) = extract_text_thought_signature(parts) { + text_thought_signature = Some(signature); + } + merge_tool_call_snapshots( + &mut tool_call_snapshots, + extract_tool_calls(&rectified_parts, tool_schema_hints.as_ref()), + ); + let visible_text = extract_visible_text(&rectified_parts); + if !visible_text.is_empty() { + let is_cumulative = visible_text.starts_with(&accumulated_text); + let delta = if is_cumulative { + visible_text[accumulated_text.len()..].to_string() + } else { + visible_text.clone() + }; + + if !delta.is_empty() { 
+ let index = *text_block_index.get_or_insert_with(|| { + let assigned = next_content_index; + next_content_index += 1; + assigned + }); + + if !open_indices.contains(&index) { + let start_event = json!({ + "type": "content_block_start", + "index": index, + "content_block": { + "type": "text", + "text": "" + } + }); + yield Ok(encode_sse("content_block_start", &start_event)); + open_indices.insert(index); + } + + let delta_event = json!({ + "type": "content_block_delta", + "index": index, + "delta": { + "type": "text_delta", + "text": delta + } + }); + yield Ok(encode_sse("content_block_delta", &delta_event)); + if is_cumulative { + accumulated_text = visible_text; + } else { + accumulated_text.push_str(&delta); + } + } + } + } + } + } + } + Err(error) => { + yield Err(std::io::Error::other(error.to_string())); + return; + } + } + } + + if !has_sent_message_start { + let event = json!({ + "type": "message_start", + "message": { + "id": message_id.clone().unwrap_or_default(), + "type": "message", + "role": "assistant", + "model": current_model.clone().unwrap_or_default(), + "usage": anthropic_usage_from_gemini(latest_usage.as_ref()) + } + }); + yield Ok(encode_sse("message_start", &event)); + } + + if accumulated_text.is_empty() { + if let Some(blocked_text) = blocked_text.clone() { + let index = *text_block_index.get_or_insert_with(|| { + let assigned = next_content_index; + next_content_index += 1; + assigned + }); + + if !open_indices.contains(&index) { + let start_event = json!({ + "type": "content_block_start", + "index": index, + "content_block": { + "type": "text", + "text": "" + } + }); + yield Ok(encode_sse("content_block_start", &start_event)); + open_indices.insert(index); + } + + let delta_event = json!({ + "type": "content_block_delta", + "index": index, + "delta": { + "type": "text_delta", + "text": blocked_text + } + }); + yield Ok(encode_sse("content_block_delta", &delta_event)); + } + } + + if let Some(index) = text_block_index { + if 
open_indices.remove(&index) { + let stop_event = json!({ + "type": "content_block_stop", + "index": index + }); + yield Ok(encode_sse("content_block_stop", &stop_event)); + } + } + + let tool_calls = tool_call_snapshots; + for tool_call in &tool_calls { + let index = next_content_index; + next_content_index += 1; + + let start_event = json!({ + "type": "content_block_start", + "index": index, + "content_block": { + "type": "tool_use", + "id": tool_call.id.clone().unwrap_or_default(), + "name": tool_call.name + } + }); + yield Ok(encode_sse("content_block_start", &start_event)); + + let delta_event = json!({ + "type": "content_block_delta", + "index": index, + "delta": { + "type": "input_json_delta", + "partial_json": serde_json::to_string(&tool_call.args).unwrap_or_else(|_| "{}".to_string()) + } + }); + yield Ok(encode_sse("content_block_delta", &delta_event)); + + let stop_event = json!({ + "type": "content_block_stop", + "index": index + }); + yield Ok(encode_sse("content_block_stop", &stop_event)); + } + + if let (Some(store), Some(provider_id), Some(session_id)) = ( + shadow_store.as_ref(), + provider_id.as_deref(), + session_id.as_deref(), + ) { + let shadow_text = if accumulated_text.is_empty() { + blocked_text.as_deref() + } else { + Some(accumulated_text.as_str()) + }; + let shadow_parts = build_shadow_assistant_parts( + shadow_text, + text_thought_signature.as_deref(), + &tool_calls, + ); + if !shadow_parts.is_empty() { + store.record_assistant_turn( + provider_id, + session_id, + json!({ "parts": shadow_parts }), + tool_calls.clone(), + ); + } + } + + let stop_reason = map_finish_reason( + latest_finish_reason.as_deref(), + !tool_calls.is_empty(), + blocked_text.is_some(), + ); + let usage = anthropic_usage_from_gemini(latest_usage.as_ref()); + let message_delta = json!({ + "type": "message_delta", + "delta": { + "stop_reason": stop_reason, + "stop_sequence": Value::Null + }, + "usage": usage + }); + yield Ok(encode_sse("message_delta", &message_delta)); 
+ + let message_stop = json!({ "type": "message_stop" }); + yield Ok(encode_sse("message_stop", &message_stop)); + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::proxy::providers::gemini_shadow::GeminiShadowStore; + use crate::proxy::providers::transform_gemini::anthropic_to_gemini_with_shadow; + use std::sync::Arc; + + fn collect_stream_output(chunks: Vec<&str>) -> String { + let owned_chunks: Vec = chunks.into_iter().map(ToString::to_string).collect(); + let stream = futures::stream::iter( + owned_chunks + .into_iter() + .map(|chunk| Ok::(Bytes::from(chunk))), + ); + let converted = create_anthropic_sse_stream_from_gemini(stream, None, None, None, None); + futures::executor::block_on(async move { + converted + .collect::>() + .await + .into_iter() + .map(|item| String::from_utf8(item.unwrap().to_vec()).unwrap()) + .collect::>() + .join("") + }) + } + + fn collect_stream_output_with_shadow( + chunks: Vec<&str>, + store: Arc, + provider_id: &str, + session_id: &str, + ) -> String { + let owned_chunks: Vec = chunks.into_iter().map(ToString::to_string).collect(); + let stream = futures::stream::iter( + owned_chunks + .into_iter() + .map(|chunk| Ok::(Bytes::from(chunk))), + ); + let converted = create_anthropic_sse_stream_from_gemini( + stream, + Some(store), + Some(provider_id.to_string()), + Some(session_id.to_string()), + None, + ); + futures::executor::block_on(async move { + converted + .collect::>() + .await + .into_iter() + .map(|item| String::from_utf8(item.unwrap().to_vec()).unwrap()) + .collect::>() + .join("") + }) + } + + #[test] + fn converts_text_stream_to_anthropic_sse() { + let output = collect_stream_output(vec![ + "data: {\"responseId\":\"resp_1\",\"modelVersion\":\"gemini-2.5-pro\",\"candidates\":[{\"content\":{\"parts\":[{\"text\":\"Hel\"}]}}],\"usageMetadata\":{\"promptTokenCount\":10,\"totalTokenCount\":13}}\n\n", + "data: 
{\"responseId\":\"resp_1\",\"modelVersion\":\"gemini-2.5-pro\",\"candidates\":[{\"finishReason\":\"STOP\",\"content\":{\"parts\":[{\"text\":\"Hello\"}]}}],\"usageMetadata\":{\"promptTokenCount\":10,\"totalTokenCount\":15}}\n\n", + ]); + + assert!(output.contains("event: message_start")); + assert!(output.contains("\"type\":\"text_delta\"")); + assert!(output.contains("\"text\":\"Hel\"")); + assert!(output.contains("\"text\":\"lo\"")); + assert!(output.contains("\"stop_reason\":\"end_turn\"")); + assert!(output.contains("event: message_stop")); + } + + #[test] + fn converts_function_call_stream_to_tool_use_events() { + let output = collect_stream_output(vec![ + "data: {\"responseId\":\"resp_2\",\"modelVersion\":\"gemini-2.5-pro\",\"candidates\":[{\"finishReason\":\"STOP\",\"content\":{\"parts\":[{\"functionCall\":{\"id\":\"call_1\",\"name\":\"get_weather\",\"args\":{\"city\":\"Tokyo\"}},\"thoughtSignature\":\"sig-1\"}]}}],\"usageMetadata\":{\"promptTokenCount\":5,\"totalTokenCount\":8}}\n\n", + ]); + + assert!(output.contains("\"type\":\"tool_use\"")); + assert!(output.contains("\"name\":\"get_weather\"")); + assert!(output.contains("\"type\":\"input_json_delta\"")); + assert!(output.contains("\"stop_reason\":\"tool_use\"")); + } + + #[test] + fn converts_crlf_delimited_stream_to_anthropic_sse() { + let output = collect_stream_output(vec![ + "data: {\"responseId\":\"resp_3\",\"modelVersion\":\"gemini-2.5-pro\",\"candidates\":[{\"content\":{\"parts\":[{\"text\":\"Hi\"}]}}],\"usageMetadata\":{\"promptTokenCount\":4,\"totalTokenCount\":6}}\r\n\r\n", + "data: {\"responseId\":\"resp_3\",\"modelVersion\":\"gemini-2.5-pro\",\"candidates\":[{\"finishReason\":\"STOP\",\"content\":{\"parts\":[{\"text\":\"Hi there\"}]}}],\"usageMetadata\":{\"promptTokenCount\":4,\"totalTokenCount\":9}}\r\n\r\n", + ]); + + assert!(output.contains("event: message_start")); + assert!(output.contains("\"type\":\"text_delta\"")); + assert!(output.contains("\"text\":\"Hi\"")); + 
assert!(output.contains("\"text\":\" there\"")); + assert!(output.contains("event: message_stop")); + } + + #[test] + fn stores_full_text_for_shadow_replay_across_delta_chunks() { + let store = Arc::new(GeminiShadowStore::with_limits(8, 4)); + let output = collect_stream_output_with_shadow( + vec![ + "data: {\"responseId\":\"resp_4\",\"modelVersion\":\"gemini-2.5-pro\",\"candidates\":[{\"content\":{\"parts\":[{\"text\":\"Hel\"}]}}],\"usageMetadata\":{\"promptTokenCount\":4,\"totalTokenCount\":6}}\n\n", + "data: {\"responseId\":\"resp_4\",\"modelVersion\":\"gemini-2.5-pro\",\"candidates\":[{\"finishReason\":\"STOP\",\"content\":{\"parts\":[{\"text\":\"lo\"},{\"text\":\"\",\"thoughtSignature\":\"sig-1\"}]}}],\"usageMetadata\":{\"promptTokenCount\":4,\"totalTokenCount\":8}}\n\n", + ], + store.clone(), + "provider-a", + "session-1", + ); + + assert!(output.contains("\"text\":\"Hel\"")); + assert!(output.contains("\"text\":\"lo\"")); + + let shadow = store + .latest_assistant_content("provider-a", "session-1") + .unwrap(); + assert_eq!(shadow["parts"][0]["text"], "Hello"); + assert_eq!(shadow["parts"][0]["thoughtSignature"], "sig-1"); + + let second_turn = anthropic_to_gemini_with_shadow( + json!({ + "messages": [ + { "role": "user", "content": "Hi" }, + { "role": "assistant", "content": [{ "type": "text", "text": "Hello" }] }, + { "role": "user", "content": "Continue" } + ] + }), + Some(store.as_ref()), + Some("provider-a"), + Some("session-1"), + ) + .unwrap(); + + assert_eq!(second_turn["contents"][1]["role"], "model"); + assert_eq!(second_turn["contents"][1]["parts"][0]["text"], "Hello"); + assert_eq!( + second_turn["contents"][1]["parts"][0]["thoughtSignature"], + "sig-1" + ); + } + + #[test] + fn rectifies_streamed_tool_call_args_from_tool_schema_hints() { + let owned_chunks = vec![ + "data: 
{\"responseId\":\"resp_5\",\"modelVersion\":\"gemini-2.5-pro\",\"candidates\":[{\"finishReason\":\"STOP\",\"content\":{\"parts\":[{\"functionCall\":{\"id\":\"call_1\",\"name\":\"Bash\",\"args\":{\"args\":\"git status\"}}}]}}],\"usageMetadata\":{\"promptTokenCount\":5,\"totalTokenCount\":8}}\n\n".to_string(), + ]; + let stream = futures::stream::iter( + owned_chunks + .into_iter() + .map(|chunk| Ok::(Bytes::from(chunk))), + ); + let hints = super::super::transform_gemini::extract_anthropic_tool_schema_hints(&json!({ + "tools": [{ + "name": "Bash", + "input_schema": { + "type": "object", + "properties": { + "command": { "type": "string" }, + "timeout": { "type": "number" } + }, + "required": ["command"] + } + }] + })); + let converted = + create_anthropic_sse_stream_from_gemini(stream, None, None, None, Some(hints)); + let output = futures::executor::block_on(async move { + converted + .collect::>() + .await + .into_iter() + .map(|item| String::from_utf8(item.unwrap().to_vec()).unwrap()) + .collect::>() + .join("") + }); + + assert!(output.contains("\"partial_json\":\"{\\\"command\\\":\\\"git status\\\"}\"")); + } + + #[test] + fn rectifies_streamed_skill_args_from_nested_parameters() { + let payload = json!({ + "responseId": "resp_6", + "modelVersion": "gemini-2.5-pro", + "candidates": [{ + "finishReason": "STOP", + "content": { + "parts": [{ + "functionCall": { + "id": "call_1", + "name": "Skill", + "args": { + "name": "git-commit", + "parameters": { + "args": ["详细分析内容 编写提交信息 分多次提交代码"] + } + } + } + }] + } + }], + "usageMetadata": { + "promptTokenCount": 5, + "totalTokenCount": 8 + } + }); + let owned_chunks = vec![format!( + "data: {}\n\n", + serde_json::to_string(&payload).unwrap() + )]; + let stream = futures::stream::iter( + owned_chunks + .into_iter() + .map(|chunk| Ok::(Bytes::from(chunk))), + ); + let hints = super::super::transform_gemini::extract_anthropic_tool_schema_hints(&json!({ + "tools": [{ + "name": "Skill", + "input_schema": { + "type": "object", + 
"properties": { + "skill": { "type": "string" }, + "args": { "type": "string" } + }, + "required": ["skill"] + } + }] + })); + let converted = + create_anthropic_sse_stream_from_gemini(stream, None, None, None, Some(hints)); + let output = futures::executor::block_on(async move { + converted + .collect::>() + .await + .into_iter() + .map(|item| String::from_utf8(item.unwrap().to_vec()).unwrap()) + .collect::>() + .join("") + }); + + assert!(output.contains("git-commit")); + assert!(output.contains("详细分析内容 编写提交信息 分多次提交代码")); + assert!(!output.contains("\\\"parameters\\\"")); + } +} diff --git a/src-tauri/src/proxy/providers/streaming_responses.rs b/src-tauri/src/proxy/providers/streaming_responses.rs index 2c4b306e2..1eba5033e 100644 --- a/src-tauri/src/proxy/providers/streaming_responses.rs +++ b/src-tauri/src/proxy/providers/streaming_responses.rs @@ -9,7 +9,7 @@ //! 与 Chat Completions 的 delta chunk 模型完全不同,需要独立的状态机处理。 use super::transform_responses::{build_anthropic_usage_from_responses, map_responses_stop_reason}; -use crate::proxy::sse::strip_sse_field; +use crate::proxy::sse::{strip_sse_field, take_sse_block}; use bytes::Bytes; use futures::stream::{Stream, StreamExt}; use serde_json::{json, Value}; @@ -122,10 +122,7 @@ pub fn create_anthropic_sse_stream_from_responses, + required_keys: Vec, +} + +pub type AnthropicToolSchemaHints = HashMap; + +pub fn anthropic_to_gemini(body: Value) -> Result { + anthropic_to_gemini_with_shadow(body, None, None, None) +} + +pub fn anthropic_to_gemini_with_shadow( + body: Value, + shadow_store: Option<&GeminiShadowStore>, + provider_id: Option<&str>, + session_id: Option<&str>, +) -> Result { + let mut result = json!({}); + let shadow_turns = shadow_store + .zip(provider_id) + .zip(session_id) + .and_then(|((store, provider_id), session_id)| store.get_session(provider_id, session_id)) + .map(|snapshot| snapshot.turns) + .unwrap_or_default(); + + if let Some(system) = build_system_instruction(body.get("system"))? 
{ + result["systemInstruction"] = system; + } + + if let Some(messages) = body.get("messages").and_then(|value| value.as_array()) { + result["contents"] = json!(convert_messages_to_contents(messages, &shadow_turns)?); + } + + if let Some(generation_config) = build_generation_config(&body) { + result["generationConfig"] = generation_config; + } + + if let Some(tools) = body.get("tools").and_then(|value| value.as_array()) { + let function_declarations: Vec = tools + .iter() + .filter(|tool| tool.get("type").and_then(|value| value.as_str()) != Some("BatchTool")) + .map(|tool| { + build_gemini_function_declaration( + tool.get("name") + .and_then(|value| value.as_str()) + .unwrap_or(""), + tool.get("description").and_then(|value| value.as_str()), + tool.get("input_schema") + .cloned() + .unwrap_or_else(|| json!({})), + ) + }) + .collect(); + + if !function_declarations.is_empty() { + result["tools"] = json!([{ "functionDeclarations": function_declarations }]); + } + } + + if let Some(tool_config) = map_tool_choice(body.get("tool_choice"))? 
{ + result["toolConfig"] = tool_config; + } + + Ok(result) +} + +pub fn gemini_to_anthropic(body: Value) -> Result { + gemini_to_anthropic_with_shadow(body, None, None, None) +} + +pub fn gemini_to_anthropic_with_shadow( + body: Value, + shadow_store: Option<&GeminiShadowStore>, + provider_id: Option<&str>, + session_id: Option<&str>, +) -> Result { + gemini_to_anthropic_with_shadow_and_hints(body, shadow_store, provider_id, session_id, None) +} + +pub fn gemini_to_anthropic_with_shadow_and_hints( + body: Value, + shadow_store: Option<&GeminiShadowStore>, + provider_id: Option<&str>, + session_id: Option<&str>, + tool_schema_hints: Option<&AnthropicToolSchemaHints>, +) -> Result { + if let Some(block_reason) = body + .get("promptFeedback") + .and_then(|value| value.get("blockReason")) + .and_then(|value| value.as_str()) + { + let text = format!("Request blocked by Gemini safety filters: {block_reason}"); + return Ok(json!({ + "id": body.get("responseId").and_then(|value| value.as_str()).unwrap_or(""), + "type": "message", + "role": "assistant", + "content": [{ "type": "text", "text": text }], + "model": body.get("modelVersion").and_then(|value| value.as_str()).unwrap_or(""), + "stop_reason": "refusal", + "stop_sequence": Value::Null, + "usage": build_anthropic_usage(body.get("usageMetadata")) + })); + } + + let candidate = body + .get("candidates") + .and_then(|value| value.as_array()) + .and_then(|value| value.first()) + .ok_or_else(|| { + ProxyError::TransformError("No candidates in Gemini response".to_string()) + })?; + + let parts = candidate + .get("content") + .and_then(|value| value.get("parts")) + .and_then(|value| value.as_array()) + .cloned() + .unwrap_or_default(); + + let mut rectified_parts = parts.clone(); + rectify_tool_call_parts(&mut rectified_parts, tool_schema_hints); + + let mut content = Vec::new(); + let mut has_tool_use = false; + + for part in &rectified_parts { + if part.get("thought").and_then(|value| value.as_bool()) == Some(true) { + 
continue; + } + + if let Some(text) = part.get("text").and_then(|value| value.as_str()) { + if !text.is_empty() { + content.push(json!({ + "type": "text", + "text": text + })); + } + continue; + } + + if let Some(function_call) = part.get("functionCall") { + has_tool_use = true; + content.push(json!({ + "type": "tool_use", + "id": function_call.get("id").and_then(|value| value.as_str()).unwrap_or(""), + "name": function_call.get("name").and_then(|value| value.as_str()).unwrap_or(""), + "input": function_call.get("args").cloned().unwrap_or_else(|| json!({})) + })); + } + } + + let stop_reason = map_finish_reason( + candidate + .get("finishReason") + .and_then(|value| value.as_str()), + has_tool_use, + ); + + let anthropic_response = json!({ + "id": body.get("responseId").and_then(|value| value.as_str()).unwrap_or(""), + "type": "message", + "role": "assistant", + "content": content, + "model": body.get("modelVersion").and_then(|value| value.as_str()).unwrap_or(""), + "stop_reason": stop_reason, + "stop_sequence": Value::Null, + "usage": build_anthropic_usage(body.get("usageMetadata")) + }); + + if let (Some(store), Some(provider_id), Some(session_id), Some(content)) = ( + shadow_store, + provider_id, + session_id, + candidate.get("content"), + ) { + let mut shadow_content = content.clone(); + if let Some(parts_value) = shadow_content.get_mut("parts") { + *parts_value = json!(rectified_parts.clone()); + } + store.record_assistant_turn( + provider_id, + session_id, + shadow_content, + extract_tool_call_meta(&rectified_parts), + ); + } + + Ok(anthropic_response) +} + +pub fn extract_gemini_model(body: &Value) -> Option<&str> { + body.get("model").and_then(|value| value.as_str()) +} + +fn build_system_instruction(system: Option<&Value>) -> Result, ProxyError> { + let Some(system) = system else { + return Ok(None); + }; + + if let Some(text) = system.as_str() { + if text.is_empty() { + return Ok(None); + } + return Ok(Some(json!({ + "parts": [{ "text": text }] + }))); + 
} + + let Some(blocks) = system.as_array() else { + return Err(ProxyError::TransformError( + "Anthropic system must be a string or an array".to_string(), + )); + }; + + let texts: Vec<&str> = blocks + .iter() + .filter_map(|block| block.get("text").and_then(|value| value.as_str())) + .filter(|text| !text.is_empty()) + .collect(); + + if texts.is_empty() { + return Ok(None); + } + + Ok(Some(json!({ + "parts": [{ "text": texts.join("\n\n") }] + }))) +} + +fn build_generation_config(body: &Value) -> Option { + let mut config = Map::new(); + + if let Some(value) = body.get("max_tokens") { + config.insert("maxOutputTokens".to_string(), value.clone()); + } + if let Some(value) = body.get("temperature") { + config.insert("temperature".to_string(), value.clone()); + } + if let Some(value) = body.get("top_p") { + config.insert("topP".to_string(), value.clone()); + } + if let Some(value) = body.get("stop_sequences") { + config.insert("stopSequences".to_string(), value.clone()); + } + + if config.is_empty() { + None + } else { + Some(Value::Object(config)) + } +} + +fn convert_messages_to_contents( + messages: &[Value], + shadow_turns: &[GeminiAssistantTurn], +) -> Result, ProxyError> { + let mut contents = Vec::new(); + let total_assistant_messages = messages + .iter() + .filter(|message| message.get("role").and_then(|value| value.as_str()) == Some("assistant")) + .count(); + let effective_shadow_turns = if shadow_turns.len() > total_assistant_messages { + &shadow_turns[shadow_turns.len() - total_assistant_messages..] 
+ } else { + shadow_turns + }; + let mut tool_name_by_id = build_tool_name_map_from_shadow_turns(shadow_turns); + let shadow_start_index = total_assistant_messages.saturating_sub(effective_shadow_turns.len()); + let mut assistant_seen_index = 0usize; + + for message in messages { + let role = message + .get("role") + .and_then(|value| value.as_str()) + .unwrap_or("user"); + + let gemini_role = if role == "assistant" { "model" } else { "user" }; + + let parts = if role == "assistant" { + let shadow_index = assistant_seen_index + .checked_sub(shadow_start_index) + .filter(|index| *index < effective_shadow_turns.len()); + assistant_seen_index += 1; + + if let Some(index) = shadow_index { + let shadow_turn = &effective_shadow_turns[index]; + merge_tool_names_from_shadow(shadow_turn, &mut tool_name_by_id); + if let Some(parts) = shadow_parts(&shadow_turn.assistant_content) { + parts + } else { + convert_message_content_to_parts( + message.get("content"), + role, + &mut tool_name_by_id, + )? + } + } else { + convert_message_content_to_parts( + message.get("content"), + role, + &mut tool_name_by_id, + )? + } + } else { + convert_message_content_to_parts(message.get("content"), role, &mut tool_name_by_id)? 
+ }; + + if role == "assistant" { + merge_tool_names_from_parts(&parts, &mut tool_name_by_id); + } + + contents.push(json!({ + "role": gemini_role, + "parts": parts + })); + } + + Ok(contents) +} + +fn convert_message_content_to_parts( + content: Option<&Value>, + role: &str, + tool_name_by_id: &mut std::collections::HashMap, +) -> Result, ProxyError> { + let Some(content) = content else { + return Ok(Vec::new()); + }; + + if let Some(text) = content.as_str() { + return Ok(vec![json!({ "text": text })]); + } + + let Some(blocks) = content.as_array() else { + return Err(ProxyError::TransformError( + "Anthropic message content must be a string or array".to_string(), + )); + }; + + let mut parts = Vec::new(); + + for block in blocks { + let block_type = block + .get("type") + .and_then(|value| value.as_str()) + .unwrap_or(""); + + match block_type { + "text" => { + if let Some(text) = block.get("text").and_then(|value| value.as_str()) { + parts.push(json!({ "text": text })); + } + } + "image" => { + let source = block.get("source").ok_or_else(|| { + ProxyError::TransformError("Gemini image block missing source".to_string()) + })?; + + let source_type = source + .get("type") + .and_then(|value| value.as_str()) + .unwrap_or(""); + + if source_type != "base64" { + return Err(ProxyError::TransformError(format!( + "Gemini Native only supports base64 image sources, got `{source_type}`" + ))); + } + + parts.push(json!({ + "inlineData": { + "mimeType": source.get("media_type").and_then(|value| value.as_str()).unwrap_or("image/png"), + "data": source.get("data").and_then(|value| value.as_str()).unwrap_or("") + } + })); + } + "document" => { + let source = block.get("source").ok_or_else(|| { + ProxyError::TransformError("Gemini document block missing source".to_string()) + })?; + + let source_type = source + .get("type") + .and_then(|value| value.as_str()) + .unwrap_or(""); + + if source_type != "base64" { + return Err(ProxyError::TransformError(format!( + "Gemini Native only 
supports base64 document sources, got `{source_type}`" + ))); + } + + parts.push(json!({ + "inlineData": { + "mimeType": source.get("media_type").and_then(|value| value.as_str()).unwrap_or("application/pdf"), + "data": source.get("data").and_then(|value| value.as_str()).unwrap_or("") + } + })); + } + "tool_use" => { + if role != "assistant" { + return Err(ProxyError::TransformError( + "tool_use blocks are only valid in assistant messages".to_string(), + )); + } + + let id = block + .get("id") + .and_then(|value| value.as_str()) + .unwrap_or(""); + let name = block + .get("name") + .and_then(|value| value.as_str()) + .unwrap_or(""); + if !id.is_empty() && !name.is_empty() { + tool_name_by_id.insert(id.to_string(), name.to_string()); + } + + parts.push(json!({ + "functionCall": { + "id": id, + "name": name, + "args": block.get("input").cloned().unwrap_or_else(|| json!({})) + } + })); + } + "tool_result" => { + let tool_use_id = block + .get("tool_use_id") + .and_then(|value| value.as_str()) + .unwrap_or(""); + let name = tool_name_by_id + .get(tool_use_id) + .cloned() + .ok_or_else(|| { + ProxyError::TransformError(format!( + "Unable to resolve Gemini functionResponse.name for tool_use_id `{tool_use_id}`" + )) + })?; + + parts.push(json!({ + "functionResponse": { + "id": tool_use_id, + "name": name, + "response": normalize_tool_result_response(block.get("content")) + } + })); + } + "thinking" | "redacted_thinking" => {} + _ => {} + } + } + + Ok(parts) +} + +fn normalize_tool_result_response(content: Option<&Value>) -> Value { + match content { + Some(Value::String(text)) => json!({ "content": text }), + Some(Value::Array(blocks)) => { + let texts: Vec<&str> = blocks + .iter() + .filter(|block| block.get("type").and_then(|value| value.as_str()) == Some("text")) + .filter_map(|block| block.get("text").and_then(|value| value.as_str())) + .collect(); + + if texts.is_empty() { + json!({ "content": Value::Array(blocks.clone()) }) + } else { + json!({ "content": 
texts.join("\n") }) + } + } + Some(value) => json!({ "content": value.clone() }), + None => json!({ "content": "" }), + } +} + +fn shadow_parts(content: &Value) -> Option> { + content + .get("parts") + .and_then(|value| value.as_array()) + .cloned() + .or_else(|| content.as_array().cloned()) +} + +pub fn extract_anthropic_tool_schema_hints(body: &Value) -> AnthropicToolSchemaHints { + body.get("tools") + .and_then(|value| value.as_array()) + .into_iter() + .flatten() + .filter_map(|tool| { + let name = tool.get("name").and_then(|value| value.as_str())?; + let input_schema = tool + .get("input_schema") + .and_then(|value| value.as_object())?; + let properties = input_schema + .get("properties") + .and_then(|value| value.as_object())?; + if properties.is_empty() { + return None; + } + + let expected_keys = properties.keys().cloned().collect::>(); + let required_keys = input_schema + .get("required") + .and_then(|value| value.as_array()) + .map(|values| { + values + .iter() + .filter_map(|value| value.as_str().map(ToString::to_string)) + .collect::>() + }) + .unwrap_or_default(); + + Some(( + name.to_string(), + AnthropicToolSchemaHint { + expected_keys, + required_keys, + }, + )) + }) + .collect() +} + +pub fn rectify_tool_call_parts( + parts: &mut [Value], + tool_schema_hints: Option<&AnthropicToolSchemaHints>, +) { + for part in parts { + let Some(function_call) = part + .get_mut("functionCall") + .and_then(|value| value.as_object_mut()) + else { + continue; + }; + let Some(name) = function_call + .get("name") + .and_then(|value| value.as_str()) + .map(ToString::to_string) + else { + continue; + }; + let Some(args) = function_call.get_mut("args") else { + continue; + }; + + if rectify_tool_call_args(&name, args, tool_schema_hints) { + log::info!("[Claude/Gemini] Rectified tool args for `{name}`"); + } + } +} + +pub fn rectify_tool_call_args( + tool_name: &str, + args: &mut Value, + tool_schema_hints: Option<&AnthropicToolSchemaHints>, +) -> bool { + let 
Some(tool_schema_hints) = tool_schema_hints else { + return false; + }; + let Some(hint) = tool_schema_hints.get(tool_name) else { + return false; + }; + let Some(args_object) = args.as_object_mut() else { + return false; + }; + if args_object.is_empty() || hint.expected_keys.is_empty() { + return false; + } + let mut changed = false; + + if hint.expected_keys.iter().any(|key| key == "skill") && !args_object.contains_key("skill") { + if let Some(value) = args_object.remove("name") { + args_object.insert("skill".to_string(), value); + changed = true; + } + } + + if let Some(parameters_value) = args_object.remove("parameters") { + if let Some(parameters_object) = parameters_value.as_object() { + for expected_key in &hint.expected_keys { + if args_object.contains_key(expected_key) { + continue; + } + let Some(value) = parameters_object.get(expected_key) else { + continue; + }; + let normalized_value = match value { + Value::Array(values) if values.len() == 1 => values[0].clone(), + _ => value.clone(), + }; + args_object.insert(expected_key.clone(), normalized_value); + changed = true; + } + } + } + + if hint + .required_keys + .iter() + .all(|key| args_object.contains_key(key.as_str())) + { + return changed; + } + + let expected_key_set = hint + .expected_keys + .iter() + .map(String::as_str) + .collect::>(); + let unexpected_keys = args_object + .keys() + .filter(|key| !expected_key_set.contains(key.as_str())) + .cloned() + .collect::>(); + if unexpected_keys.len() != 1 { + return false; + } + + let target_key = hint + .required_keys + .iter() + .find(|key| !args_object.contains_key(key.as_str())) + .cloned() + .or_else(|| { + if hint.expected_keys.len() == 1 && args_object.len() == 1 { + hint.expected_keys.first().cloned() + } else { + None + } + }); + let Some(target_key) = target_key else { + return false; + }; + if args_object.contains_key(&target_key) { + return false; + } + + let source_key = &unexpected_keys[0]; + let Some(value) = 
args_object.remove(source_key) else { + return false; + }; + args_object.insert(target_key, value); + true +} + +fn merge_tool_names_from_shadow( + turn: &GeminiAssistantTurn, + tool_name_by_id: &mut HashMap, +) { + for tool_call in &turn.tool_calls { + if let Some(id) = &tool_call.id { + tool_name_by_id.insert(id.clone(), tool_call.name.clone()); + } + } + + if let Some(parts) = shadow_parts(&turn.assistant_content) { + merge_tool_names_from_parts(&parts, tool_name_by_id); + } +} + +fn build_tool_name_map_from_shadow_turns( + shadow_turns: &[GeminiAssistantTurn], +) -> HashMap { + let mut tool_name_by_id = HashMap::new(); + for turn in shadow_turns { + merge_tool_names_from_shadow(turn, &mut tool_name_by_id); + } + tool_name_by_id +} + +fn merge_tool_names_from_parts(parts: &[Value], tool_name_by_id: &mut HashMap) { + for part in parts { + let Some(function_call) = part.get("functionCall") else { + continue; + }; + let Some(id) = function_call.get("id").and_then(|value| value.as_str()) else { + continue; + }; + let Some(name) = function_call.get("name").and_then(|value| value.as_str()) else { + continue; + }; + if !id.is_empty() && !name.is_empty() { + tool_name_by_id.insert(id.to_string(), name.to_string()); + } + } +} + +fn extract_tool_call_meta(parts: &[Value]) -> Vec { + parts + .iter() + .filter_map(|part| { + let function_call = part.get("functionCall")?; + Some(GeminiToolCallMeta::new( + function_call.get("id").and_then(|value| value.as_str()), + function_call + .get("name") + .and_then(|value| value.as_str()) + .unwrap_or(""), + function_call + .get("args") + .cloned() + .unwrap_or_else(|| json!({})), + part.get("thoughtSignature") + .or_else(|| part.get("thought_signature")) + .and_then(|value| value.as_str()), + )) + }) + .collect() +} + +fn map_tool_choice(tool_choice: Option<&Value>) -> Result, ProxyError> { + let Some(tool_choice) = tool_choice else { + return Ok(None); + }; + + match tool_choice { + Value::String(choice) => Ok(match choice.as_str() 
{ + "auto" => Some(json!({ + "functionCallingConfig": { "mode": "AUTO" } + })), + "none" => Some(json!({ + "functionCallingConfig": { "mode": "NONE" } + })), + other => { + return Err(ProxyError::TransformError(format!( + "Unsupported Gemini tool_choice string: {other}" + ))); + } + }), + Value::Object(object) => { + let Some(choice_type) = object.get("type").and_then(|value| value.as_str()) else { + return Ok(None); + }; + + let config = match choice_type { + "auto" => json!({ "mode": "AUTO" }), + "none" => json!({ "mode": "NONE" }), + "any" => json!({ "mode": "ANY" }), + "tool" => { + let name = object + .get("name") + .and_then(|value| value.as_str()) + .unwrap_or(""); + json!({ + "mode": "ANY", + "allowedFunctionNames": [name] + }) + } + other => { + return Err(ProxyError::TransformError(format!( + "Unsupported Gemini tool_choice type: {other}" + ))); + } + }; + + Ok(Some(json!({ "functionCallingConfig": config }))) + } + _ => Ok(None), + } +} + +fn build_anthropic_usage(usage: Option<&Value>) -> Value { + let Some(usage) = usage else { + return json!({ + "input_tokens": 0, + "output_tokens": 0 + }); + }; + + let input_tokens = usage + .get("promptTokenCount") + .and_then(|value| value.as_u64()) + .unwrap_or(0); + let total_tokens = usage + .get("totalTokenCount") + .and_then(|value| value.as_u64()) + .unwrap_or(0); + let output_tokens = total_tokens.saturating_sub(input_tokens); + + let mut result = json!({ + "input_tokens": input_tokens, + "output_tokens": output_tokens + }); + + if let Some(cached) = usage + .get("cachedContentTokenCount") + .and_then(|value| value.as_u64()) + { + result["cache_read_input_tokens"] = json!(cached); + } + + result +} + +fn map_finish_reason(reason: Option<&str>, has_tool_use: bool) -> Value { + let mapped = match reason { + Some("MAX_TOKENS") => Some("max_tokens"), + Some("STOP") | Some("FINISH_REASON_UNSPECIFIED") | None => { + if has_tool_use { + Some("tool_use") + } else { + Some("end_turn") + } + } + Some("SAFETY") + | 
Some("RECITATION") + | Some("SPII") + | Some("BLOCKLIST") + | Some("PROHIBITED_CONTENT") => Some("refusal"), + Some(other) => { + log::warn!("[Claude/Gemini] Unknown Gemini finishReason `{other}`, using end_turn"); + Some("end_turn") + } + }; + + match mapped { + Some(value) => json!(value), + None => Value::Null, + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn anthropic_to_gemini_maps_system_and_messages() { + let input = json!({ + "model": "gemini-2.5-pro", + "max_tokens": 128, + "system": "You are helpful.", + "messages": [ + { "role": "user", "content": "Hello" } + ] + }); + + let result = anthropic_to_gemini(input).unwrap(); + assert_eq!( + result["systemInstruction"]["parts"][0]["text"], + "You are helpful." + ); + assert_eq!(result["contents"][0]["role"], "user"); + assert_eq!(result["contents"][0]["parts"][0]["text"], "Hello"); + assert_eq!(result["generationConfig"]["maxOutputTokens"], 128); + } + + #[test] + fn anthropic_to_gemini_maps_tools_and_tool_results() { + let input = json!({ + "messages": [ + { + "role": "assistant", + "content": [ + { "type": "tool_use", "id": "call_1", "name": "get_weather", "input": { "city": "Tokyo" } } + ] + }, + { + "role": "user", + "content": [ + { "type": "tool_result", "tool_use_id": "call_1", "content": "Sunny" } + ] + } + ], + "tools": [ + { + "name": "get_weather", + "description": "Weather lookup", + "input_schema": { "type": "object", "properties": { "city": { "type": "string" } } } + } + ], + "tool_choice": { "type": "tool", "name": "get_weather" } + }); + + let result = anthropic_to_gemini(input).unwrap(); + assert_eq!( + result["tools"][0]["functionDeclarations"][0]["name"], + "get_weather" + ); + assert!(result["tools"][0]["functionDeclarations"][0] + .get("parameters") + .is_some()); + assert_eq!( + result["contents"][0]["parts"][0]["functionCall"]["name"], + "get_weather" + ); + assert_eq!( + result["contents"][1]["parts"][0]["functionResponse"]["name"], + "get_weather" + ); + assert_eq!( 
+ result["toolConfig"]["functionCallingConfig"]["allowedFunctionNames"][0], + "get_weather" + ); + } + + #[test] + fn anthropic_to_gemini_resolves_tool_result_name_from_shadow_content() { + let store = GeminiShadowStore::with_limits(8, 4); + store.record_assistant_turn( + "provider-a", + "session-1", + json!({ + "parts": [{ + "functionCall": { + "id": "call_1", + "name": "get_weather", + "args": { "city": "Tokyo" } + } + }] + }), + vec![], + ); + + let input = json!({ + "messages": [ + { + "role": "user", + "content": [ + { "type": "tool_result", "tool_use_id": "call_1", "content": "Sunny" } + ] + } + ] + }); + + let result = anthropic_to_gemini_with_shadow( + input, + Some(&store), + Some("provider-a"), + Some("session-1"), + ) + .unwrap(); + + assert_eq!( + result["contents"][0]["parts"][0]["functionResponse"]["name"], + "get_weather" + ); + } + + #[test] + fn anthropic_to_gemini_rejects_tool_result_without_resolvable_name() { + let input = json!({ + "messages": [ + { + "role": "user", + "content": [ + { "type": "tool_result", "tool_use_id": "call_1", "content": "Sunny" } + ] + } + ] + }); + + let error = anthropic_to_gemini(input).unwrap_err(); + assert!(error + .to_string() + .contains("Unable to resolve Gemini functionResponse.name")); + } + + #[test] + fn anthropic_to_gemini_uses_parameters_json_schema_for_rich_tool_schema() { + let input = json!({ + "tools": [ + { + "name": "search", + "description": "Search data", + "input_schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "properties": { + "query": { "type": "string" } + }, + "required": ["query"], + "additionalProperties": false + } + } + ] + }); + + let result = anthropic_to_gemini(input).unwrap(); + let declaration = &result["tools"][0]["functionDeclarations"][0]; + + assert!(declaration.get("parameters").is_none()); + assert!(declaration.get("parametersJsonSchema").is_some()); + assert!(declaration["parametersJsonSchema"].get("$schema").is_none()); + 
assert_eq!( + declaration["parametersJsonSchema"]["additionalProperties"], + false + ); + } + + #[test] + fn gemini_to_anthropic_maps_text_and_usage() { + let input = json!({ + "responseId": "resp_1", + "modelVersion": "gemini-2.5-pro", + "candidates": [{ + "finishReason": "STOP", + "content": { + "parts": [{ "text": "Hello from Gemini" }] + } + }], + "usageMetadata": { + "promptTokenCount": 12, + "totalTokenCount": 20, + "cachedContentTokenCount": 3 + } + }); + + let result = gemini_to_anthropic(input).unwrap(); + assert_eq!(result["id"], "resp_1"); + assert_eq!(result["content"][0]["type"], "text"); + assert_eq!(result["content"][0]["text"], "Hello from Gemini"); + assert_eq!(result["stop_reason"], "end_turn"); + assert_eq!(result["usage"]["input_tokens"], 12); + assert_eq!(result["usage"]["output_tokens"], 8); + assert_eq!(result["usage"]["cache_read_input_tokens"], 3); + } + + #[test] + fn gemini_to_anthropic_maps_function_calls_to_tool_use() { + let input = json!({ + "responseId": "resp_2", + "modelVersion": "gemini-2.5-pro", + "candidates": [{ + "finishReason": "STOP", + "content": { + "parts": [{ + "functionCall": { + "id": "call_1", + "name": "get_weather", + "args": { "city": "Tokyo" } + } + }] + } + }], + "usageMetadata": { + "promptTokenCount": 10, + "totalTokenCount": 15 + } + }); + + let result = gemini_to_anthropic(input).unwrap(); + assert_eq!(result["content"][0]["type"], "tool_use"); + assert_eq!(result["content"][0]["id"], "call_1"); + assert_eq!(result["stop_reason"], "tool_use"); + } + + #[test] + fn gemini_to_anthropic_rectifies_tool_args_from_schema_hints() { + let input = json!({ + "responseId": "resp_2", + "modelVersion": "gemini-2.5-pro", + "candidates": [{ + "finishReason": "STOP", + "content": { + "parts": [{ + "functionCall": { + "id": "call_1", + "name": "Skill", + "args": { + "name": "git-commit", + "parameters": { + "args": ["详细分析内容 编写提交信息 分多次提交代码"] + } + } + } + }] + } + }] + }); + let hints = 
extract_anthropic_tool_schema_hints(&json!({ + "tools": [{ + "name": "Skill", + "input_schema": { + "type": "object", + "properties": { + "skill": { "type": "string" }, + "args": { "type": "string" } + }, + "required": ["skill"] + } + }] + })); + + let result = + gemini_to_anthropic_with_shadow_and_hints(input, None, None, None, Some(&hints)) + .unwrap(); + + assert_eq!(result["content"][0]["input"]["skill"], "git-commit"); + assert_eq!( + result["content"][0]["input"]["args"], + "详细分析内容 编写提交信息 分多次提交代码" + ); + assert!(result["content"][0]["input"].get("name").is_none()); + assert!(result["content"][0]["input"].get("parameters").is_none()); + } + + #[test] + fn gemini_to_anthropic_maps_blocked_prompt_to_refusal() { + let input = json!({ + "responseId": "resp_3", + "modelVersion": "gemini-2.5-flash", + "promptFeedback": { "blockReason": "SAFETY" }, + "usageMetadata": { + "promptTokenCount": 4, + "totalTokenCount": 4 + } + }); + + let result = gemini_to_anthropic(input).unwrap(); + assert_eq!(result["stop_reason"], "refusal"); + assert_eq!(result["content"][0]["type"], "text"); + assert!(result["content"][0]["text"] + .as_str() + .unwrap() + .contains("SAFETY")); + } + + #[test] + fn shadow_replay_aligns_to_latest_turns_after_client_truncation() { + let store = GeminiShadowStore::with_limits(8, 4); + // Record 3 shadow turns (assistant messages 0, 1, 2) + for i in 0..3 { + store.record_assistant_turn( + "prov", + "sess", + json!({ + "parts": [{ + "functionCall": { + "id": format!("call_{i}"), + "name": format!("tool_{i}"), + "args": {} + } + }] + }), + vec![], + ); + } + + // Client truncates history: only sends assistant messages 1 and 2 + let input = json!({ + "messages": [ + { + "role": "assistant", + "content": [ + { "type": "tool_use", "id": "call_1", "name": "tool_1", "input": {} } + ] + }, + { + "role": "user", + "content": [ + { "type": "tool_result", "tool_use_id": "call_1", "content": "ok" } + ] + }, + { + "role": "assistant", + "content": [ + { "type": 
"tool_use", "id": "call_2", "name": "tool_2", "input": {} } + ] + }, + { + "role": "user", + "content": [ + { "type": "tool_result", "tool_use_id": "call_2", "content": "ok" } + ] + } + ] + }); + + let result = + anthropic_to_gemini_with_shadow(input, Some(&store), Some("prov"), Some("sess")) + .unwrap(); + + // Shadow turns[1] (tool_1) should align with first assistant message, + // shadow turns[2] (tool_2) with the second — not turns[0] and turns[1]. + assert_eq!( + result["contents"][0]["parts"][0]["functionCall"]["name"], + "tool_1" + ); + assert_eq!( + result["contents"][2]["parts"][0]["functionCall"]["name"], + "tool_2" + ); + } +} diff --git a/src-tauri/src/proxy/response_handler.rs b/src-tauri/src/proxy/response_handler.rs index 502ed289f..d40ec0b06 100644 --- a/src-tauri/src/proxy/response_handler.rs +++ b/src-tauri/src/proxy/response_handler.rs @@ -5,7 +5,7 @@ use super::session::ProxySession; use super::usage::parser::TokenUsage; use super::ProxyError; -use crate::proxy::sse::strip_sse_field; +use crate::proxy::sse::{strip_sse_field, take_sse_block}; use bytes::Bytes; use futures::stream::{Stream, StreamExt}; use serde_json::Value; @@ -86,10 +86,7 @@ impl StreamHandler { crate::proxy::sse::append_utf8_safe(&mut buffer, &mut utf8_remainder, &bytes); // 提取完整事件 - while let Some(pos) = buffer.find("\n\n") { - let event_text = buffer[..pos].to_string(); - buffer = buffer[pos + 2..].to_string(); - + while let Some(event_text) = take_sse_block(&mut buffer) { for line in event_text.lines() { if let Some(data) = strip_sse_field(line, "data") { if data.trim() != "[DONE]" { diff --git a/src-tauri/src/proxy/response_processor.rs b/src-tauri/src/proxy/response_processor.rs index 3f4fe8425..c41270ac0 100644 --- a/src-tauri/src/proxy/response_processor.rs +++ b/src-tauri/src/proxy/response_processor.rs @@ -7,7 +7,7 @@ use super::{ handler_context::{RequestContext, StreamingTimeoutConfig}, hyper_client::ProxyResponse, server::ProxyState, - sse::strip_sse_field, + 
sse::{strip_sse_field, take_sse_block}, usage::parser::TokenUsage, ProxyError, }; @@ -623,10 +623,7 @@ pub fn create_logged_passthrough_stream( crate::proxy::sse::append_utf8_safe(&mut buffer, &mut utf8_remainder, &bytes); // 尝试解析并记录完整的 SSE 事件 - while let Some(pos) = buffer.find("\n\n") { - let event_text = buffer[..pos].to_string(); - buffer = buffer[pos + 2..].to_string(); - + while let Some(event_text) = take_sse_block(&mut buffer) { if !event_text.trim().is_empty() { // 提取 data 部分并尝试解析为 JSON for line in event_text.lines() { @@ -687,6 +684,7 @@ mod tests { use crate::provider::ProviderMeta; use crate::proxy::failover_switch::FailoverSwitchManager; use crate::proxy::provider_router::ProviderRouter; + use crate::proxy::providers::gemini_shadow::GeminiShadowStore; use crate::proxy::types::{ProxyConfig, ProxyStatus}; use rust_decimal::Decimal; use std::collections::HashMap; @@ -723,6 +721,7 @@ start_time: Arc::new(RwLock::new(None)), current_providers: Arc::new(RwLock::new(HashMap::new())), provider_router: Arc::new(ProviderRouter::new(db.clone())), + gemini_shadow: Arc::new(GeminiShadowStore::default()), app_handle: None, failover_manager: Arc::new(FailoverSwitchManager::new(db)), } diff --git a/src-tauri/src/proxy/server.rs b/src-tauri/src/proxy/server.rs index 8fa37edf6..fb49107ff 100644 --- a/src-tauri/src/proxy/server.rs +++ b/src-tauri/src/proxy/server.rs @@ -10,7 +10,8 @@ use super::{ failover_switch::FailoverSwitchManager, handlers, log_codes::srv as log_srv, - provider_router::ProviderRouter, types::*, ProxyError, + provider_router::ProviderRouter, providers::gemini_shadow::GeminiShadowStore, types::*, + ProxyError, }; use crate::database::Database; use axum::{ @@ -36,6 +37,8 @@ pub struct ProxyState { pub current_providers: Arc>>, /// 共享的 ProviderRouter(持有熔断器状态,跨请求保持) pub provider_router: Arc<ProviderRouter>, + /// Gemini Native shadow state,用于 thoughtSignature / tool call 回放 + pub gemini_shadow: Arc<GeminiShadowStore>, /// AppHandle,用于发射事件和更新托盘菜单 pub app_handle: Option, /// 
故障转移切换管理器 @@ -69,6 +72,7 @@ impl ProxyServer { start_time: Arc::new(RwLock::new(None)), current_providers: Arc::new(RwLock::new(std::collections::HashMap::new())), provider_router, + gemini_shadow: Arc::new(GeminiShadowStore::default()), app_handle, failover_manager, }; diff --git a/src-tauri/src/proxy/sse.rs b/src-tauri/src/proxy/sse.rs index f26a6a711..adb259c97 100644 --- a/src-tauri/src/proxy/sse.rs +++ b/src-tauri/src/proxy/sse.rs @@ -4,6 +4,24 @@ pub(crate) fn strip_sse_field<'a>(line: &'a str, field: &str) -> Option<&'a str> .or_else(|| line.strip_prefix(&format!("{field}:"))) } +#[inline] +pub(crate) fn take_sse_block(buffer: &mut String) -> Option<String> { + let mut best: Option<(usize, usize)> = None; + + for (delimiter, len) in [("\r\n\r\n", 4usize), ("\n\n", 2usize)] { + if let Some(pos) = buffer.find(delimiter) { + if best.is_none_or(|(best_pos, _)| pos < best_pos) { + best = Some((pos, len)); + } + } + } + + let (pos, len) = best?; + let block = buffer[..pos].to_string(); + buffer.drain(..pos + len); + Some(block) +} + /// Append raw bytes to a UTF-8 `String` buffer, correctly handling multi-byte /// characters that are split across chunk boundaries. 
/// @@ -68,7 +86,7 @@ pub(crate) fn append_utf8_safe(buffer: &mut String, remainder: &mut Vec, new #[cfg(test)] mod tests { - use super::{append_utf8_safe, strip_sse_field}; + use super::{append_utf8_safe, strip_sse_field, take_sse_block}; #[test] fn strip_sse_field_accepts_optional_space() { @@ -91,6 +109,28 @@ mod tests { assert_eq!(strip_sse_field("id:1", "data"), None); } + #[test] + fn take_sse_block_supports_lf_delimiters() { + let mut buffer = "data: {\"ok\":true}\n\nrest".to_string(); + + assert_eq!( + take_sse_block(&mut buffer), + Some("data: {\"ok\":true}".to_string()) + ); + assert_eq!(buffer, "rest"); + } + + #[test] + fn take_sse_block_supports_crlf_delimiters() { + let mut buffer = "data: {\"ok\":true}\r\n\r\nrest".to_string(); + + assert_eq!( + take_sse_block(&mut buffer), + Some("data: {\"ok\":true}".to_string()) + ); + assert_eq!(buffer, "rest"); + } + // ------------------------------------------------------------------ // append_utf8_safe tests // ------------------------------------------------------------------ diff --git a/src-tauri/src/proxy/thinking_rectifier.rs b/src-tauri/src/proxy/thinking_rectifier.rs index ce71503c8..b43b4d8ea 100644 --- a/src-tauri/src/proxy/thinking_rectifier.rs +++ b/src-tauri/src/proxy/thinking_rectifier.rs @@ -52,6 +52,14 @@ pub fn should_rectify_thinking_signature( return true; } + // 场景1b: Gemini/第三方渠道返回 "Thought signature is not valid" + // 错误示例: "Unable to submit request because Thought signature is not valid" + if lower.contains("thought signature") + && (lower.contains("not valid") || lower.contains("invalid")) + { + return true; + } + // 场景2: assistant 消息必须以 thinking block 开头 // 错误示例: "must start with a thinking block" if lower.contains("must start with a thinking block") { @@ -280,6 +288,16 @@ mod tests { )); } + #[test] + fn test_detect_invalid_thought_signature_message() { + assert!(should_rectify_thinking_signature( + Some( + "Unable to submit request because Thought signature is not valid.. 
Learn more: https://example.com/help" + ), + &enabled_config() + )); + } + #[test] fn test_detect_invalid_signature_nested_json() { // 测试嵌套 JSON 格式的错误消息(第三方渠道常见格式) @@ -290,6 +308,15 @@ mod tests { )); } + #[test] + fn test_detect_invalid_thought_signature_nested_json() { + let nested_error = r#"{"error":{"message":"Unable to submit request because Thought signature is not valid.. Learn more: https://example.com/help","type":"upstream_error","param":"","code":400}}"#; + assert!(should_rectify_thinking_signature( + Some(nested_error), + &enabled_config() + )); + } + #[test] fn test_detect_thinking_expected() { assert!(should_rectify_thinking_signature( diff --git a/src-tauri/src/services/stream_check.rs b/src-tauri/src/services/stream_check.rs index 5e2203a05..24ac6263f 100644 --- a/src-tauri/src/services/stream_check.rs +++ b/src-tauri/src/services/stream_check.rs @@ -12,8 +12,10 @@ use std::time::Instant; use crate::app_config::AppType; use crate::error::AppError; use crate::provider::Provider; +use crate::proxy::gemini_url::resolve_gemini_native_url; use crate::proxy::providers::copilot_auth; use crate::proxy::providers::transform::anthropic_to_openai; +use crate::proxy::providers::transform_gemini::anthropic_to_gemini; use crate::proxy::providers::transform_responses::anthropic_to_responses; use crate::proxy::providers::{get_adapter, AuthInfo, AuthStrategy}; @@ -307,6 +309,8 @@ impl StreamCheckService { /// 根据供应商的 api_format 选择请求格式: /// - "anthropic" (默认): Anthropic Messages API (/v1/messages) /// - "openai_chat": OpenAI Chat Completions API (/v1/chat/completions) + /// - "openai_responses": OpenAI Responses API (/v1/responses) + /// - "gemini_native": Gemini Native streamGenerateContent /// /// `extra_headers` 是一个可选的供应商级自定义 header 集合(从 OpenClaw /// 的 `settings_config.headers` 或 OpenCode 的 `settings_config.options.headers` @@ -348,8 +352,14 @@ impl StreamCheckService { .unwrap_or(false); let is_openai_chat = effective_api_format == "openai_chat"; let 
is_openai_responses = effective_api_format == "openai_responses"; - let url = - Self::resolve_claude_stream_url(base, auth.strategy, effective_api_format, is_full_url); + let is_gemini_native = effective_api_format == "gemini_native"; + let url = Self::resolve_claude_stream_url( + base, + auth.strategy, + effective_api_format, + is_full_url, + model, + ); let max_tokens = if is_openai_responses { 16 } else { 1 }; @@ -371,6 +381,9 @@ impl StreamCheckService { let body = if is_openai_responses { anthropic_to_responses(anthropic_body, Some(&provider.id), is_codex_oauth) .map_err(|e| AppError::Message(format!("Failed to build test request: {e}")))? + } else if is_gemini_native { + anthropic_to_gemini(anthropic_body) + .map_err(|e| AppError::Message(format!("Failed to build test request: {e}")))? } else if is_openai_chat { anthropic_to_openai(anthropic_body, Some(&provider.id)) .map_err(|e| AppError::Message(format!("Failed to build test request: {e}")))? @@ -406,6 +419,23 @@ impl StreamCheckService { .header("x-vscode-user-agent-library-version", "electron-fetch") .header("x-request-id", &request_id) .header("x-agent-task-id", &request_id); + } else if is_gemini_native { + request_builder = match auth.strategy { + AuthStrategy::GoogleOAuth => { + let token = auth.access_token.as_ref().unwrap_or(&auth.api_key); + request_builder + .header("authorization", format!("Bearer {token}")) + .header("x-goog-api-client", "GeminiCLI/1.0") + .header("content-type", "application/json") + .header("accept", "text/event-stream") + .header("accept-encoding", "identity") + } + _ => request_builder + .header("x-goog-api-key", &auth.api_key) + .header("content-type", "application/json") + .header("accept", "text/event-stream") + .header("accept-encoding", "identity"), + }; } else if is_openai_chat || is_openai_responses { // OpenAI-compatible targets: Bearer auth + SSE headers only request_builder = request_builder @@ -1228,7 +1258,13 @@ impl StreamCheckService { auth_strategy: 
AuthStrategy, api_format: &str, is_full_url: bool, + model: &str, ) -> String { + if api_format == "gemini_native" { + let endpoint = format!("/v1beta/models/{model}:streamGenerateContent?alt=sse"); + return resolve_gemini_native_url(base_url, &endpoint, is_full_url); + } + if is_full_url { return base_url.to_string(); } @@ -1512,6 +1548,7 @@ mod tests { AuthStrategy::Bearer, "openai_chat", true, + "gpt-5.4", ); assert_eq!(url, "https://relay.example/v1/chat/completions"); @@ -1524,6 +1561,7 @@ mod tests { AuthStrategy::GitHubCopilot, "openai_chat", false, + "gpt-5.4", ); assert_eq!(url, "https://api.githubcopilot.com/chat/completions"); @@ -1536,6 +1574,7 @@ mod tests { AuthStrategy::GitHubCopilot, "openai_responses", false, + "gpt-5.4", ); assert_eq!(url, "https://api.githubcopilot.com/v1/responses"); @@ -1548,6 +1587,7 @@ mod tests { AuthStrategy::Bearer, "openai_chat", false, + "gpt-5.4", ); assert_eq!(url, "https://example.com/v1/chat/completions"); @@ -1560,6 +1600,7 @@ mod tests { AuthStrategy::Bearer, "openai_responses", false, + "gpt-5.4", ); assert_eq!(url, "https://example.com/v1/responses"); @@ -1572,11 +1613,57 @@ mod tests { AuthStrategy::Anthropic, "anthropic", false, + "claude-sonnet-4-6", ); assert_eq!(url, "https://api.anthropic.com/v1/messages"); } + #[test] + fn test_resolve_claude_stream_url_for_gemini_native() { + let url = StreamCheckService::resolve_claude_stream_url( + "https://generativelanguage.googleapis.com", + AuthStrategy::Google, + "gemini_native", + false, + "gemini-2.5-flash", + ); + + assert_eq!( + url, + "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse" + ); + } + + #[test] + fn test_resolve_claude_stream_url_for_gemini_native_full_url_openai_compat_base() { + let url = StreamCheckService::resolve_claude_stream_url( + "https://generativelanguage.googleapis.com/v1beta/openai/chat/completions", + AuthStrategy::Google, + "gemini_native", + true, + "gemini-2.5-flash", + ); + + 
assert_eq!( + url, + "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.5-flash:streamGenerateContent?alt=sse" + ); + } + + #[test] + fn test_resolve_claude_stream_url_for_gemini_native_opaque_full_url() { + let url = StreamCheckService::resolve_claude_stream_url( + "https://relay.example/custom/generate-content", + AuthStrategy::Google, + "gemini_native", + true, + "gemini-2.5-flash", + ); + + assert_eq!(url, "https://relay.example/custom/generate-content?alt=sse"); + } + #[test] fn test_resolve_codex_stream_urls_for_full_url_mode() { let urls = StreamCheckService::resolve_codex_stream_urls( diff --git a/src/components/providers/forms/ClaudeFormFields.tsx b/src/components/providers/forms/ClaudeFormFields.tsx index 0c9482a53..4a6fd8399 100644 --- a/src/components/providers/forms/ClaudeFormFields.tsx +++ b/src/components/providers/forms/ClaudeFormFields.tsx @@ -113,7 +113,7 @@ interface ClaudeFormFieldsProps { // Speed Test Endpoints speedTestEndpoints: EndpointCandidate[]; - // API Format (for third-party providers that use OpenAI Chat Completions format) + // API Format (for Claude-compatible providers that need request/response conversion) apiFormat: ClaudeApiFormat; onApiFormatChange: (format: ClaudeApiFormat) => void; @@ -436,7 +436,14 @@ export function ClaudeFormFields({ ? t("providerForm.apiHintResponses") : apiFormat === "openai_chat" ? t("providerForm.apiHintOAI") - : t("providerForm.apiHint") + : apiFormat === "gemini_native" + ? t("providerForm.apiHintGeminiNative") + : t("providerForm.apiHint") + } + fullUrlHint={ + apiFormat === "gemini_native" + ? t("providerForm.fullUrlHintGeminiNative") + : undefined } onManageClick={() => onEndpointModalToggle(true)} showFullUrlToggle={true} @@ -511,6 +518,11 @@ export function ClaudeFormFields({ defaultValue: "OpenAI Responses API (需转换)", })} + + {t("providerForm.apiFormatGeminiNative", { + defaultValue: "Gemini Native generateContent (需转换)", + })} +

diff --git a/src/components/providers/forms/shared/EndpointField.tsx b/src/components/providers/forms/shared/EndpointField.tsx index bf33afc92..4d5f53f1e 100644 --- a/src/components/providers/forms/shared/EndpointField.tsx +++ b/src/components/providers/forms/shared/EndpointField.tsx @@ -11,6 +11,7 @@ interface EndpointFieldProps { onChange: (value: string) => void; placeholder: string; hint?: string; + fullUrlHint?: string; showManageButton?: boolean; onManageClick?: () => void; manageButtonLabel?: string; @@ -26,6 +27,7 @@ export function EndpointField({ onChange, placeholder, hint, + fullUrlHint, showManageButton = true, onManageClick, manageButtonLabel, @@ -40,7 +42,8 @@ export function EndpointField({ }); const effectiveHint = showFullUrlToggle && isFullUrl - ? t("providerForm.fullUrlHint", { + ? fullUrlHint || + t("providerForm.fullUrlHint", { defaultValue: "💡 请填写完整请求 URL,并且必须开启代理后使用;代理将直接使用此 URL,不拼接路径", }) diff --git a/src/config/claudeProviderPresets.ts b/src/config/claudeProviderPresets.ts index 44aab5213..7fb74bd68 100644 --- a/src/config/claudeProviderPresets.ts +++ b/src/config/claudeProviderPresets.ts @@ -49,7 +49,12 @@ export interface ProviderPreset { // - "anthropic" (默认): Anthropic Messages API 格式,直接透传 // - "openai_chat": OpenAI Chat Completions 格式,需要格式转换 // - "openai_responses": OpenAI Responses API 格式,需要格式转换 - apiFormat?: "anthropic" | "openai_chat" | "openai_responses"; + // - "gemini_native": Gemini Native generateContent API 格式,需要格式转换 + apiFormat?: + | "anthropic" + | "openai_chat" + | "openai_responses" + | "gemini_native"; // 供应商类型标识(用于特殊供应商检测) // - "github_copilot": GitHub Copilot 供应商(需要 OAuth 认证) @@ -80,6 +85,27 @@ export const providerPresets: ProviderPreset[] = [ icon: "anthropic", iconColor: "#D4915D", }, + { + name: "Gemini Native", + websiteUrl: "https://ai.google.dev/gemini-api", + apiKeyUrl: "https://aistudio.google.com/app/apikey", + apiKeyField: "ANTHROPIC_API_KEY", + settingsConfig: { + env: { + ANTHROPIC_BASE_URL: 
"https://generativelanguage.googleapis.com", + ANTHROPIC_API_KEY: "", + ANTHROPIC_MODEL: "gemini-2.5-pro", + ANTHROPIC_DEFAULT_HAIKU_MODEL: "gemini-2.5-flash", + ANTHROPIC_DEFAULT_SONNET_MODEL: "gemini-2.5-pro", + ANTHROPIC_DEFAULT_OPUS_MODEL: "gemini-2.5-pro", + }, + }, + category: "third_party", + apiFormat: "gemini_native", + endpointCandidates: ["https://generativelanguage.googleapis.com"], + icon: "gemini", + iconColor: "#4285F4", + }, { name: "Shengsuanyun", nameKey: "providerForm.presets.shengsuanyun", diff --git a/src/i18n/locales/en.json b/src/i18n/locales/en.json index 6a4fc6215..fba579456 100644 --- a/src/i18n/locales/en.json +++ b/src/i18n/locales/en.json @@ -790,6 +790,7 @@ "modelHint": "💡 Leave blank to use provider's default model", "apiHint": "💡 Fill in Claude API compatible service endpoint, avoid trailing slash", "apiHintOAI": "💡 Fill in OpenAI Chat Completions compatible service endpoint, avoid trailing slash", + "apiHintGeminiNative": "💡 Prefer a Gemini Native base URL such as https://generativelanguage.googleapis.com or https://generativelanguage.googleapis.com/v1beta; the proxy will append models/*:generateContent automatically", "codexApiHint": "💡 Fill in service endpoint compatible with OpenAI Response format", "fillSupplierName": "Please fill in provider name", "fillConfigContent": "Please fill in configuration content", @@ -812,9 +813,11 @@ "fullUrlEnabled": "Full URL Mode", "fullUrlDisabled": "Mark as Full URL", "fullUrlHint": "💡 Enter the full request URL. This mode requires the proxy to be enabled, and the proxy will use the URL as-is without appending a path", + "fullUrlHintGeminiNative": "💡 In Gemini Native full URL mode, two inputs are supported: 1. official/structured Gemini URLs, which will still be normalized to the requested model and streaming method; 2. 
opaque custom relay URLs, which will be used mostly as-is with only query parameters appended", "apiFormatAnthropic": "Anthropic Messages (Native)", "apiFormatOpenAIChat": "OpenAI Chat Completions (Requires proxy)", "apiFormatOpenAIResponses": "OpenAI Responses API (Requires proxy)", + "apiFormatGeminiNative": "Gemini Native generateContent (Requires proxy)", "authField": "Auth Field", "authFieldAuthToken": "ANTHROPIC_AUTH_TOKEN (Default)", "authFieldApiKey": "ANTHROPIC_API_KEY", diff --git a/src/i18n/locales/ja.json b/src/i18n/locales/ja.json index 65d0c4aaf..edc9d9c86 100644 --- a/src/i18n/locales/ja.json +++ b/src/i18n/locales/ja.json @@ -790,6 +790,7 @@ "modelHint": "💡 空欄ならプロバイダーのデフォルトモデルを使用します", "apiHint": "💡 Claude API 互換サービスのエンドポイントを入力してください。末尾にスラッシュを付けないでください", "apiHintOAI": "💡 OpenAI Chat Completions 互換サービスのエンドポイントを入力してください。末尾にスラッシュを付けないでください", + "apiHintGeminiNative": "💡 Gemini Native では https://generativelanguage.googleapis.com または https://generativelanguage.googleapis.com/v1beta のような base URL を推奨します。プロキシが models/*:generateContent を自動補完します", "codexApiHint": "💡 OpenAI Response 互換のサービスエンドポイントを入力してください", "fillSupplierName": "プロバイダー名を入力してください", "fillConfigContent": "設定内容を入力してください", @@ -812,9 +813,11 @@ "fullUrlEnabled": "フル URL モード", "fullUrlDisabled": "フル URL として設定", "fullUrlHint": "💡 完全なリクエスト URL を入力してください。このモードはプロキシを有効にして使用する必要があり、プロキシはこの URL をそのまま使用し、パスを追加しません", + "fullUrlHintGeminiNative": "💡 Gemini Native のフル URL モードでは 2 種類の入力を扱えます。1. 公式/構造化された Gemini URL は、要求されたモデルやストリーミング方式に合わせて正規化されます。2. 
カスタム relay の opaque な完全 URL は、主にそのまま使用され、必要なクエリだけ追加されます", "apiFormatAnthropic": "Anthropic Messages(ネイティブ)", "apiFormatOpenAIChat": "OpenAI Chat Completions(プロキシが必要)", "apiFormatOpenAIResponses": "OpenAI Responses API(プロキシが必要)", + "apiFormatGeminiNative": "Gemini Native generateContent(プロキシが必要)", "authField": "認証フィールド", "authFieldAuthToken": "ANTHROPIC_AUTH_TOKEN(デフォルト)", "authFieldApiKey": "ANTHROPIC_API_KEY", diff --git a/src/i18n/locales/zh.json b/src/i18n/locales/zh.json index 0bd66025e..09b873086 100644 --- a/src/i18n/locales/zh.json +++ b/src/i18n/locales/zh.json @@ -791,6 +791,7 @@ "modelHint": "💡 留空将使用供应商的默认模型", "apiHint": "💡 填写兼容 Claude API 的服务端点地址,不要以斜杠结尾", "apiHintOAI": "💡 填写兼容 OpenAI Chat Completions 的服务端点地址,不要以斜杠结尾", + "apiHintGeminiNative": "💡 建议填写 Gemini Native 的 base URL,例如 https://generativelanguage.googleapis.com 或 https://generativelanguage.googleapis.com/v1beta;代理会自动补全 models/*:generateContent", "codexApiHint": "💡 填写兼容 OpenAI Response 格式的服务端点地址", "fillSupplierName": "请填写供应商名称", "fillConfigContent": "请填写配置内容", @@ -813,9 +814,11 @@ "fullUrlEnabled": "完整 URL 模式", "fullUrlDisabled": "标记为完整 URL", "fullUrlHint": "💡 请填写完整请求 URL,并且必须开启代理后使用;代理将直接使用此 URL,不拼接路径", + "fullUrlHintGeminiNative": "💡 Gemini Native 下,完整 URL 模式同时兼容两类地址:1. 官方/标准 Gemini URL,代理会按模型和流式参数自动归一化;2. 
自定义 relay 的完整 URL,代理会尽量原样使用,只补查询参数,不再强行追加 models 路径", "apiFormatAnthropic": "Anthropic Messages (原生)", "apiFormatOpenAIChat": "OpenAI Chat Completions (需开启代理)", "apiFormatOpenAIResponses": "OpenAI Responses API (需开启代理)", + "apiFormatGeminiNative": "Gemini Native generateContent (需开启代理)", "authField": "认证字段", "authFieldAuthToken": "ANTHROPIC_AUTH_TOKEN(默认)", "authFieldApiKey": "ANTHROPIC_API_KEY", diff --git a/src/types.ts b/src/types.ts index 90cb06a22..1cc0a4fc8 100644 --- a/src/types.ts +++ b/src/types.ts @@ -159,7 +159,12 @@ export interface ProviderMeta { // - "anthropic": 原生 Anthropic Messages API 格式,直接透传 // - "openai_chat": OpenAI Chat Completions 格式,需要格式转换 // - "openai_responses": OpenAI Responses API 格式,需要格式转换 - apiFormat?: "anthropic" | "openai_chat" | "openai_responses"; + // - "gemini_native": Gemini Native generateContent API 格式,需要格式转换 + apiFormat?: + | "anthropic" + | "openai_chat" + | "openai_responses" + | "gemini_native"; // 通用认证绑定 authBinding?: AuthBinding; // Claude 认证字段名 @@ -184,7 +189,12 @@ export type SkillStorageLocation = "cc_switch" | "unified"; // - "anthropic": 原生 Anthropic Messages API 格式,直接透传 // - "openai_chat": OpenAI Chat Completions 格式,需要格式转换 // - "openai_responses": OpenAI Responses API 格式,需要格式转换 -export type ClaudeApiFormat = "anthropic" | "openai_chat" | "openai_responses"; +// - "gemini_native": Gemini Native generateContent API 格式,需要格式转换 +export type ClaudeApiFormat = + | "anthropic" + | "openai_chat" + | "openai_responses" + | "gemini_native"; // Claude 认证字段类型 export type ClaudeApiKeyField = "ANTHROPIC_AUTH_TOKEN" | "ANTHROPIC_API_KEY";