From 75feba210758d66629db9a7fbae96f922a596ab4 Mon Sep 17 00:00:00 2001
From: "gcp-cherry-pick-bot[bot]" <98988430+gcp-cherry-pick-bot[bot]@users.noreply.github.com>
Date: Fri, 25 Apr 2025 14:47:08 +0200
Subject: [PATCH] assistant: Fix issue when using inline assistant with Gemini
 models (cherry-pick #29407) (#29408)

Cherry-picked assistant: Fix issue when using inline assistant with Gemini models (#29407)

Closes #29020

Release Notes:

- assistant: Fix issue when using inline assistant with Gemini models

Co-authored-by: Bennet Bo Fenner
---
 .../assistant_context_editor/src/context.rs   |  4 +++-
 crates/language_models/src/provider/google.rs | 21 ++++++++++++-------
 2 files changed, 17 insertions(+), 8 deletions(-)

diff --git a/crates/assistant_context_editor/src/context.rs b/crates/assistant_context_editor/src/context.rs
index 398a6659e13bd6b01fb51d7ea9047c666800a308..3c8548022ed3a3837ba380737dbdabcc4a3d9476 100644
--- a/crates/assistant_context_editor/src/context.rs
+++ b/crates/assistant_context_editor/src/context.rs
@@ -2611,7 +2611,9 @@ impl AssistantContext {
                     .map(MessageContent::Text),
             );
 
-            completion_request.messages.push(request_message);
+            if !request_message.contents_empty() {
+                completion_request.messages.push(request_message);
+            }
         }
 
         if let RequestType::SuggestEdits = request_type {
diff --git a/crates/language_models/src/provider/google.rs b/crates/language_models/src/provider/google.rs
index 152eef13042fa829f4a9b2b030d504fed6136f08..be7a98cd7bc390f1c61faa3f4c41d907c68ff0b4 100644
--- a/crates/language_models/src/provider/google.rs
+++ b/crates/language_models/src/provider/google.rs
@@ -434,13 +434,20 @@ pub fn into_google(
         contents: request
             .messages
             .into_iter()
-            .map(|message| google_ai::Content {
-                parts: map_content(message.content),
-                role: match message.role {
-                    Role::User => google_ai::Role::User,
-                    Role::Assistant => google_ai::Role::Model,
-                    Role::System => google_ai::Role::User, // Google AI doesn't have a system role
-                },
+            .filter_map(|message| {
+                let parts = map_content(message.content);
+                if parts.is_empty() {
+                    None
+                } else {
+                    Some(google_ai::Content {
+                        parts,
+                        role: match message.role {
+                            Role::User => google_ai::Role::User,
+                            Role::Assistant => google_ai::Role::Model,
+                            Role::System => google_ai::Role::User, // Google AI doesn't have a system role
+                        },
+                    })
+                }
             })
             .collect(),
         generation_config: Some(google_ai::GenerationConfig {
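
Both hunks apply the same guard: map the message content to parts and drop the message entirely when nothing remains, since Gemini models reportedly reject request entries with an empty parts list. A minimal, self-contained Rust sketch of that pattern follows; `Role`, `Content`, `map_content`, and `build_contents` here are simplified stand-ins for illustration, not the actual Zed or google_ai types.

    // Simplified stand-ins for illustration, not the real zed / google_ai types.
    #[derive(Debug, Clone, Copy)]
    enum Role {
        User,
        Model,
    }

    #[derive(Debug)]
    struct Content {
        role: Role,
        parts: Vec<String>,
    }

    // Stand-in for `map_content`: split a message body into parts, dropping
    // whitespace-only text so an empty message produces no parts at all.
    fn map_content(text: &str) -> Vec<String> {
        text.lines()
            .map(str::trim)
            .filter(|line| !line.is_empty())
            .map(str::to_owned)
            .collect()
    }

    // Build the request contents, skipping any message whose mapped parts are
    // empty; this mirrors the filter_map shape used in the google.rs hunk.
    fn build_contents(messages: Vec<(Role, String)>) -> Vec<Content> {
        messages
            .into_iter()
            .filter_map(|(role, text)| {
                let parts = map_content(&text);
                if parts.is_empty() {
                    None
                } else {
                    Some(Content { role, parts })
                }
            })
            .collect()
    }

    fn main() {
        let contents = build_contents(vec![
            (Role::User, "Refactor this function".to_owned()),
            (Role::Model, String::new()),     // empty turn: dropped
            (Role::User, "   \n".to_owned()), // whitespace-only turn: dropped
        ]);
        assert_eq!(contents.len(), 1);
        println!("{contents:?}");
    }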