Add Gemini 3 support to Copilot (#43096)

Created by Richard Feldman

Closes #43024

Release Notes:

- Add support for Gemini 3 to Copilot

Change summary

crates/copilot/src/copilot_chat.rs                  |  20 +
crates/copilot/src/copilot_responses.rs             |   3 
crates/language_models/src/provider/copilot_chat.rs | 156 +++++++++++++++
3 files changed, 171 insertions(+), 8 deletions(-)

Detailed changes

crates/copilot/src/copilot_chat.rs

@@ -294,6 +294,10 @@ pub enum ChatMessage {
         content: ChatMessageContent,
         #[serde(default, skip_serializing_if = "Vec::is_empty")]
         tool_calls: Vec<ToolCall>,
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        reasoning_opaque: Option<String>,
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        reasoning_text: Option<String>,
     },
     User {
         content: ChatMessageContent,
@@ -386,6 +390,8 @@ pub struct ResponseDelta {
     pub role: Option<Role>,
     #[serde(default)]
     pub tool_calls: Vec<ToolCallChunk>,
+    pub reasoning_opaque: Option<String>,
+    pub reasoning_text: Option<String>,
 }
 #[derive(Deserialize, Debug, Eq, PartialEq)]
 pub struct ToolCallChunk {
@@ -786,13 +792,13 @@ async fn stream_completion(
     is_user_initiated: bool,
 ) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
     let is_vision_request = request.messages.iter().any(|message| match message {
-      ChatMessage::User { content }
-      | ChatMessage::Assistant { content, .. }
-      | ChatMessage::Tool { content, .. } => {
-          matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
-      }
-      _ => false,
-  });
+        ChatMessage::User { content }
+        | ChatMessage::Assistant { content, .. }
+        | ChatMessage::Tool { content, .. } => {
+            matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
+        }
+        _ => false,
+    });
 
     let request_initiator = if is_user_initiated { "user" } else { "agent" };
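
The two new `ResponseDelta` fields added above lean on serde's built-in `Option` handling: a key absent from the wire deserializes to `None` without any extra attributes, so responses from models that never send reasoning data are unaffected. A minimal standalone sketch of that behavior (the struct below is a trimmed stand-in for the real `ResponseDelta`, not the actual definition):

use serde::Deserialize;

// Trimmed stand-in for ResponseDelta; the real struct lives in
// crates/copilot/src/copilot_chat.rs and carries more fields.
#[derive(Deserialize, Debug)]
struct ResponseDelta {
    content: Option<String>,
    reasoning_opaque: Option<String>,
    reasoning_text: Option<String>,
}

fn main() -> serde_json::Result<()> {
    // A Gemini 3 style delta carrying reasoning data.
    let gemini: ResponseDelta = serde_json::from_str(
        r#"{"content": null,
            "reasoning_opaque": "opaque-token",
            "reasoning_text": "thinking out loud"}"#,
    )?;
    assert_eq!(gemini.reasoning_opaque.as_deref(), Some("opaque-token"));

    // A delta from a model that omits the reasoning keys entirely.
    let plain: ResponseDelta = serde_json::from_str(r#"{"content": "hi"}"#)?;
    assert!(plain.reasoning_opaque.is_none() && plain.reasoning_text.is_none());
    Ok(())
}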
 

crates/copilot/src/copilot_responses.rs

@@ -313,7 +313,8 @@ pub async fn stream_response(
     };
 
     let is_streaming = request.stream;
-    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
+    let json = serde_json::to_string(&request)?;
+    let request = request_builder.body(AsyncBody::from(json))?;
     let mut response = client.send(request).await?;
 
     if !response.status().is_success() {
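
This hunk only splits serialization out of the builder chain; the request that goes out is unchanged. The upside is that the serialized payload is bound to a local, so a serialization failure surfaces on its own line and the exact bytes can be inspected while debugging. A hedged sketch of the pattern (the `Request` type here is an illustrative stand-in, not the crate's real request type):

use serde::Serialize;

// Illustrative request shape, standing in for the real request struct.
#[derive(Serialize)]
struct Request<'a> {
    model: &'a str,
    stream: bool,
}

fn main() -> serde_json::Result<()> {
    let request = Request { model: "some-model-id", stream: true };
    // As in the diff: serialize first, then hand the bound body to the builder.
    let json = serde_json::to_string(&request)?;
    println!("outgoing payload: {json}");
    Ok(())
}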

crates/language_models/src/provider/copilot_chat.rs

@@ -367,12 +367,16 @@ pub fn map_to_language_model_completion_events(
     struct State {
         events: Pin<Box<dyn Send + Stream<Item = Result<ResponseEvent>>>>,
         tool_calls_by_index: HashMap<usize, RawToolCall>,
+        reasoning_opaque: Option<String>,
+        reasoning_text: Option<String>,
     }
 
     futures::stream::unfold(
         State {
             events,
             tool_calls_by_index: HashMap::default(),
+            reasoning_opaque: None,
+            reasoning_text: None,
         },
         move |mut state| async move {
             if let Some(event) = state.events.next().await {
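
For context, `map_to_language_model_completion_events` is built on `futures::stream::unfold`: the `State` above is threaded through every step, and each step can emit output events derived from the next upstream item. A self-contained sketch of that pattern with a toy state (a running sum) in place of the real `State`:

use futures::{StreamExt, stream};

fn main() {
    let numbers = stream::iter(vec![1u32, 2, 3]);
    // unfold threads (stream, accumulator) state through each step, just as
    // the real code threads State { events, tool_calls_by_index, .. }.
    let mapped = stream::unfold((numbers, 0u32), |(mut events, mut sum)| async move {
        if let Some(n) = events.next().await {
            sum += n;
            // Emit an item and hand the updated state to the next step.
            Some((sum, (events, sum)))
        } else {
            None // upstream exhausted, so the output stream ends too
        }
    });
    let out = futures::executor::block_on(mapped.collect::<Vec<_>>());
    assert_eq!(out, vec![1, 3, 6]);
}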
@@ -403,6 +407,14 @@ pub fn map_to_language_model_completion_events(
                             events.push(Ok(LanguageModelCompletionEvent::Text(content)));
                         }
 
+                        // Capture reasoning data from the delta (e.g. for Gemini 3)
+                        if let Some(opaque) = delta.reasoning_opaque.clone() {
+                            state.reasoning_opaque = Some(opaque);
+                        }
+                        if let Some(text) = delta.reasoning_text.clone() {
+                            state.reasoning_text = Some(text);
+                        }
+
                         for (index, tool_call) in delta.tool_calls.iter().enumerate() {
                             let tool_index = tool_call.index.unwrap_or(index);
                             let entry = state.tool_calls_by_index.entry(tool_index).or_default();
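
Note that the capture above is last-write-wins per field: a `Some` in a delta replaces whatever was stored, a `None` leaves it alone, and nothing is concatenated. A self-contained sketch of those semantics, using plain `Option`s in place of the real `State` fields:

fn main() {
    // Stand-ins for state.reasoning_opaque / state.reasoning_text.
    let mut reasoning_opaque: Option<String> = None;
    let mut reasoning_text: Option<String> = None;

    // Simulated (reasoning_opaque, reasoning_text) pairs across deltas.
    let deltas = [
        (Some("sig-1".to_string()), None),
        (None, Some("partial thought".to_string())),
        (Some("sig-2".to_string()), None), // replaces "sig-1"
    ];

    for (opaque, text) in deltas {
        // Mirrors the diff: Some replaces the stored value, None preserves it.
        if let Some(o) = opaque {
            reasoning_opaque = Some(o);
        }
        if let Some(t) = text {
            reasoning_text = Some(t);
        }
    }

    assert_eq!(reasoning_opaque.as_deref(), Some("sig-2"));
    assert_eq!(reasoning_text.as_deref(), Some("partial thought"));
}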
@@ -445,6 +457,32 @@ pub fn map_to_language_model_completion_events(
                                 )));
                             }
                             Some("tool_calls") => {
+                                // Gemini 3 models send reasoning_opaque/reasoning_text that must
+                                // be preserved and sent back in subsequent requests. Emit as
+                                // ReasoningDetails so the agent stores it in the message.
+                                if state.reasoning_opaque.is_some()
+                                    || state.reasoning_text.is_some()
+                                {
+                                    let mut details = serde_json::Map::new();
+                                    if let Some(opaque) = state.reasoning_opaque.take() {
+                                        details.insert(
+                                            "reasoning_opaque".to_string(),
+                                            serde_json::Value::String(opaque),
+                                        );
+                                    }
+                                    if let Some(text) = state.reasoning_text.take() {
+                                        details.insert(
+                                            "reasoning_text".to_string(),
+                                            serde_json::Value::String(text),
+                                        );
+                                    }
+                                    events.push(Ok(
+                                        LanguageModelCompletionEvent::ReasoningDetails(
+                                            serde_json::Value::Object(details),
+                                        ),
+                                    ));
+                                }
+
                                 events.extend(state.tool_calls_by_index.drain().map(
                                     |(_, tool_call)| {
                                         // The model can output an empty string
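
The `ReasoningDetails` payload assembled above is a plain JSON object. Assuming both fields were captured (values borrowed from the test below), it is equivalent to this `serde_json::json!` literal:

use serde_json::json;

fn main() {
    // Same shape the Map-building code in the diff produces.
    let details = json!({
        "reasoning_opaque": "encrypted_reasoning_token_xyz",
        "reasoning_text": "Let me check the directory",
    });
    assert_eq!(
        details.get("reasoning_opaque").and_then(|v| v.as_str()),
        Some("encrypted_reasoning_token_xyz"),
    );
}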
@@ -807,6 +845,22 @@ fn into_copilot_chat(
                     buffer
                 };
 
+                // Extract reasoning_opaque and reasoning_text from reasoning_details
+                let (reasoning_opaque, reasoning_text) =
+                    if let Some(details) = &message.reasoning_details {
+                        let opaque = details
+                            .get("reasoning_opaque")
+                            .and_then(|v| v.as_str())
+                            .map(|s| s.to_string());
+                        let text = details
+                            .get("reasoning_text")
+                            .and_then(|v| v.as_str())
+                            .map(|s| s.to_string());
+                        (opaque, text)
+                    } else {
+                        (None, None)
+                    };
+
                 messages.push(ChatMessage::Assistant {
                     content: if text_content.is_empty() {
                         ChatMessageContent::empty()
@@ -814,6 +868,8 @@ fn into_copilot_chat(
                         text_content.into()
                     },
                     tool_calls,
+                    reasoning_opaque,
+                    reasoning_text,
                 });
             }
             Role::System => messages.push(ChatMessage::System {
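
On the way back out, the `skip_serializing_if` attributes on the new `Assistant` fields (see the first hunk in copilot_chat.rs) mean the reasoning keys only appear when something was captured. A minimal sketch of that round trip, using a trimmed stand-in rather than the real `ChatMessage` enum:

use serde::Serialize;

// Trimmed stand-in for the Assistant variant's reasoning fields.
#[derive(Serialize)]
struct AssistantMessage {
    content: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    reasoning_opaque: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    reasoning_text: Option<String>,
}

fn main() -> serde_json::Result<()> {
    // With reasoning captured, both keys are echoed back to the API.
    let with = AssistantMessage {
        content: String::new(),
        reasoning_opaque: Some("opaque-token".into()),
        reasoning_text: Some("thinking".into()),
    };
    println!("{}", serde_json::to_string(&with)?);

    // With nothing captured, the keys are omitted entirely, so requests
    // for non-Gemini models serialize exactly as they did before this PR.
    let without = AssistantMessage {
        content: "hi".into(),
        reasoning_opaque: None,
        reasoning_text: None,
    };
    assert_eq!(serde_json::to_string(&without)?, r#"{"content":"hi"}"#);
    Ok(())
}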
@@ -1317,6 +1373,106 @@ mod tests {
             other => panic!("expected HttpResponseError, got {:?}", other),
         }
     }
+
+    #[test]
+    fn chat_completions_stream_maps_reasoning_data() {
+        use copilot::copilot_chat::ResponseEvent;
+
+        let events = vec![
+            ResponseEvent {
+                choices: vec![copilot::copilot_chat::ResponseChoice {
+                    index: Some(0),
+                    finish_reason: None,
+                    delta: Some(copilot::copilot_chat::ResponseDelta {
+                        content: None,
+                        role: Some(copilot::copilot_chat::Role::Assistant),
+                        tool_calls: vec![copilot::copilot_chat::ToolCallChunk {
+                            index: Some(0),
+                            id: Some("call_abc123".to_string()),
+                            function: Some(copilot::copilot_chat::FunctionChunk {
+                                name: Some("list_directory".to_string()),
+                                arguments: Some("{\"path\":\"test\"}".to_string()),
+                                thought_signature: None,
+                            }),
+                        }],
+                        reasoning_opaque: Some("encrypted_reasoning_token_xyz".to_string()),
+                        reasoning_text: Some("Let me check the directory".to_string()),
+                    }),
+                    message: None,
+                }],
+                id: "chatcmpl-123".to_string(),
+                usage: None,
+            },
+            ResponseEvent {
+                choices: vec![copilot::copilot_chat::ResponseChoice {
+                    index: Some(0),
+                    finish_reason: Some("tool_calls".to_string()),
+                    delta: Some(copilot::copilot_chat::ResponseDelta {
+                        content: None,
+                        role: None,
+                        tool_calls: vec![],
+                        reasoning_opaque: None,
+                        reasoning_text: None,
+                    }),
+                    message: None,
+                }],
+                id: "chatcmpl-123".to_string(),
+                usage: None,
+            },
+        ];
+
+        let mapped = futures::executor::block_on(async {
+            map_to_language_model_completion_events(
+                Box::pin(futures::stream::iter(events.into_iter().map(Ok))),
+                true,
+            )
+            .collect::<Vec<_>>()
+            .await
+        });
+
+        let mut has_reasoning_details = false;
+        let mut has_tool_use = false;
+        let mut reasoning_opaque_value: Option<String> = None;
+        let mut reasoning_text_value: Option<String> = None;
+
+        for event_result in mapped {
+            match event_result {
+                Ok(LanguageModelCompletionEvent::ReasoningDetails(details)) => {
+                    has_reasoning_details = true;
+                    reasoning_opaque_value = details
+                        .get("reasoning_opaque")
+                        .and_then(|v| v.as_str())
+                        .map(|s| s.to_string());
+                    reasoning_text_value = details
+                        .get("reasoning_text")
+                        .and_then(|v| v.as_str())
+                        .map(|s| s.to_string());
+                }
+                Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) => {
+                    has_tool_use = true;
+                    assert_eq!(tool_use.id.to_string(), "call_abc123");
+                    assert_eq!(tool_use.name.as_ref(), "list_directory");
+                }
+                _ => {}
+            }
+        }
+
+        assert!(
+            has_reasoning_details,
+            "Should emit ReasoningDetails event for Gemini 3 reasoning"
+        );
+        assert!(has_tool_use, "Should emit ToolUse event");
+        assert_eq!(
+            reasoning_opaque_value,
+            Some("encrypted_reasoning_token_xyz".to_string()),
+            "Should capture reasoning_opaque"
+        );
+        assert_eq!(
+            reasoning_text_value,
+            Some("Let me check the directory".to_string()),
+            "Should capture reasoning_text"
+        );
+    }
 }
 struct ConfigurationView {
     copilot_status: Option<copilot::Status>,