// google.rs

  1use anyhow::Result;
  2use futures::{FutureExt, Stream, StreamExt, future::BoxFuture};
  3use google_ai::{
  4    FunctionDeclaration, GenerateContentResponse, GoogleModelMode, Part, SystemInstruction,
  5    ThinkingConfig, UsageMetadata,
  6};
  7use gpui::{App, AppContext as _};
  8use language_model::{
  9    LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelRequest,
 10    LanguageModelToolChoice, LanguageModelToolUse, LanguageModelToolUseId, MessageContent, Role,
 11    StopReason,
 12};
 13use schemars::JsonSchema;
 14use serde::{Deserialize, Serialize};
 15pub use settings::GoogleAvailableModel as AvailableModel;
 16use std::{
 17    pin::Pin,
 18    sync::atomic::{self, AtomicU64},
 19};
 20
/// User-configurable settings for the Google AI provider.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct GoogleSettings {
    /// Base URL of the Google AI API endpoint.
    pub api_url: String,
    /// Models the user has made available for this provider.
    pub available_models: Vec<AvailableModel>,
}
 26
/// Reasoning mode for a Google model, as configured in settings.
///
/// Serialized with a lowercase `"type"` tag (`"default"` / `"thinking"`).
#[derive(Clone, Copy, Debug, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ModelMode {
    /// Standard generation with no explicit reasoning budget.
    #[default]
    Default,
    /// Extended-thinking mode with an optional token budget.
    Thinking {
        /// The maximum number of tokens to use for reasoning. Must be lower than the model's `max_output_tokens`.
        budget_tokens: Option<u32>,
    },
}
 37
/// Converts a provider-agnostic `LanguageModelRequest` into the wire format
/// expected by the Google Generative AI API.
///
/// A leading `Role::System` message (if any) is pulled out of the message list
/// and sent as the request's `system_instruction`; all remaining messages
/// become `contents`.
pub fn into_google(
    mut request: LanguageModelRequest,
    model_id: String,
    mode: GoogleModelMode,
) -> google_ai::GenerateContentRequest {
    /// Maps message content items to Google `Part`s, dropping items that have
    /// no Google representation (empty text, unsigned/redacted thinking).
    fn map_content(content: Vec<MessageContent>) -> Vec<Part> {
        content
            .into_iter()
            .flat_map(|content| match content {
                language_model::MessageContent::Text(text) => {
                    if !text.is_empty() {
                        vec![Part::TextPart(google_ai::TextPart { text })]
                    } else {
                        vec![]
                    }
                }
                // Signed thinking: only the signature is echoed back to the
                // API; the thought text itself is dropped.
                language_model::MessageContent::Thinking {
                    text: _,
                    signature: Some(signature),
                } => {
                    if !signature.is_empty() {
                        vec![Part::ThoughtPart(google_ai::ThoughtPart {
                            thought: true,
                            thought_signature: signature,
                        })]
                    } else {
                        vec![]
                    }
                }
                // NOTE: must stay below the `signature: Some(..)` arm — this
                // catches unsigned thinking, which is not sent at all.
                language_model::MessageContent::Thinking { .. } => {
                    vec![]
                }
                language_model::MessageContent::RedactedThinking(_) => vec![],
                language_model::MessageContent::Image(image) => {
                    // Images are always labeled PNG; `image.source` is assumed
                    // to stringify to base64 data suitable for `inline_data`
                    // — TODO confirm against the `image` type's definition.
                    vec![Part::InlineDataPart(google_ai::InlineDataPart {
                        inline_data: google_ai::GenerativeContentBlob {
                            mime_type: "image/png".to_string(),
                            data: image.source.to_string(),
                        },
                    })]
                }
                language_model::MessageContent::ToolUse(tool_use) => {
                    // Normalize empty signatures to `None` so the field is
                    // omitted from the serialized request.
                    let thought_signature = tool_use.thought_signature.filter(|s| !s.is_empty());

                    vec![Part::FunctionCallPart(google_ai::FunctionCallPart {
                        function_call: google_ai::FunctionCall {
                            name: tool_use.name.to_string(),
                            args: tool_use.input,
                        },
                        thought_signature,
                    })]
                }
                language_model::MessageContent::ToolResult(tool_result) => {
                    match tool_result.content {
                        language_model::LanguageModelToolResultContent::Text(text) => {
                            vec![Part::FunctionResponsePart(
                                google_ai::FunctionResponsePart {
                                    function_response: google_ai::FunctionResponse {
                                        name: tool_result.tool_name.to_string(),
                                        response: serde_json::json!({
                                            "output": text
                                        }),
                                    },
                                },
                            )]
                        }
                        language_model::LanguageModelToolResultContent::Image(image) => {
                            // Function responses are JSON-only, so the image is
                            // delivered as a separate inline-data part next to a
                            // placeholder textual response.
                            vec![
                                Part::FunctionResponsePart(google_ai::FunctionResponsePart {
                                    function_response: google_ai::FunctionResponse {
                                        name: tool_result.tool_name.to_string(),
                                        response: serde_json::json!({
                                            "output": "Tool responded with an image"
                                        }),
                                    },
                                }),
                                Part::InlineDataPart(google_ai::InlineDataPart {
                                    inline_data: google_ai::GenerativeContentBlob {
                                        mime_type: "image/png".to_string(),
                                        data: image.source.to_string(),
                                    },
                                }),
                            ]
                        }
                    }
                }
            })
            .collect()
    }

    // A leading system message becomes the dedicated `system_instruction`
    // field rather than a regular content turn.
    let system_instructions = if request
        .messages
        .first()
        .is_some_and(|msg| matches!(msg.role, Role::System))
    {
        let message = request.messages.remove(0);
        Some(SystemInstruction {
            parts: map_content(message.content),
        })
    } else {
        None
    };

    google_ai::GenerateContentRequest {
        model: google_ai::ModelName { model_id },
        system_instruction: system_instructions,
        contents: request
            .messages
            .into_iter()
            .filter_map(|message| {
                let parts = map_content(message.content);
                // Messages whose content mapped to nothing are skipped
                // entirely — presumably the API rejects empty `parts`;
                // verify against the Gemini API docs.
                if parts.is_empty() {
                    None
                } else {
                    Some(google_ai::Content {
                        parts,
                        role: match message.role {
                            Role::User => google_ai::Role::User,
                            Role::Assistant => google_ai::Role::Model,
                            // Non-leading system messages are downgraded to
                            // user turns; Google only has User/Model roles.
                            Role::System => google_ai::Role::User,
                        },
                    })
                }
            })
            .collect(),
        generation_config: Some(google_ai::GenerationConfig {
            candidate_count: Some(1),
            stop_sequences: Some(request.stop),
            max_output_tokens: None,
            // Temperature defaults to 1.0 when the request doesn't set one.
            temperature: request.temperature.map(|t| t as f64).or(Some(1.0)),
            // Forward a thinking budget only when the request allows thinking
            // AND the model is configured for thinking mode.
            thinking_config: match (request.thinking_allowed, mode) {
                (true, GoogleModelMode::Thinking { budget_tokens }) => {
                    budget_tokens.map(|thinking_budget| ThinkingConfig { thinking_budget })
                }
                _ => None,
            },
            top_p: None,
            top_k: None,
        }),
        safety_settings: None,
        // All function declarations are wrapped in a single `Tool`; the field
        // is omitted when the request declares no tools.
        tools: (!request.tools.is_empty()).then(|| {
            vec![google_ai::Tool {
                function_declarations: request
                    .tools
                    .into_iter()
                    .map(|tool| FunctionDeclaration {
                        name: tool.name,
                        description: tool.description,
                        parameters: tool.input_schema,
                    })
                    .collect(),
            }]
        }),
        tool_config: request.tool_choice.map(|choice| google_ai::ToolConfig {
            function_calling_config: google_ai::FunctionCallingConfig {
                mode: match choice {
                    LanguageModelToolChoice::Auto => google_ai::FunctionCallingMode::Auto,
                    LanguageModelToolChoice::Any => google_ai::FunctionCallingMode::Any,
                    LanguageModelToolChoice::None => google_ai::FunctionCallingMode::None,
                },
                allowed_function_names: None,
            },
        }),
    }
}
203
/// Accumulates state while translating a streamed Google response into
/// `LanguageModelCompletionEvent`s.
pub struct GoogleEventMapper {
    // Running usage totals, merged across streamed chunks.
    usage: UsageMetadata,
    // Most recent stop reason observed; emitted when the stream ends.
    stop_reason: StopReason,
}
208
209impl GoogleEventMapper {
210    pub fn new() -> Self {
211        Self {
212            usage: UsageMetadata::default(),
213            stop_reason: StopReason::EndTurn,
214        }
215    }
216
217    pub fn map_stream(
218        mut self,
219        events: Pin<Box<dyn Send + Stream<Item = Result<GenerateContentResponse>>>>,
220    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
221    {
222        events
223            .map(Some)
224            .chain(futures::stream::once(async { None }))
225            .flat_map(move |event| {
226                futures::stream::iter(match event {
227                    Some(Ok(event)) => self.map_event(event),
228                    Some(Err(error)) => {
229                        vec![Err(LanguageModelCompletionError::from(error))]
230                    }
231                    None => vec![Ok(LanguageModelCompletionEvent::Stop(self.stop_reason))],
232                })
233            })
234    }
235
236    pub fn map_event(
237        &mut self,
238        event: GenerateContentResponse,
239    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
240        static TOOL_CALL_COUNTER: AtomicU64 = AtomicU64::new(0);
241
242        let mut events: Vec<_> = Vec::new();
243        let mut wants_to_use_tool = false;
244        if let Some(usage_metadata) = event.usage_metadata {
245            update_usage(&mut self.usage, &usage_metadata);
246            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(
247                convert_usage(&self.usage),
248            )))
249        }
250
251        if let Some(prompt_feedback) = event.prompt_feedback
252            && let Some(block_reason) = prompt_feedback.block_reason.as_deref()
253        {
254            self.stop_reason = match block_reason {
255                "SAFETY" | "OTHER" | "BLOCKLIST" | "PROHIBITED_CONTENT" | "IMAGE_SAFETY" => {
256                    StopReason::Refusal
257                }
258                _ => {
259                    log::error!("Unexpected Google block_reason: {block_reason}");
260                    StopReason::Refusal
261                }
262            };
263            events.push(Ok(LanguageModelCompletionEvent::Stop(self.stop_reason)));
264
265            return events;
266        }
267
268        if let Some(candidates) = event.candidates {
269            for candidate in candidates {
270                if let Some(finish_reason) = candidate.finish_reason.as_deref() {
271                    self.stop_reason = match finish_reason {
272                        "STOP" => StopReason::EndTurn,
273                        "MAX_TOKENS" => StopReason::MaxTokens,
274                        _ => {
275                            log::error!("Unexpected google finish_reason: {finish_reason}");
276                            StopReason::EndTurn
277                        }
278                    };
279                }
280                candidate
281                    .content
282                    .parts
283                    .into_iter()
284                    .for_each(|part| match part {
285                        Part::TextPart(text_part) => {
286                            events.push(Ok(LanguageModelCompletionEvent::Text(text_part.text)))
287                        }
288                        Part::InlineDataPart(_) => {}
289                        Part::FunctionCallPart(function_call_part) => {
290                            wants_to_use_tool = true;
291                            let name: std::sync::Arc<str> =
292                                function_call_part.function_call.name.into();
293                            let next_tool_id =
294                                TOOL_CALL_COUNTER.fetch_add(1, atomic::Ordering::SeqCst);
295                            let id: LanguageModelToolUseId =
296                                format!("{}-{}", name, next_tool_id).into();
297
298                            let thought_signature = function_call_part
299                                .thought_signature
300                                .filter(|s| !s.is_empty());
301
302                            events.push(Ok(LanguageModelCompletionEvent::ToolUse(
303                                LanguageModelToolUse {
304                                    id,
305                                    name,
306                                    is_input_complete: true,
307                                    raw_input: function_call_part.function_call.args.to_string(),
308                                    input: function_call_part.function_call.args,
309                                    thought_signature,
310                                },
311                            )));
312                        }
313                        Part::FunctionResponsePart(_) => {}
314                        Part::ThoughtPart(part) => {
315                            events.push(Ok(LanguageModelCompletionEvent::Thinking {
316                                text: "(Encrypted thought)".to_string(),
317                                signature: Some(part.thought_signature),
318                            }));
319                        }
320                    });
321            }
322        }
323
324        if wants_to_use_tool {
325            self.stop_reason = StopReason::ToolUse;
326            events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
327        }
328        events
329    }
330}
331
332pub fn count_google_tokens(
333    request: LanguageModelRequest,
334    cx: &App,
335) -> BoxFuture<'static, Result<u64>> {
336    cx.background_spawn(async move {
337        let messages = request
338            .messages
339            .into_iter()
340            .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
341                role: match message.role {
342                    Role::User => "user".into(),
343                    Role::Assistant => "assistant".into(),
344                    Role::System => "system".into(),
345                },
346                content: Some(message.string_contents()),
347                name: None,
348                function_call: None,
349            })
350            .collect::<Vec<_>>();
351
352        tiktoken_rs::num_tokens_from_messages("gpt-4", &messages).map(|tokens| tokens as u64)
353    })
354    .boxed()
355}
356
357fn update_usage(usage: &mut UsageMetadata, new: &UsageMetadata) {
358    if let Some(prompt_token_count) = new.prompt_token_count {
359        usage.prompt_token_count = Some(prompt_token_count);
360    }
361    if let Some(cached_content_token_count) = new.cached_content_token_count {
362        usage.cached_content_token_count = Some(cached_content_token_count);
363    }
364    if let Some(candidates_token_count) = new.candidates_token_count {
365        usage.candidates_token_count = Some(candidates_token_count);
366    }
367    if let Some(tool_use_prompt_token_count) = new.tool_use_prompt_token_count {
368        usage.tool_use_prompt_token_count = Some(tool_use_prompt_token_count);
369    }
370    if let Some(thoughts_token_count) = new.thoughts_token_count {
371        usage.thoughts_token_count = Some(thoughts_token_count);
372    }
373    if let Some(total_token_count) = new.total_token_count {
374        usage.total_token_count = Some(total_token_count);
375    }
376}
377
378fn convert_usage(usage: &UsageMetadata) -> language_model::TokenUsage {
379    let prompt_tokens = usage.prompt_token_count.unwrap_or(0);
380    let cached_tokens = usage.cached_content_token_count.unwrap_or(0);
381    let input_tokens = prompt_tokens - cached_tokens;
382    let output_tokens = usage.candidates_token_count.unwrap_or(0);
383
384    language_model::TokenUsage {
385        input_tokens,
386        output_tokens,
387        cache_read_input_tokens: cached_tokens,
388        cache_creation_input_tokens: 0,
389    }
390}
391
// Unit tests for thought-signature handling: signatures produced by the model
// must survive event mapping (`map_event`) and request serialization
// (`into_google`), with empty-string signatures normalized to `None` in both
// directions.
#[cfg(test)]
mod tests {
    use super::*;
    use google_ai::{
        Content, FunctionCall, FunctionCallPart, GenerateContentCandidate, GenerateContentResponse,
        Part, Role as GoogleRole, TextPart,
    };
    use language_model::{LanguageModelToolUseId, MessageContent, Role};
    use serde_json::json;

    // A function call carrying a signature maps to a ToolUse event that
    // preserves that signature.
    #[test]
    fn test_function_call_with_signature_creates_tool_use_with_signature() {
        let mut mapper = GoogleEventMapper::new();

        let response = GenerateContentResponse {
            candidates: Some(vec![GenerateContentCandidate {
                index: Some(0),
                content: Content {
                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
                        function_call: FunctionCall {
                            name: "test_function".to_string(),
                            args: json!({"arg": "value"}),
                        },
                        thought_signature: Some("test_signature_123".to_string()),
                    })],
                    role: GoogleRole::Model,
                },
                finish_reason: None,
                finish_message: None,
                safety_ratings: None,
                citation_metadata: None,
            }]),
            prompt_feedback: None,
            usage_metadata: None,
        };

        let events = mapper.map_event(response);

        // Expect the ToolUse event plus the trailing Stop(ToolUse) event.
        assert_eq!(events.len(), 2);

        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
            assert_eq!(tool_use.name.as_ref(), "test_function");
            assert_eq!(
                tool_use.thought_signature.as_deref(),
                Some("test_signature_123")
            );
        } else {
            panic!("Expected ToolUse event");
        }
    }

    // A function call with no signature yields a ToolUse whose signature is
    // `None`.
    #[test]
    fn test_function_call_without_signature_has_none() {
        let mut mapper = GoogleEventMapper::new();

        let response = GenerateContentResponse {
            candidates: Some(vec![GenerateContentCandidate {
                index: Some(0),
                content: Content {
                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
                        function_call: FunctionCall {
                            name: "test_function".to_string(),
                            args: json!({"arg": "value"}),
                        },
                        thought_signature: None,
                    })],
                    role: GoogleRole::Model,
                },
                finish_reason: None,
                finish_message: None,
                safety_ratings: None,
                citation_metadata: None,
            }]),
            prompt_feedback: None,
            usage_metadata: None,
        };

        let events = mapper.map_event(response);

        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
            assert_eq!(tool_use.thought_signature, None);
        } else {
            panic!("Expected ToolUse event");
        }
    }

    // An empty-string signature from the API is normalized to `None` during
    // event mapping.
    #[test]
    fn test_empty_string_signature_normalized_to_none() {
        let mut mapper = GoogleEventMapper::new();

        let response = GenerateContentResponse {
            candidates: Some(vec![GenerateContentCandidate {
                index: Some(0),
                content: Content {
                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
                        function_call: FunctionCall {
                            name: "test_function".to_string(),
                            args: json!({"arg": "value"}),
                        },
                        thought_signature: Some("".to_string()),
                    })],
                    role: GoogleRole::Model,
                },
                finish_reason: None,
                finish_message: None,
                safety_ratings: None,
                citation_metadata: None,
            }]),
            prompt_feedback: None,
            usage_metadata: None,
        };

        let events = mapper.map_event(response);

        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
            assert_eq!(tool_use.thought_signature, None);
        } else {
            panic!("Expected ToolUse event");
        }
    }

    // Parallel function calls in one candidate keep their per-call signatures
    // (including a None in the middle) in order.
    #[test]
    fn test_parallel_function_calls_preserve_signatures() {
        let mut mapper = GoogleEventMapper::new();

        let response = GenerateContentResponse {
            candidates: Some(vec![GenerateContentCandidate {
                index: Some(0),
                content: Content {
                    parts: vec![
                        Part::FunctionCallPart(FunctionCallPart {
                            function_call: FunctionCall {
                                name: "function_a".to_string(),
                                args: json!({}),
                            },
                            thought_signature: Some("sig_a".to_string()),
                        }),
                        Part::FunctionCallPart(FunctionCallPart {
                            function_call: FunctionCall {
                                name: "function_b".to_string(),
                                args: json!({}),
                            },
                            thought_signature: None,
                        }),
                        Part::FunctionCallPart(FunctionCallPart {
                            function_call: FunctionCall {
                                name: "function_c".to_string(),
                                args: json!({}),
                            },
                            thought_signature: Some("sig_c".to_string()),
                        }),
                    ],
                    role: GoogleRole::Model,
                },
                finish_reason: None,
                finish_message: None,
                safety_ratings: None,
                citation_metadata: None,
            }]),
            prompt_feedback: None,
            usage_metadata: None,
        };

        let events = mapper.map_event(response);

        let tool_uses: Vec<_> = events
            .iter()
            .filter_map(|e| {
                if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = e {
                    Some(tool_use)
                } else {
                    None
                }
            })
            .collect();

        assert_eq!(tool_uses.len(), 3);
        assert_eq!(tool_uses[0].thought_signature.as_deref(), Some("sig_a"));
        assert_eq!(tool_uses[1].thought_signature, None);
        assert_eq!(tool_uses[2].thought_signature.as_deref(), Some("sig_c"));
    }

    // Outbound direction: a ToolUse with a signature serializes into a
    // FunctionCallPart carrying the same signature.
    #[test]
    fn test_tool_use_with_signature_converts_to_function_call_part() {
        let tool_use = language_model::LanguageModelToolUse {
            id: LanguageModelToolUseId::from("test-id"),
            name: "test_tool".into(),
            input: json!({"key": "value"}),
            raw_input: r#"{"key": "value"}"#.to_string(),
            is_input_complete: true,
            thought_signature: Some("test_sig".to_string()),
        };

        let request = into_google(
            LanguageModelRequest {
                messages: vec![language_model::LanguageModelRequestMessage {
                    role: Role::Assistant,
                    content: vec![MessageContent::ToolUse(tool_use)],
                    cache: false,
                    reasoning_details: None,
                }],
                ..Default::default()
            },
            "gemini-2.5-flash".to_string(),
            GoogleModelMode::Default,
        );

        let parts = &request.contents[0].parts;
        assert_eq!(parts.len(), 1);

        if let Part::FunctionCallPart(fcp) = &parts[0] {
            assert_eq!(fcp.thought_signature.as_deref(), Some("test_sig"));
        } else {
            panic!("Expected FunctionCallPart");
        }
    }

    // Outbound direction: a ToolUse without a signature serializes with the
    // signature field absent.
    #[test]
    fn test_tool_use_without_signature_omits_field() {
        let tool_use = language_model::LanguageModelToolUse {
            id: LanguageModelToolUseId::from("test-id"),
            name: "test_tool".into(),
            input: json!({"key": "value"}),
            raw_input: r#"{"key": "value"}"#.to_string(),
            is_input_complete: true,
            thought_signature: None,
        };

        let request = into_google(
            LanguageModelRequest {
                messages: vec![language_model::LanguageModelRequestMessage {
                    role: Role::Assistant,
                    content: vec![MessageContent::ToolUse(tool_use)],
                    cache: false,
                    reasoning_details: None,
                }],
                ..Default::default()
            },
            "gemini-2.5-flash".to_string(),
            GoogleModelMode::Default,
        );

        let parts = &request.contents[0].parts;

        if let Part::FunctionCallPart(fcp) = &parts[0] {
            assert_eq!(fcp.thought_signature, None);
        } else {
            panic!("Expected FunctionCallPart");
        }
    }

    // Outbound direction: an empty-string signature on a ToolUse is also
    // normalized to `None` by `into_google`.
    #[test]
    fn test_empty_signature_in_tool_use_normalized_to_none() {
        let tool_use = language_model::LanguageModelToolUse {
            id: LanguageModelToolUseId::from("test-id"),
            name: "test_tool".into(),
            input: json!({}),
            raw_input: "{}".to_string(),
            is_input_complete: true,
            thought_signature: Some("".to_string()),
        };

        let request = into_google(
            LanguageModelRequest {
                messages: vec![language_model::LanguageModelRequestMessage {
                    role: Role::Assistant,
                    content: vec![MessageContent::ToolUse(tool_use)],
                    cache: false,
                    reasoning_details: None,
                }],
                ..Default::default()
            },
            "gemini-2.5-flash".to_string(),
            GoogleModelMode::Default,
        );

        let parts = &request.contents[0].parts;

        if let Part::FunctionCallPart(fcp) = &parts[0] {
            assert_eq!(fcp.thought_signature, None);
        } else {
            panic!("Expected FunctionCallPart");
        }
    }

    // Full round trip: response -> map_event -> ToolUse -> into_google must
    // preserve the original signature byte-for-byte.
    #[test]
    fn test_round_trip_preserves_signature() {
        let original_signature = "original_thought_signature_abc123";

        let response = GenerateContentResponse {
            candidates: Some(vec![GenerateContentCandidate {
                index: Some(0),
                content: Content {
                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
                        function_call: FunctionCall {
                            name: "test_function".to_string(),
                            args: json!({"arg": "value"}),
                        },
                        thought_signature: Some(original_signature.to_string()),
                    })],
                    role: GoogleRole::Model,
                },
                finish_reason: None,
                finish_message: None,
                safety_ratings: None,
                citation_metadata: None,
            }]),
            prompt_feedback: None,
            usage_metadata: None,
        };

        let mut mapper = GoogleEventMapper::new();
        let events = mapper.map_event(response);

        let tool_use = if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
            tool_use.clone()
        } else {
            panic!("Expected ToolUse event");
        };

        let request = into_google(
            LanguageModelRequest {
                messages: vec![language_model::LanguageModelRequestMessage {
                    role: Role::Assistant,
                    content: vec![MessageContent::ToolUse(tool_use)],
                    cache: false,
                    reasoning_details: None,
                }],
                ..Default::default()
            },
            "gemini-2.5-flash".to_string(),
            GoogleModelMode::Default,
        );

        let parts = &request.contents[0].parts;
        if let Part::FunctionCallPart(fcp) = &parts[0] {
            assert_eq!(fcp.thought_signature.as_deref(), Some(original_signature));
        } else {
            panic!("Expected FunctionCallPart");
        }
    }

    // Mixed text + function-call parts each map to their own event, with the
    // call's signature intact.
    #[test]
    fn test_mixed_text_and_function_call_with_signature() {
        let mut mapper = GoogleEventMapper::new();

        let response = GenerateContentResponse {
            candidates: Some(vec![GenerateContentCandidate {
                index: Some(0),
                content: Content {
                    parts: vec![
                        Part::TextPart(TextPart {
                            text: "Let me help you with that.".to_string(),
                        }),
                        Part::FunctionCallPart(FunctionCallPart {
                            function_call: FunctionCall {
                                name: "search".to_string(),
                                args: json!({"query": "test"}),
                            },
                            thought_signature: Some("thinking_sig".to_string()),
                        }),
                    ],
                    role: GoogleRole::Model,
                },
                finish_reason: None,
                finish_message: None,
                safety_ratings: None,
                citation_metadata: None,
            }]),
            prompt_feedback: None,
            usage_metadata: None,
        };

        let events = mapper.map_event(response);

        let mut found_text = false;
        let mut found_tool_with_sig = false;

        for event in events {
            match event {
                Ok(LanguageModelCompletionEvent::Text(text)) => {
                    assert_eq!(text, "Let me help you with that.");
                    found_text = true;
                }
                Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) => {
                    assert_eq!(tool_use.thought_signature.as_deref(), Some("thinking_sig"));
                    found_tool_with_sig = true;
                }
                _ => {}
            }
        }

        assert!(found_text, "Should have found text event");
        assert!(
            found_tool_with_sig,
            "Should have found tool use with signature"
        );
    }

    // Signatures are opaque tokens; URL-ish special characters must pass
    // through unmodified (no escaping or truncation).
    #[test]
    fn test_special_characters_in_signature_preserved() {
        let special_signature = "sig/with+special=chars&more%stuff";

        let mut mapper = GoogleEventMapper::new();

        let response = GenerateContentResponse {
            candidates: Some(vec![GenerateContentCandidate {
                index: Some(0),
                content: Content {
                    parts: vec![Part::FunctionCallPart(FunctionCallPart {
                        function_call: FunctionCall {
                            name: "test".to_string(),
                            args: json!({}),
                        },
                        thought_signature: Some(special_signature.to_string()),
                    })],
                    role: GoogleRole::Model,
                },
                finish_reason: None,
                finish_message: None,
                safety_ratings: None,
                citation_metadata: None,
            }]),
            prompt_feedback: None,
            usage_metadata: None,
        };

        let events = mapper.map_event(response);

        if let Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) = &events[0] {
            assert_eq!(
                tool_use.thought_signature.as_deref(),
                Some(special_signature)
            );
        } else {
            panic!("Expected ToolUse event");
        }
    }
}