use std::io::{Cursor, Write};
use std::sync::Arc;

use anyhow::Result;
use base64::write::EncoderWriter;
use cloud_llm_client::CompletionIntent;
use gpui::{
    App, AppContext as _, DevicePixels, Image, ImageFormat, ObjectFit, SharedString, Size, Task,
    point, px, size,
};
use image::GenericImageView as _;
use image::codecs::png::PngEncoder;
use serde::{Deserialize, Serialize};
use util::ResultExt;

use crate::role::Role;
use crate::{LanguageModelToolUse, LanguageModelToolUseId};

#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub struct LanguageModelImage {
    /// A base64-encoded PNG image.
    pub source: SharedString,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub size: Option<Size<DevicePixels>>,
}

impl LanguageModelImage {
    pub fn len(&self) -> usize {
        self.source.len()
    }

    pub fn is_empty(&self) -> bool {
        self.source.is_empty()
    }

    // Parse Self from a JSON object with case-insensitive field names
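    // For example, both of these shapes parse equivalently (illustrative values):
    //   { "source": "aGVsbG8=", "size": { "width": 2, "height": 2 } }
    //   { "SOURCE": "aGVsbG8=", "Size": { "Width": 2, "Height": 2 } }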
    pub fn from_json(obj: &serde_json::Map<String, serde_json::Value>) -> Option<Self> {
        let mut source = None;
        let mut size_obj = None;

        // Find source and size fields (case-insensitive)
        for (k, v) in obj.iter() {
            match k.to_lowercase().as_str() {
                "source" => source = v.as_str(),
                "size" => size_obj = v.as_object(),
                _ => {}
            }
        }

        let source = source?;
        let size_obj = size_obj?;

        let mut width = None;
        let mut height = None;

        // Find width and height in size object (case-insensitive)
        for (k, v) in size_obj.iter() {
            match k.to_lowercase().as_str() {
                "width" => width = v.as_i64().map(|w| w as i32),
                "height" => height = v.as_i64().map(|h| h as i32),
                _ => {}
            }
        }

        Some(Self {
            size: Some(size(DevicePixels(width?), DevicePixels(height?))),
            source: SharedString::from(source.to_string()),
        })
    }
}

impl std::fmt::Debug for LanguageModelImage {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("LanguageModelImage")
            .field("source", &format!("<{} bytes>", self.source.len()))
            .field("size", &self.size)
            .finish()
    }
}

/// Anthropic wants uploaded images to be smaller than this in both dimensions.
const ANTHROPIC_SIZE_LIMIT: f32 = 1568.;

/// Default per-image hard limit (in bytes) for the encoded image payload we send upstream.
///
/// NOTE: `LanguageModelImage.source` is base64-encoded PNG bytes (without the `data:` prefix).
/// This limit is enforced on the encoded PNG bytes *before* base64 encoding.
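/// Base64 expands the payload by a factor of ~4/3, so a 5 MiB PNG yields a `source`
/// string of roughly 6.7 MiB.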
const DEFAULT_IMAGE_MAX_BYTES: usize = 5 * 1024 * 1024;

/// Conservative cap on how many times we'll attempt to shrink/re-encode an image to fit
/// `DEFAULT_IMAGE_MAX_BYTES`.
const MAX_IMAGE_DOWNSCALE_PASSES: usize = 8;

impl LanguageModelImage {
    // All language model images are encoded as PNGs.
    pub const FORMAT: ImageFormat = ImageFormat::Png;

    pub fn empty() -> Self {
        Self {
            source: "".into(),
            size: None,
        }
    }

    pub fn from_image(data: Arc<Image>, cx: &mut App) -> Task<Option<Self>> {
        cx.background_spawn(async move {
            let image_bytes = Cursor::new(data.bytes());
            let dynamic_image = match data.format() {
                ImageFormat::Png => image::codecs::png::PngDecoder::new(image_bytes)
                    .and_then(image::DynamicImage::from_decoder),
                ImageFormat::Jpeg => image::codecs::jpeg::JpegDecoder::new(image_bytes)
                    .and_then(image::DynamicImage::from_decoder),
                ImageFormat::Webp => image::codecs::webp::WebPDecoder::new(image_bytes)
                    .and_then(image::DynamicImage::from_decoder),
                ImageFormat::Gif => image::codecs::gif::GifDecoder::new(image_bytes)
                    .and_then(image::DynamicImage::from_decoder),
                ImageFormat::Bmp => image::codecs::bmp::BmpDecoder::new(image_bytes)
                    .and_then(image::DynamicImage::from_decoder),
                ImageFormat::Tiff => image::codecs::tiff::TiffDecoder::new(image_bytes)
                    .and_then(image::DynamicImage::from_decoder),
                _ => return None,
            }
            .log_err()?;

            let width = dynamic_image.width();
            let height = dynamic_image.height();
            let image_size = size(DevicePixels(width as i32), DevicePixels(height as i32));

            // First apply any provider-specific dimension constraints we know about (Anthropic).
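            // For example, a 3000x2000 input is scaled down to fit within 1568x1568,
            // landing at roughly 1568x1045 (aspect ratio preserved; smaller images
            // pass through unchanged).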
            let mut processed_image = if image_size.width.0 > ANTHROPIC_SIZE_LIMIT as i32
                || image_size.height.0 > ANTHROPIC_SIZE_LIMIT as i32
            {
                let new_bounds = ObjectFit::ScaleDown.get_bounds(
                    gpui::Bounds {
                        origin: point(px(0.0), px(0.0)),
                        size: size(px(ANTHROPIC_SIZE_LIMIT), px(ANTHROPIC_SIZE_LIMIT)),
                    },
                    image_size,
                );
                dynamic_image.resize(
                    new_bounds.size.width.into(),
                    new_bounds.size.height.into(),
                    image::imageops::FilterType::Triangle,
                )
            } else {
                dynamic_image
            };

            // Then enforce a default per-image size cap on the encoded PNG bytes.
            //
            // We always send PNG bytes (either the original PNG, or a re-encoded one),
            // base64'd. The upstream provider limit we want to respect is effectively on
            // the binary image payload, so we enforce it against the encoded PNG bytes
            // before base64 encoding.
            let mut encoded_png = encode_png_bytes(&processed_image).log_err()?;
            for _pass in 0..MAX_IMAGE_DOWNSCALE_PASSES {
                if encoded_png.len() <= DEFAULT_IMAGE_MAX_BYTES {
                    break;
                }

                // Scale down geometrically to converge quickly. We don't know the final PNG
                // size as a function of pixel count, so we iteratively shrink and re-encode.
                let (w, h) = processed_image.dimensions();
                if w <= 1 || h <= 1 {
                    break;
                }

                // Shrink by ~15% each pass (factor 0.85). This is a compromise between
                // speed and preserving image detail.
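                // Worst case, after all MAX_IMAGE_DOWNSCALE_PASSES (8) passes the linear
                // dimensions shrink to 0.85^8 ≈ 0.27 of the starting size, i.e. roughly
                // 7% of the original pixel count.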
                let new_w = ((w as f32) * 0.85).round().max(1.0) as u32;
                let new_h = ((h as f32) * 0.85).round().max(1.0) as u32;

                processed_image =
                    processed_image.resize(new_w, new_h, image::imageops::FilterType::Triangle);
                encoded_png = encode_png_bytes(&processed_image).log_err()?;
            }

            if encoded_png.len() > DEFAULT_IMAGE_MAX_BYTES {
                // Still too large after multiple passes; treat as non-convertible for now.
                // (Provider-specific handling can be introduced later.)
                return None;
            }

            // Now base64 encode the PNG bytes.
            let base64_image = encode_bytes_as_base64(encoded_png.as_slice()).log_err()?;

            // SAFETY: base64 output is drawn from a fixed ASCII alphabet, so the bytes
            // are always valid UTF-8.
            let source = unsafe { String::from_utf8_unchecked(base64_image) };

            Some(LanguageModelImage {
                size: Some(image_size),
                source: source.into(),
            })
        })
    }

    pub fn estimate_tokens(&self) -> usize {
        let Some(size) = self.size.as_ref() else {
            return 0;
        };
        let width = size.width.0.unsigned_abs() as usize;
        let height = size.height.0.unsigned_abs() as usize;

        // From: https://docs.anthropic.com/en/docs/build-with-claude/vision#calculate-image-costs
        // Note that there are a lot of conditions on Anthropic's API, and OpenAI doesn't
        // use this formula, so this method is more of a rough guess.
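        // For example, a 1092x1092 image comes out to (1092 * 1092) / 750 = 1589 tokens,
        // in line with the ~1590-token example in Anthropic's docs.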
        (width * height) / 750
    }

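    /// Formats the image as a `data:` URL. Since `source` is always base64-encoded PNG,
    /// results look like `data:image/png;base64,iVBORw0KGgo...` (`iVBORw0KGgo` being the
    /// base64 encoding of the PNG magic bytes).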
    pub fn to_base64_url(&self) -> String {
        format!("data:image/png;base64,{}", self.source)
    }
}

fn encode_png_bytes(image: &image::DynamicImage) -> Result<Vec<u8>> {
    let mut png = Vec::new();
    image.write_with_encoder(PngEncoder::new(&mut png))?;
    Ok(png)
}

fn encode_bytes_as_base64(bytes: &[u8]) -> Result<Vec<u8>> {
    let mut base64_image = Vec::new();
    {
        let mut base64_encoder = EncoderWriter::new(
            Cursor::new(&mut base64_image),
            &base64::engine::general_purpose::STANDARD,
        );
        base64_encoder.write_all(bytes)?;
    }
    Ok(base64_image)
}

#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub struct LanguageModelToolResult {
    pub tool_use_id: LanguageModelToolUseId,
    pub tool_name: Arc<str>,
    pub is_error: bool,
    pub content: LanguageModelToolResultContent,
    pub output: Option<serde_json::Value>,
}

#[derive(Debug, Clone, Serialize, Eq, PartialEq, Hash)]
pub enum LanguageModelToolResultContent {
    Text(Arc<str>),
    Image(LanguageModelImage),
}

impl<'de> Deserialize<'de> for LanguageModelToolResultContent {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        use serde::de::Error;

        let value = serde_json::Value::deserialize(deserializer)?;

        // Models can provide these responses in several styles. Try each in order.
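        // Accepted shapes (illustrative):
        //   "plain text"                                        -> Text
        //   { "type": "text", "text": "..." }                   -> Text
        //   { "Text": "..." }             (single-field wrapper) -> Text
        //   { "Image": { "source": "...", "size": ... } }        -> Image
        //   { "source": "...", "size": ... }      (direct image) -> Image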

        // 1. Try as plain string
        if let Ok(text) = serde_json::from_value::<String>(value.clone()) {
            return Ok(Self::Text(Arc::from(text)));
        }

        // 2. Try as object
        if let Some(obj) = value.as_object() {
            // get a JSON field case-insensitively
            fn get_field<'a>(
                obj: &'a serde_json::Map<String, serde_json::Value>,
                field: &str,
            ) -> Option<&'a serde_json::Value> {
                obj.iter()
                    .find(|(k, _)| k.to_lowercase() == field.to_lowercase())
                    .map(|(_, v)| v)
            }

            // Accept wrapped text format: { "type": "text", "text": "..." }
            if let (Some(type_value), Some(text_value)) =
                (get_field(obj, "type"), get_field(obj, "text"))
                && let Some(type_str) = type_value.as_str()
                && type_str.to_lowercase() == "text"
                && let Some(text) = text_value.as_str()
            {
                return Ok(Self::Text(Arc::from(text)));
            }

            // Check for wrapped Text variant: { "text": "..." }
            if let Some((_key, value)) = obj.iter().find(|(k, _)| k.to_lowercase() == "text")
                && obj.len() == 1
            {
                // Only one field, and it's "text" (case-insensitive)
                if let Some(text) = value.as_str() {
                    return Ok(Self::Text(Arc::from(text)));
                }
            }

            // Check for wrapped Image variant: { "image": { "source": "...", "size": ... } }
            if let Some((_key, value)) = obj.iter().find(|(k, _)| k.to_lowercase() == "image")
                && obj.len() == 1
            {
                // Only one field, and it's "image" (case-insensitive)
                // Try to parse the nested image object
                if let Some(image_obj) = value.as_object()
                    && let Some(image) = LanguageModelImage::from_json(image_obj)
                {
                    return Ok(Self::Image(image));
                }
            }

            // Try as direct Image (object with "source" and "size" fields)
            if let Some(image) = LanguageModelImage::from_json(obj) {
                return Ok(Self::Image(image));
            }
        }

        // If none of the variants match, return an error with the problematic JSON
        Err(D::Error::custom(format!(
            "data did not match any variant of LanguageModelToolResultContent. Expected either a string, \
            an object with 'type': 'text', a wrapped variant like {{\"Text\": \"...\"}}, or an image object. Got: {}",
            serde_json::to_string_pretty(&value).unwrap_or_else(|_| value.to_string())
        )))
    }
}

impl LanguageModelToolResultContent {
    pub fn to_str(&self) -> Option<&str> {
        match self {
            Self::Text(text) => Some(text),
            Self::Image(_) => None,
        }
    }

    pub fn is_empty(&self) -> bool {
        match self {
            Self::Text(text) => text.chars().all(|c| c.is_whitespace()),
            Self::Image(_) => false,
        }
    }
}

impl From<&str> for LanguageModelToolResultContent {
    fn from(value: &str) -> Self {
        Self::Text(Arc::from(value))
    }
}

impl From<String> for LanguageModelToolResultContent {
    fn from(value: String) -> Self {
        Self::Text(Arc::from(value))
    }
}

impl From<LanguageModelImage> for LanguageModelToolResultContent {
    fn from(image: LanguageModelImage) -> Self {
        Self::Image(image)
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq, Hash)]
pub enum MessageContent {
    Text(String),
    Thinking {
        text: String,
        signature: Option<String>,
    },
    RedactedThinking(String),
    Image(LanguageModelImage),
    ToolUse(LanguageModelToolUse),
    ToolResult(LanguageModelToolResult),
}

impl MessageContent {
    pub fn to_str(&self) -> Option<&str> {
        match self {
            MessageContent::Text(text) => Some(text.as_str()),
            MessageContent::Thinking { text, .. } => Some(text.as_str()),
            MessageContent::RedactedThinking(_) => None,
            MessageContent::ToolResult(tool_result) => tool_result.content.to_str(),
            MessageContent::ToolUse(_) | MessageContent::Image(_) => None,
        }
    }

    pub fn is_empty(&self) -> bool {
        match self {
            MessageContent::Text(text) => text.chars().all(|c| c.is_whitespace()),
            MessageContent::Thinking { text, .. } => text.chars().all(|c| c.is_whitespace()),
            MessageContent::ToolResult(tool_result) => tool_result.content.is_empty(),
            MessageContent::RedactedThinking(_)
            | MessageContent::ToolUse(_)
            | MessageContent::Image(_) => false,
        }
    }
}

impl From<String> for MessageContent {
    fn from(value: String) -> Self {
        MessageContent::Text(value)
    }
}

impl From<&str> for MessageContent {
    fn from(value: &str) -> Self {
        MessageContent::Text(value.to_string())
    }
}

#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Hash)]
pub struct LanguageModelRequestMessage {
    pub role: Role,
    pub content: Vec<MessageContent>,
    pub cache: bool,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub reasoning_details: Option<serde_json::Value>,
}

impl LanguageModelRequestMessage {
    pub fn string_contents(&self) -> String {
        let mut buffer = String::new();
        for string in self.content.iter().filter_map(|content| content.to_str()) {
            buffer.push_str(string);
        }

        buffer
    }

    pub fn contents_empty(&self) -> bool {
        self.content.iter().all(|content| content.is_empty())
    }
}

#[derive(Debug, PartialEq, Hash, Clone, Serialize, Deserialize)]
pub struct LanguageModelRequestTool {
    pub name: String,
    pub description: String,
    pub input_schema: serde_json::Value,
}

#[derive(Debug, PartialEq, Hash, Clone, Serialize, Deserialize)]
pub enum LanguageModelToolChoice {
    Auto,
    Any,
    None,
}

#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub struct LanguageModelRequest {
    pub thread_id: Option<String>,
    pub prompt_id: Option<String>,
    pub intent: Option<CompletionIntent>,
    pub messages: Vec<LanguageModelRequestMessage>,
    pub tools: Vec<LanguageModelRequestTool>,
    pub tool_choice: Option<LanguageModelToolChoice>,
    pub stop: Vec<String>,
    pub temperature: Option<f32>,
    pub thinking_allowed: bool,
    /// When true, this request bypasses the rate limiter. Used for nested requests
    /// (like edit agent requests spawned from within a tool call) that are already
    /// "part of" a rate-limited request to avoid deadlocks.
    #[serde(default)]
    pub bypass_rate_limit: bool,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct LanguageModelResponseMessage {
    pub role: Option<Role>,
    pub content: Option<String>,
}

#[cfg(test)]
mod tests {
    use super::*;
    use base64::Engine as _;
    use gpui::TestAppContext;
    use image::ImageDecoder as _;

    fn base64_to_png_bytes(base64_png: &str) -> Vec<u8> {
        base64::engine::general_purpose::STANDARD
            .decode(base64_png.as_bytes())
            .expect("base64 should decode")
    }

    fn png_dimensions(png_bytes: &[u8]) -> (u32, u32) {
        let decoder = image::codecs::png::PngDecoder::new(Cursor::new(png_bytes))
            .expect("png should decode");
        decoder.dimensions()
    }

    fn make_noisy_png_bytes(width: u32, height: u32) -> Vec<u8> {
        // Create an RGBA image with per-pixel variance to avoid PNG compressing too well.
        let mut img = image::RgbaImage::new(width, height);
        for y in 0..height {
            for x in 0..width {
                let r = ((x ^ y) & 0xFF) as u8;
                let g = ((x.wrapping_mul(31) ^ y.wrapping_mul(17)) & 0xFF) as u8;
                let b = ((x.wrapping_mul(131) ^ y.wrapping_mul(7)) & 0xFF) as u8;
                img.put_pixel(x, y, image::Rgba([r, g, b, 0xFF]));
            }
        }

        let mut out = Vec::new();
        image::DynamicImage::ImageRgba8(img)
            .write_with_encoder(PngEncoder::new(&mut out))
            .expect("png encoding should succeed");
        out
    }

    #[gpui::test]
    async fn test_from_image_downscales_to_default_5mb_limit(cx: &mut TestAppContext) {
        // Pick a size that reliably produces a PNG > 5MB when filled with noise.
        // If this fails (the image is too small), bump the dimensions.
        let original_png = make_noisy_png_bytes(4096, 4096);
        assert!(
            original_png.len() > DEFAULT_IMAGE_MAX_BYTES,
            "precondition failed: noisy PNG must exceed DEFAULT_IMAGE_MAX_BYTES"
        );

        let image = gpui::Image::from_bytes(ImageFormat::Png, original_png);
        let lm_image = cx
            .update(|cx| LanguageModelImage::from_image(Arc::new(image), cx))
            .await
            .expect("image conversion should succeed");

        let encoded_png = base64_to_png_bytes(lm_image.source.as_ref());
        assert!(
            encoded_png.len() <= DEFAULT_IMAGE_MAX_BYTES,
            "expected encoded PNG <= DEFAULT_IMAGE_MAX_BYTES, got {} bytes",
            encoded_png.len()
        );

        // Ensure we actually downscaled in pixels (not just re-encoded).
        let (w, h) = png_dimensions(&encoded_png);
        assert!(
            w < 4096 || h < 4096,
            "expected image to be downscaled in at least one dimension; got {w}x{h}"
        );
    }

    #[test]
    fn test_language_model_tool_result_content_deserialization() {
        let json = r#""This is plain text""#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        assert_eq!(
            result,
            LanguageModelToolResultContent::Text("This is plain text".into())
        );

        let json = r#"{"type": "text", "text": "This is wrapped text"}"#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        assert_eq!(
            result,
            LanguageModelToolResultContent::Text("This is wrapped text".into())
        );

        let json = r#"{"Type": "TEXT", "TEXT": "Case insensitive"}"#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        assert_eq!(
            result,
            LanguageModelToolResultContent::Text("Case insensitive".into())
        );

        let json = r#"{"Text": "Wrapped variant"}"#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        assert_eq!(
            result,
            LanguageModelToolResultContent::Text("Wrapped variant".into())
        );

        let json = r#"{"text": "Lowercase wrapped"}"#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        assert_eq!(
            result,
            LanguageModelToolResultContent::Text("Lowercase wrapped".into())
        );

        // Test image deserialization
        let json = r#"{
            "source": "base64encodedimagedata",
            "size": {
                "width": 100,
                "height": 200
            }
        }"#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        match result {
            LanguageModelToolResultContent::Image(image) => {
                assert_eq!(image.source.as_ref(), "base64encodedimagedata");
                let size = image.size.expect("size");
                assert_eq!(size.width.0, 100);
                assert_eq!(size.height.0, 200);
            }
            _ => panic!("Expected Image variant"),
        }

        // Test wrapped Image variant
        let json = r#"{
            "Image": {
                "source": "wrappedimagedata",
                "size": {
                    "width": 50,
                    "height": 75
                }
            }
        }"#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        match result {
            LanguageModelToolResultContent::Image(image) => {
                assert_eq!(image.source.as_ref(), "wrappedimagedata");
                let size = image.size.expect("size");
                assert_eq!(size.width.0, 50);
                assert_eq!(size.height.0, 75);
            }
            _ => panic!("Expected Image variant"),
        }

        // Test wrapped Image variant with case-insensitive fields
        let json = r#"{
            "image": {
                "Source": "caseinsensitive",
                "SIZE": {
                    "width": 30,
                    "height": 40
                }
            }
        }"#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        match result {
            LanguageModelToolResultContent::Image(image) => {
                assert_eq!(image.source.as_ref(), "caseinsensitive");
                let size = image.size.expect("size");
                assert_eq!(size.width.0, 30);
                assert_eq!(size.height.0, 40);
            }
            _ => panic!("Expected Image variant"),
        }

        // Test that wrapped text with the wrong type fails
        let json = r#"{"type": "blahblah", "text": "This should fail"}"#;
        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
        assert!(result.is_err());

        // Test that well-formed JSON with an unrecognized structure fails
        let json = r#"{"invalid": "structure"}"#;
        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
        assert!(result.is_err());

        // Test edge cases
        let json = r#""""#; // Empty string
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        assert_eq!(result, LanguageModelToolResultContent::Text("".into()));

        // Test with extra fields in wrapped text (should be ignored)
        let json = r#"{"type": "text", "text": "Hello", "extra": "field"}"#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        assert_eq!(result, LanguageModelToolResultContent::Text("Hello".into()));

        // Test direct image with case-insensitive fields
        let json = r#"{
            "SOURCE": "directimage",
            "Size": {
                "width": 200,
                "height": 300
            }
        }"#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        match result {
            LanguageModelToolResultContent::Image(image) => {
                assert_eq!(image.source.as_ref(), "directimage");
                let size = image.size.expect("size");
                assert_eq!(size.width.0, 200);
                assert_eq!(size.height.0, 300);
            }
            _ => panic!("Expected Image variant"),
        }

        // Test that multiple fields prevent wrapped-variant interpretation
        let json = r#"{"Text": "not wrapped", "extra": "field"}"#;
        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
        assert!(result.is_err());

        // Test wrapped text with the uppercase TEXT variant
        let json = r#"{"TEXT": "Uppercase variant"}"#;
        let result: LanguageModelToolResultContent = serde_json::from_str(json).unwrap();
        assert_eq!(
            result,
            LanguageModelToolResultContent::Text("Uppercase variant".into())
        );

        // Test that numbers and other JSON values fail gracefully
        let json = r#"123"#;
        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
        assert!(result.is_err());

        let json = r#"null"#;
        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
        assert!(result.is_err());

        let json = r#"[1, 2, 3]"#;
        let result: Result<LanguageModelToolResultContent, _> = serde_json::from_str(json);
        assert!(result.is_err());
    }
}