anthropic: Preserve custom model thinking mode after thinking-toggle refactor (#52975)

Created by Enoch and copilot-swe-agent[bot]

PR #51946 broke `Model::Custom` thinking behavior: `mode()`,
`supports_thinking()`, and `supports_adaptive_thinking()` all inferred
capabilities from hardcoded built-in model lists, so any `Custom`
variant always fell back to `Default` regardless of its configured
`mode` field.

### Fixes

- **`Model::mode()`** — `Custom` now short-circuits to `mode.clone()`
before the built-in inference logic
- **`Model::supports_thinking()`** — `Custom` returns `true` when `mode`
is `Thinking { .. }` or `AdaptiveThinking`
- **`Model::supports_adaptive_thinking()`** — `Custom` returns `true`
when `mode` is `AdaptiveThinking`

Built-in model behavior is unchanged.

### Tests

Three regression tests were added, covering the three `Custom` mode
cases: explicit `Thinking`, `AdaptiveThinking`, and `Default` (which
must disable both flags).

Self-Review Checklist:

- [x] I've reviewed my own diff for quality, security, and reliability
- [ ] Unsafe blocks (if any) have justifying comments
- [x] The content is consistent with the [UI/UX
checklist](https://github.com/zed-industries/zed/blob/main/CONTRIBUTING.md#uiux-checklist)
- [x] Tests cover the new/changed behavior
- [x] Performance impact has been considered and is acceptable

Release Notes:

- Fixed custom Anthropic models losing their configured
thinking/adaptive-thinking mode after the thinking-toggle refactor
(#51946)

---------

Co-authored-by: copilot-swe-agent[bot] <198982749+Copilot@users.noreply.github.com>

Change summary

crates/anthropic/src/anthropic.rs | 107 +++++++++++++++++++++++++++-----
1 file changed, 88 insertions(+), 19 deletions(-)

Detailed changes

crates/anthropic/src/anthropic.rs 🔗

@@ -288,33 +288,43 @@ impl Model {
     }
 
     pub fn mode(&self) -> AnthropicModelMode {
-        if self.supports_adaptive_thinking() {
-            AnthropicModelMode::AdaptiveThinking
-        } else if self.supports_thinking() {
-            AnthropicModelMode::Thinking {
+        match self {
+            Self::Custom { mode, .. } => mode.clone(),
+            _ if self.supports_adaptive_thinking() => AnthropicModelMode::AdaptiveThinking,
+            _ if self.supports_thinking() => AnthropicModelMode::Thinking {
                 budget_tokens: Some(4_096),
-            }
-        } else {
-            AnthropicModelMode::Default
+            },
+            _ => AnthropicModelMode::Default,
         }
     }
 
     pub fn supports_thinking(&self) -> bool {
-        matches!(
-            self,
-            Self::ClaudeOpus4
-                | Self::ClaudeOpus4_1
-                | Self::ClaudeOpus4_5
-                | Self::ClaudeOpus4_6
-                | Self::ClaudeSonnet4
-                | Self::ClaudeSonnet4_5
-                | Self::ClaudeSonnet4_6
-                | Self::ClaudeHaiku4_5
-        )
+        match self {
+            Self::Custom { mode, .. } => {
+                matches!(
+                    mode,
+                    AnthropicModelMode::Thinking { .. } | AnthropicModelMode::AdaptiveThinking
+                )
+            }
+            _ => matches!(
+                self,
+                Self::ClaudeOpus4
+                    | Self::ClaudeOpus4_1
+                    | Self::ClaudeOpus4_5
+                    | Self::ClaudeOpus4_6
+                    | Self::ClaudeSonnet4
+                    | Self::ClaudeSonnet4_5
+                    | Self::ClaudeSonnet4_6
+                    | Self::ClaudeHaiku4_5
+            ),
+        }
     }
 
     pub fn supports_adaptive_thinking(&self) -> bool {
-        matches!(self, Self::ClaudeOpus4_6 | Self::ClaudeSonnet4_6)
+        match self {
+            Self::Custom { mode, .. } => matches!(mode, AnthropicModelMode::AdaptiveThinking),
+            _ => matches!(self, Self::ClaudeOpus4_6 | Self::ClaudeSonnet4_6),
+        }
     }
 
     pub fn beta_headers(&self) -> Option<String> {
@@ -1110,6 +1120,65 @@ impl From<ApiError> for language_model_core::LanguageModelCompletionError {
     }
 }
 
+#[test]
+fn custom_mode_thinking_is_preserved() {
+    let model = Model::Custom {
+        name: "my-custom-model".to_string(),
+        max_tokens: 8192,
+        display_name: None,
+        tool_override: None,
+        cache_configuration: None,
+        max_output_tokens: None,
+        default_temperature: None,
+        extra_beta_headers: vec![],
+        mode: AnthropicModelMode::Thinking {
+            budget_tokens: Some(2048),
+        },
+    };
+    assert_eq!(
+        model.mode(),
+        AnthropicModelMode::Thinking {
+            budget_tokens: Some(2048)
+        }
+    );
+    assert!(model.supports_thinking());
+}
+
+#[test]
+fn custom_mode_adaptive_is_preserved() {
+    let model = Model::Custom {
+        name: "my-custom-model".to_string(),
+        max_tokens: 8192,
+        display_name: None,
+        tool_override: None,
+        cache_configuration: None,
+        max_output_tokens: None,
+        default_temperature: None,
+        extra_beta_headers: vec![],
+        mode: AnthropicModelMode::AdaptiveThinking,
+    };
+    assert_eq!(model.mode(), AnthropicModelMode::AdaptiveThinking);
+    assert!(model.supports_adaptive_thinking());
+    assert!(model.supports_thinking());
+}
+
+#[test]
+fn custom_mode_default_disables_thinking() {
+    let model = Model::Custom {
+        name: "my-custom-model".to_string(),
+        max_tokens: 8192,
+        display_name: None,
+        tool_override: None,
+        cache_configuration: None,
+        max_output_tokens: None,
+        default_temperature: None,
+        extra_beta_headers: vec![],
+        mode: AnthropicModelMode::Default,
+    };
+    assert!(!model.supports_thinking());
+    assert!(!model.supports_adaptive_thinking());
+}
+
 #[test]
 fn test_match_window_exceeded() {
     let error = ApiError {