anthropic: Add support for Claude 4 (#31203)

Created by Marshall Bowers, Antonio Scandurra, and Richard Feldman

This PR adds support for [Claude
4](https://www.anthropic.com/news/claude-4).

Release Notes:

- Added support for Claude Opus 4 and Claude Sonnet 4.

---------

Co-authored-by: Antonio Scandurra <me@as-cii.com>
Co-authored-by: Richard Feldman <oss@rtfeldman.com>

Change summary

crates/anthropic/src/anthropic.rs              | 62 ++++++++++++++++++-
crates/language_model/src/language_model.rs    | 15 ----
crates/language_model/src/model/cloud_model.rs | 55 -----------------
crates/language_models/src/provider/cloud.rs   | 16 +++-
4 files changed, 68 insertions(+), 80 deletions(-)

Detailed changes

crates/anthropic/src/anthropic.rs

@@ -42,6 +42,20 @@ pub enum Model {
         alias = "claude-3-7-sonnet-thinking-latest"
     )]
     Claude3_7SonnetThinking,
+    #[serde(rename = "claude-opus-4", alias = "claude-opus-4-latest")]
+    ClaudeOpus4,
+    #[serde(
+        rename = "claude-opus-4-thinking",
+        alias = "claude-opus-4-thinking-latest"
+    )]
+    ClaudeOpus4Thinking,
+    #[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
+    ClaudeSonnet4,
+    #[serde(
+        rename = "claude-sonnet-4-thinking",
+        alias = "claude-sonnet-4-thinking-latest"
+    )]
+    ClaudeSonnet4Thinking,
     #[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
     Claude3_5Haiku,
     #[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
@@ -89,6 +103,14 @@ impl Model {
             Ok(Self::Claude3Sonnet)
         } else if id.starts_with("claude-3-haiku") {
             Ok(Self::Claude3Haiku)
+        } else if id.starts_with("claude-opus-4-thinking") {
+            Ok(Self::ClaudeOpus4Thinking)
+        } else if id.starts_with("claude-opus-4") {
+            Ok(Self::ClaudeOpus4)
+        } else if id.starts_with("claude-sonnet-4-thinking") {
+            Ok(Self::ClaudeSonnet4Thinking)
+        } else if id.starts_with("claude-sonnet-4") {
+            Ok(Self::ClaudeSonnet4)
         } else {
             anyhow::bail!("invalid model id {id}");
         }
@@ -96,6 +118,10 @@ impl Model {
 
     pub fn id(&self) -> &str {
         match self {
+            Model::ClaudeOpus4 => "claude-opus-4-latest",
+            Model::ClaudeOpus4Thinking => "claude-opus-4-thinking-latest",
+            Model::ClaudeSonnet4 => "claude-sonnet-4-latest",
+            Model::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
             Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
             Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
             Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
@@ -110,6 +136,8 @@ impl Model {
     /// The id of the model that should be used for making API requests
     pub fn request_id(&self) -> &str {
         match self {
+            Model::ClaudeOpus4 | Model::ClaudeOpus4Thinking => "claude-opus-4-20250514",
+            Model::ClaudeSonnet4 | Model::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
             Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
             Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
             Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
@@ -122,6 +150,10 @@ impl Model {
 
     pub fn display_name(&self) -> &str {
         match self {
+            Model::ClaudeOpus4 => "Claude 4 Opus",
+            Model::ClaudeOpus4Thinking => "Claude 4 Opus Thinking",
+            Model::ClaudeSonnet4 => "Claude 4 Sonnet",
+            Model::ClaudeSonnet4Thinking => "Claude 4 Sonnet Thinking",
             Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
             Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
             Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
@@ -137,7 +169,11 @@ impl Model {
 
     pub fn cache_configuration(&self) -> Option<AnthropicModelCacheConfiguration> {
         match self {
-            Self::Claude3_5Sonnet
+            Self::ClaudeOpus4
+            | Self::ClaudeOpus4Thinking
+            | Self::ClaudeSonnet4
+            | Self::ClaudeSonnet4Thinking
+            | Self::Claude3_5Sonnet
             | Self::Claude3_5Haiku
             | Self::Claude3_7Sonnet
             | Self::Claude3_7SonnetThinking
@@ -156,7 +192,11 @@ impl Model {
 
     pub fn max_token_count(&self) -> usize {
         match self {
-            Self::Claude3_5Sonnet
+            Self::ClaudeOpus4
+            | Self::ClaudeOpus4Thinking
+            | Self::ClaudeSonnet4
+            | Self::ClaudeSonnet4Thinking
+            | Self::Claude3_5Sonnet
             | Self::Claude3_5Haiku
             | Self::Claude3_7Sonnet
             | Self::Claude3_7SonnetThinking
@@ -173,7 +213,11 @@ impl Model {
             Self::Claude3_5Sonnet
             | Self::Claude3_7Sonnet
             | Self::Claude3_7SonnetThinking
-            | Self::Claude3_5Haiku => 8_192,
+            | Self::Claude3_5Haiku
+            | Self::ClaudeOpus4
+            | Self::ClaudeOpus4Thinking
+            | Self::ClaudeSonnet4
+            | Self::ClaudeSonnet4Thinking => 8_192,
             Self::Custom {
                 max_output_tokens, ..
             } => max_output_tokens.unwrap_or(4_096),
@@ -182,7 +226,11 @@ impl Model {
 
     pub fn default_temperature(&self) -> f32 {
         match self {
-            Self::Claude3_5Sonnet
+            Self::ClaudeOpus4
+            | Self::ClaudeOpus4Thinking
+            | Self::ClaudeSonnet4
+            | Self::ClaudeSonnet4Thinking
+            | Self::Claude3_5Sonnet
             | Self::Claude3_7Sonnet
             | Self::Claude3_7SonnetThinking
             | Self::Claude3_5Haiku
@@ -201,10 +249,14 @@ impl Model {
             Self::Claude3_5Sonnet
             | Self::Claude3_7Sonnet
             | Self::Claude3_5Haiku
+            | Self::ClaudeOpus4
+            | Self::ClaudeSonnet4
             | Self::Claude3Opus
             | Self::Claude3Sonnet
             | Self::Claude3Haiku => AnthropicModelMode::Default,
-            Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
+            Self::Claude3_7SonnetThinking
+            | Self::ClaudeOpus4Thinking
+            | Self::ClaudeSonnet4Thinking => AnthropicModelMode::Thinking {
                 budget_tokens: Some(4_096),
             },
             Self::Custom { mode, .. } => mode.clone(),

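A note on the id parser above: the `-thinking` branches have to come before their base-model branches, because `"claude-opus-4"` is a string prefix of `"claude-opus-4-thinking"` (and likewise for Sonnet 4). The standalone sketch below mirrors that ordering; it is not the crate's actual API, just an illustration of why the more specific prefix is checked first.

```rust
// Standalone sketch (hypothetical `resolve`, not the anthropic crate's API):
// the "-thinking" prefixes must be matched before their base-model prefixes.
fn resolve(id: &str) -> &'static str {
    if id.starts_with("claude-opus-4-thinking") {
        "ClaudeOpus4Thinking"
    } else if id.starts_with("claude-opus-4") {
        // Reached only when the "-thinking" suffix is absent.
        "ClaudeOpus4"
    } else if id.starts_with("claude-sonnet-4-thinking") {
        "ClaudeSonnet4Thinking"
    } else if id.starts_with("claude-sonnet-4") {
        "ClaudeSonnet4"
    } else {
        "unknown"
    }
}

fn main() {
    // If the base-model check ran first, the "-thinking" ids would be
    // swallowed by it; this ordering keeps them distinct.
    assert_eq!(resolve("claude-opus-4-thinking-latest"), "ClaudeOpus4Thinking");
    assert_eq!(resolve("claude-opus-4-latest"), "ClaudeOpus4");
    assert_eq!(resolve("claude-sonnet-4-thinking-latest"), "ClaudeSonnet4Thinking");
    assert_eq!(resolve("claude-sonnet-4-latest"), "ClaudeSonnet4");
}
```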
crates/language_model/src/language_model.rs

@@ -16,7 +16,6 @@ use gpui::{AnyElement, AnyView, App, AsyncApp, SharedString, Task, Window};
 use http_client::http::{HeaderMap, HeaderValue};
 use icons::IconName;
 use parking_lot::Mutex;
-use proto::Plan;
 use schemars::JsonSchema;
 use serde::{Deserialize, Serialize, de::DeserializeOwned};
 use std::fmt;
@@ -48,15 +47,6 @@ pub fn init_settings(cx: &mut App) {
     registry::init(cx);
 }
 
-/// The availability of a [`LanguageModel`].
-#[derive(Debug, PartialEq, Eq, Clone, Copy)]
-pub enum LanguageModelAvailability {
-    /// The language model is available to the general public.
-    Public,
-    /// The language model is available to users on the indicated plan.
-    RequiresPlan(Plan),
-}
-
 /// Configuration for caching language model messages.
 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
 pub struct LanguageModelCacheConfiguration {
@@ -242,11 +232,6 @@ pub trait LanguageModel: Send + Sync {
         None
     }
 
-    /// Returns the availability of this language model.
-    fn availability(&self) -> LanguageModelAvailability {
-        LanguageModelAvailability::Public
-    }
-
     /// Whether this model supports images
     fn supports_images(&self) -> bool;
 

crates/language_model/src/model/cloud_model.rs

@@ -13,7 +13,7 @@ use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
 use strum::EnumIter;
 use thiserror::Error;
 
-use crate::{LanguageModelAvailability, LanguageModelToolSchemaFormat};
+use crate::LanguageModelToolSchemaFormat;
 
 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
 #[serde(tag = "provider", rename_all = "lowercase")]
@@ -60,59 +60,6 @@ impl CloudModel {
         }
     }
 
-    /// Returns the availability of this model.
-    pub fn availability(&self) -> LanguageModelAvailability {
-        match self {
-            Self::Anthropic(model) => match model {
-                anthropic::Model::Claude3_5Sonnet
-                | anthropic::Model::Claude3_7Sonnet
-                | anthropic::Model::Claude3_7SonnetThinking => {
-                    LanguageModelAvailability::RequiresPlan(Plan::Free)
-                }
-                anthropic::Model::Claude3Opus
-                | anthropic::Model::Claude3Sonnet
-                | anthropic::Model::Claude3Haiku
-                | anthropic::Model::Claude3_5Haiku
-                | anthropic::Model::Custom { .. } => {
-                    LanguageModelAvailability::RequiresPlan(Plan::ZedPro)
-                }
-            },
-            Self::OpenAi(model) => match model {
-                open_ai::Model::ThreePointFiveTurbo
-                | open_ai::Model::Four
-                | open_ai::Model::FourTurbo
-                | open_ai::Model::FourOmni
-                | open_ai::Model::FourOmniMini
-                | open_ai::Model::FourPointOne
-                | open_ai::Model::FourPointOneMini
-                | open_ai::Model::FourPointOneNano
-                | open_ai::Model::O1Mini
-                | open_ai::Model::O1Preview
-                | open_ai::Model::O1
-                | open_ai::Model::O3Mini
-                | open_ai::Model::O3
-                | open_ai::Model::O4Mini
-                | open_ai::Model::Custom { .. } => {
-                    LanguageModelAvailability::RequiresPlan(Plan::ZedPro)
-                }
-            },
-            Self::Google(model) => match model {
-                google_ai::Model::Gemini15Pro
-                | google_ai::Model::Gemini15Flash
-                | google_ai::Model::Gemini20Pro
-                | google_ai::Model::Gemini20Flash
-                | google_ai::Model::Gemini20FlashThinking
-                | google_ai::Model::Gemini20FlashLite
-                | google_ai::Model::Gemini25ProExp0325
-                | google_ai::Model::Gemini25ProPreview0325
-                | google_ai::Model::Gemini25FlashPreview0417
-                | google_ai::Model::Custom { .. } => {
-                    LanguageModelAvailability::RequiresPlan(Plan::ZedPro)
-                }
-            },
-        }
-    }
-
     pub fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
         match self {
             Self::Anthropic(_) | Self::OpenAi(_) => LanguageModelToolSchemaFormat::JsonSchema,

crates/language_models/src/provider/cloud.rs

@@ -19,8 +19,8 @@ use language_model::{
     ZED_CLOUD_PROVIDER_ID,
 };
 use language_model::{
-    LanguageModelAvailability, LanguageModelCompletionEvent, LanguageModelProvider, LlmApiToken,
-    PaymentRequiredError, RefreshLlmTokenListener,
+    LanguageModelCompletionEvent, LanguageModelProvider, LlmApiToken, PaymentRequiredError,
+    RefreshLlmTokenListener,
 };
 use proto::Plan;
 use release_channel::AppVersion;
@@ -331,6 +331,14 @@ impl LanguageModelProvider for CloudLanguageModelProvider {
                 anthropic::Model::Claude3_7SonnetThinking.id().to_string(),
                 CloudModel::Anthropic(anthropic::Model::Claude3_7SonnetThinking),
             );
+            models.insert(
+                anthropic::Model::ClaudeSonnet4.id().to_string(),
+                CloudModel::Anthropic(anthropic::Model::ClaudeSonnet4),
+            );
+            models.insert(
+                anthropic::Model::ClaudeSonnet4Thinking.id().to_string(),
+                CloudModel::Anthropic(anthropic::Model::ClaudeSonnet4Thinking),
+            );
         }
 
         let llm_closed_beta_models = if cx.has_flag::<LlmClosedBetaFeatureFlag>() {
@@ -699,10 +707,6 @@ impl LanguageModel for CloudLanguageModel {
         format!("zed.dev/{}", self.model.id())
     }
 
-    fn availability(&self) -> LanguageModelAvailability {
-        self.model.availability()
-    }
-
     fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
         self.model.tool_input_format()
     }