language_model: Remove `CloudModel` enum (#31322)

Created by Marshall Bowers

This PR removes the `CloudModel` enum, as it is no longer needed after
#31316.

Release Notes:

- N/A

Change summary

Cargo.lock                                          |  3 ---
crates/assistant_settings/src/assistant_settings.rs |  6 +++---
crates/language_model/Cargo.toml                    |  3 ---
crates/language_model/src/model/cloud_model.rs      | 58 ---------------
4 files changed, 3 insertions(+), 67 deletions(-)

Detailed changes

Cargo.lock

@@ -8803,19 +8803,16 @@ dependencies = [
  "client",
  "collections",
  "futures 0.3.31",
- "google_ai",
  "gpui",
  "http_client",
  "icons",
  "image",
- "open_ai",
  "parking_lot",
  "proto",
  "schemars",
  "serde",
  "serde_json",
  "smol",
- "strum 0.27.1",
  "telemetry_events",
  "thiserror 2.0.12",
  "util",

crates/assistant_settings/src/assistant_settings.rs

@@ -8,7 +8,7 @@ use anyhow::{Result, bail};
 use collections::IndexMap;
 use deepseek::Model as DeepseekModel;
 use gpui::{App, Pixels, SharedString};
-use language_model::{CloudModel, LanguageModel};
+use language_model::LanguageModel;
 use lmstudio::Model as LmStudioModel;
 use mistral::Model as MistralModel;
 use ollama::Model as OllamaModel;
@@ -45,7 +45,7 @@ pub enum NotifyWhenAgentWaiting {
 #[schemars(deny_unknown_fields)]
 pub enum AssistantProviderContentV1 {
     #[serde(rename = "zed.dev")]
-    ZedDotDev { default_model: Option<CloudModel> },
+    ZedDotDev { default_model: Option<String> },
     #[serde(rename = "openai")]
     OpenAi {
         default_model: Option<OpenAiModel>,
@@ -222,7 +222,7 @@ impl AssistantSettingsContent {
                             AssistantProviderContentV1::ZedDotDev { default_model } => {
                                 default_model.map(|model| LanguageModelSelection {
                                     provider: "zed.dev".into(),
-                                    model: model.id().to_string(),
+                                    model,
                                 })
                             }
                             AssistantProviderContentV1::OpenAi { default_model, .. } => {

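The effect of the `assistant_settings` change above is easiest to see in isolation. The sketch below uses simplified stand-in types (not the real `AssistantProviderContentV1` or `LanguageModelSelection` definitions) to show the `zed.dev` migration arm after this PR: the legacy `default_model` is kept as an opaque model ID string and forwarded unchanged, rather than being parsed into `CloudModel` and turned back into a string via `model.id()`.

```rust
// Minimal, self-contained sketch; types are illustrative stand-ins.
struct LanguageModelSelection {
    provider: String,
    model: String,
}

enum AssistantProviderContentV1 {
    // After this PR the legacy `zed.dev` block stores the model as a plain ID string.
    ZedDotDev { default_model: Option<String> },
}

fn upgrade(provider: AssistantProviderContentV1) -> Option<LanguageModelSelection> {
    match provider {
        AssistantProviderContentV1::ZedDotDev { default_model } => {
            // With `CloudModel` gone there is nothing to parse or look up:
            // the stored string is already the model ID and is forwarded as-is.
            default_model.map(|model| LanguageModelSelection {
                provider: "zed.dev".into(),
                model,
            })
        }
    }
}

fn main() {
    let legacy = AssistantProviderContentV1::ZedDotDev {
        default_model: Some("example-model-id".to_string()),
    };
    let selection = upgrade(legacy).unwrap();
    assert_eq!(selection.provider, "zed.dev");
    assert_eq!(selection.model, "example-model-id");
}
```
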
crates/language_model/Cargo.toml

@@ -22,19 +22,16 @@ base64.workspace = true
 client.workspace = true
 collections.workspace = true
 futures.workspace = true
-google_ai = { workspace = true, features = ["schemars"] }
 gpui.workspace = true
 http_client.workspace = true
 icons.workspace = true
 image.workspace = true
-open_ai = { workspace = true, features = ["schemars"] }
 parking_lot.workspace = true
 proto.workspace = true
 schemars.workspace = true
 serde.workspace = true
 serde_json.workspace = true
 smol.workspace = true
-strum.workspace = true
 telemetry_events.workspace = true
 thiserror.workspace = true
 util.workspace = true

crates/language_model/src/model/cloud_model.rs

@@ -7,67 +7,9 @@ use gpui::{
     App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Global, ReadGlobal as _,
 };
 use proto::{Plan, TypedEnvelope};
-use schemars::JsonSchema;
-use serde::{Deserialize, Serialize};
 use smol::lock::{RwLock, RwLockUpgradableReadGuard, RwLockWriteGuard};
-use strum::EnumIter;
 use thiserror::Error;
 
-use crate::LanguageModelToolSchemaFormat;
-
-#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
-#[serde(tag = "provider", rename_all = "lowercase")]
-pub enum CloudModel {
-    Anthropic(anthropic::Model),
-    OpenAi(open_ai::Model),
-    Google(google_ai::Model),
-}
-
-#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema, EnumIter)]
-pub enum ZedModel {
-    #[serde(rename = "Qwen/Qwen2-7B-Instruct")]
-    Qwen2_7bInstruct,
-}
-
-impl Default for CloudModel {
-    fn default() -> Self {
-        Self::Anthropic(anthropic::Model::default())
-    }
-}
-
-impl CloudModel {
-    pub fn id(&self) -> &str {
-        match self {
-            Self::Anthropic(model) => model.id(),
-            Self::OpenAi(model) => model.id(),
-            Self::Google(model) => model.id(),
-        }
-    }
-
-    pub fn display_name(&self) -> &str {
-        match self {
-            Self::Anthropic(model) => model.display_name(),
-            Self::OpenAi(model) => model.display_name(),
-            Self::Google(model) => model.display_name(),
-        }
-    }
-
-    pub fn max_token_count(&self) -> usize {
-        match self {
-            Self::Anthropic(model) => model.max_token_count(),
-            Self::OpenAi(model) => model.max_token_count(),
-            Self::Google(model) => model.max_token_count(),
-        }
-    }
-
-    pub fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
-        match self {
-            Self::Anthropic(_) | Self::OpenAi(_) => LanguageModelToolSchemaFormat::JsonSchema,
-            Self::Google(_) => LanguageModelToolSchemaFormat::JsonSchemaSubset,
-        }
-    }
-}
-
 #[derive(Error, Debug)]
 pub struct PaymentRequiredError;