Decouple cloud provider from Model in Zed (#40281)

Created by David Kleingeld and Marshall Bowers

Release Notes:

- N/A

Co-authored-by: Marshall Bowers <git@maxdeviant.com>

Change summary

crates/cloud_llm_client/src/cloud_llm_client.rs |  3 ++
crates/language_models/src/provider/cloud.rs    | 20 +++++-------------
2 files changed, 9 insertions(+), 14 deletions(-)

Detailed changes

crates/cloud_llm_client/src/cloud_llm_client.rs

@@ -322,6 +322,9 @@ pub struct LanguageModel {
     pub supports_images: bool,
     pub supports_thinking: bool,
     pub supports_max_mode: bool,
+    // only used by OpenAI and xAI
+    #[serde(default)]
+    pub supports_parallel_tool_calls: bool,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
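
The #[serde(default)] attribute keeps the new field backward compatible: metadata serialized by a server that predates this change still deserializes, with supports_parallel_tool_calls falling back to false. A minimal sketch of that behavior, using a stand-in struct rather than the real cloud_llm_client::LanguageModel:

    use serde::Deserialize;

    // Stand-in for cloud_llm_client::LanguageModel, trimmed to the
    // capability flags relevant here.
    #[derive(Debug, Deserialize)]
    struct LanguageModel {
        supports_images: bool,
        supports_thinking: bool,
        supports_max_mode: bool,
        // Absent in payloads from older servers; `default` yields false
        // instead of a deserialization error.
        #[serde(default)]
        supports_parallel_tool_calls: bool,
    }

    fn main() {
        // Payload without the new field, as an older server would send it.
        let json = r#"{
            "supports_images": true,
            "supports_thinking": false,
            "supports_max_mode": false
        }"#;

        let model: LanguageModel = serde_json::from_str(json).unwrap();
        assert!(!model.supports_parallel_tool_calls);
    }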

crates/language_models/src/provider/cloud.rs

@@ -810,15 +810,11 @@ impl LanguageModel for CloudLanguageModel {
             }
             cloud_llm_client::LanguageModelProvider::OpenAi => {
                 let client = self.client.clone();
-                let model = match open_ai::Model::from_id(&self.model.id.0) {
-                    Ok(model) => model,
-                    Err(err) => return async move { Err(anyhow!(err).into()) }.boxed(),
-                };
                 let request = into_open_ai(
                     request,
-                    model.id(),
-                    model.supports_parallel_tool_calls(),
-                    model.supports_prompt_cache_key(),
+                    &self.model.id.0,
+                    self.model.supports_parallel_tool_calls,
+                    true,
                     None,
                     None,
                 );
@@ -860,15 +856,11 @@ impl LanguageModel for CloudLanguageModel {
             }
             cloud_llm_client::LanguageModelProvider::XAi => {
                 let client = self.client.clone();
-                let model = match x_ai::Model::from_id(&self.model.id.0) {
-                    Ok(model) => model,
-                    Err(err) => return async move { Err(anyhow!(err).into()) }.boxed(),
-                };
                 let request = into_open_ai(
                     request,
-                    model.id(),
-                    model.supports_parallel_tool_calls(),
-                    model.supports_prompt_cache_key(),
+                    &self.model.id.0,
+                    self.model.supports_parallel_tool_calls,
+                    false,
                     None,
                     None,
                 );
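
Two details of the new call sites are worth noting. The model id is forwarded verbatim (&self.model.id.0) instead of being re-parsed through open_ai::Model::from_id or x_ai::Model::from_id, so the cloud provider no longer fails on model ids the client binary does not know about. And since the removed client-side model lookups also supplied supports_prompt_cache_key, that flag is now a per-provider constant: true for OpenAI, false for xAI. A hedged sketch of the resulting argument plumbing (request_flags is a hypothetical helper for illustration, not part of this PR):

    // Stand-in for the server-provided model metadata.
    struct CloudModel {
        id: String,
        supports_parallel_tool_calls: bool,
    }

    // Hypothetical condensation of the arguments now passed to into_open_ai:
    // the id comes straight from the metadata, parallel tool call support is
    // server-driven, and prompt-cache-key support is a per-provider constant.
    fn request_flags(model: &CloudModel, provider_is_openai: bool) -> (&str, bool, bool) {
        (
            &model.id,
            model.supports_parallel_tool_calls,
            provider_is_openai, // supports_prompt_cache_key
        )
    }

The practical upshot, assuming the server keeps its model metadata current, is that newly released OpenAI and xAI models become usable through Zed's cloud provider without requiring a client-side release.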