language_models: Pass up `mode` from the `LanguageModelRequest` (#29552)

Marshall Bowers created

This PR passes up the `mode` from the `LanguageModelRequest` when
interacting with the Zed provider, instead of sending a hard-coded value.
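
A minimal, self-contained sketch of the idea, using hypothetical simplified types rather than the real `zed_llm_client` API: the completion body now carries whatever `mode` the caller set on the request, where it previously always sent the "max" mode.

```rust
// Hypothetical types for illustration only; the real LanguageModelRequest
// and CompletionBody in Zed have many more fields.
#[derive(Clone, Copy, Debug, PartialEq)]
enum CompletionMode {
    Normal,
    Max,
}

struct LanguageModelRequest {
    mode: Option<CompletionMode>,
}

struct CompletionBody {
    mode: Option<CompletionMode>,
}

fn build_body(request: &LanguageModelRequest) -> CompletionBody {
    CompletionBody {
        // Before this change: mode: Some(CompletionMode::Max)
        // After: forward whatever the request specified.
        mode: request.mode,
    }
}

fn main() {
    let normal = LanguageModelRequest { mode: Some(CompletionMode::Normal) };
    assert_eq!(build_body(&normal).mode, Some(CompletionMode::Normal));

    let max = LanguageModelRequest { mode: Some(CompletionMode::Max) };
    assert_eq!(build_body(&max).mode, Some(CompletionMode::Max));
}
```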

Release Notes:

- N/A

Change summary

crates/language_models/src/provider/cloud.rs | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)

Detailed changes

crates/language_models/src/provider/cloud.rs

@@ -35,7 +35,7 @@ use strum::IntoEnumIterator;
 use thiserror::Error;
 use ui::{TintColor, prelude::*};
 use zed_llm_client::{
-    CURRENT_PLAN_HEADER_NAME, CompletionBody, CompletionMode, CountTokensBody, CountTokensResponse,
+    CURRENT_PLAN_HEADER_NAME, CompletionBody, CountTokensBody, CountTokensResponse,
     EXPIRED_LLM_TOKEN_HEADER_NAME, MAX_LLM_MONTHLY_SPEND_REACHED_HEADER_NAME,
     MODEL_REQUESTS_RESOURCE_HEADER_VALUE, SUBSCRIPTION_LIMIT_RESOURCE_HEADER_NAME,
 };
@@ -769,6 +769,7 @@ impl LanguageModel for CloudLanguageModel {
     > {
         let thread_id = request.thread_id.clone();
         let prompt_id = request.prompt_id.clone();
+        let mode = request.mode;
         match &self.model {
             CloudModel::Anthropic(model) => {
                 let request = into_anthropic(
@@ -787,7 +788,7 @@ impl LanguageModel for CloudLanguageModel {
                         CompletionBody {
                             thread_id,
                             prompt_id,
-                            mode: Some(CompletionMode::Max),
+                            mode,
                             provider: zed_llm_client::LanguageModelProvider::Anthropic,
                             model: request.model.clone(),
                             provider_request: serde_json::to_value(&request)?,
@@ -834,7 +835,7 @@ impl LanguageModel for CloudLanguageModel {
                         CompletionBody {
                             thread_id,
                             prompt_id,
-                            mode: Some(CompletionMode::Max),
+                            mode,
                             provider: zed_llm_client::LanguageModelProvider::OpenAi,
                             model: request.model.clone(),
                             provider_request: serde_json::to_value(&request)?,
@@ -865,7 +866,7 @@ impl LanguageModel for CloudLanguageModel {
                         CompletionBody {
                             thread_id,
                             prompt_id,
-                            mode: Some(CompletionMode::Max),
+                            mode,
                             provider: zed_llm_client::LanguageModelProvider::Google,
                             model: request.model.clone(),
                             provider_request: serde_json::to_value(&request)?,