language_models: Fix `grok-code-fast-1` support for Copilot (#37116)

Umesh Yadav created

This PR fixes a deserialization issue in GitHub Copilot Chat that caused
warnings when xAI models were encountered in the GitHub Copilot API
response, and caused the Grok model to be omitted from the model selector.

Release Notes:

- Fixed support for xAI models that are now available through GitHub
Copilot Chat.

Change summary

crates/copilot/src/copilot_chat.rs                  |  2 ++
crates/language_models/src/provider/copilot_chat.rs | 10 +++++++++-
2 files changed, 11 insertions(+), 1 deletion(-)

Detailed changes

crates/copilot/src/copilot_chat.rs 🔗

@@ -164,6 +164,8 @@ pub enum ModelVendor {
     OpenAI,
     Google,
     Anthropic,
+    #[serde(rename = "xAI")]
+    XAI,
 }
 
 #[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]

crates/language_models/src/provider/copilot_chat.rs 🔗

@@ -32,6 +32,8 @@ use std::time::Duration;
 use ui::prelude::*;
 use util::debug_panic;
 
+use crate::provider::x_ai::count_xai_tokens;
+
 use super::anthropic::count_anthropic_tokens;
 use super::google::count_google_tokens;
 use super::open_ai::count_open_ai_tokens;
@@ -228,7 +230,9 @@ impl LanguageModel for CopilotChatLanguageModel {
             ModelVendor::OpenAI | ModelVendor::Anthropic => {
                 LanguageModelToolSchemaFormat::JsonSchema
             }
-            ModelVendor::Google => LanguageModelToolSchemaFormat::JsonSchemaSubset,
+            ModelVendor::Google | ModelVendor::XAI => {
+                LanguageModelToolSchemaFormat::JsonSchemaSubset
+            }
         }
     }
 
@@ -256,6 +260,10 @@ impl LanguageModel for CopilotChatLanguageModel {
         match self.model.vendor() {
             ModelVendor::Anthropic => count_anthropic_tokens(request, cx),
             ModelVendor::Google => count_google_tokens(request, cx),
+            ModelVendor::XAI => {
+                let model = x_ai::Model::from_id(self.model.id()).unwrap_or_default();
+                count_xai_tokens(request, model, cx)
+            }
             ModelVendor::OpenAI => {
                 let model = open_ai::Model::from_id(self.model.id()).unwrap_or_default();
                 count_open_ai_tokens(request, model, cx)