copilot_chat.rs

  1use std::path::PathBuf;
  2use std::sync::Arc;
  3use std::sync::OnceLock;
  4
  5use anyhow::Context as _;
  6use anyhow::{Result, anyhow};
  7use chrono::DateTime;
  8use collections::HashSet;
  9use fs::Fs;
 10use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
 11use gpui::WeakEntity;
 12use gpui::{App, AsyncApp, Global, prelude::*};
 13use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
 14use itertools::Itertools;
 15use paths::home_dir;
 16use serde::{Deserialize, Serialize};
 17use settings::watch_config_dir;
 18
 19pub const COPILOT_OAUTH_ENV_VAR: &str = "GH_COPILOT_TOKEN";
 20
/// Endpoint configuration for Copilot Chat; all three URLs are consumed by
/// the request helpers below (chat completions, token exchange, model list).
#[derive(Default, Clone, Debug, PartialEq)]
pub struct CopilotChatSettings {
    pub api_url: Arc<str>,
    pub auth_url: Arc<str>,
    pub models_url: Arc<str>,
}
 27
// Copilot's base model, as defined by Microsoft in the premium-requests table.
// `get_models` moves this model to the front of the list, so it is used for
// 'fast' requests (e.g. title generation).
// https://docs.github.com/en/copilot/managing-copilot/monitoring-usage-and-entitlements/about-premium-requests
const DEFAULT_MODEL_ID: &str = "gpt-4.1";
 33
/// Chat participant role; serialized lowercase to match the wire format.
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
    System,
}
 41
/// Envelope for the model-list endpoint; individual malformed entries in
/// `data` are skipped rather than failing the whole response.
#[derive(Deserialize)]
struct ModelSchema {
    #[serde(deserialize_with = "deserialize_models_skip_errors")]
    data: Vec<Model>,
}
 47
 48fn deserialize_models_skip_errors<'de, D>(deserializer: D) -> Result<Vec<Model>, D::Error>
 49where
 50    D: serde::Deserializer<'de>,
 51{
 52    let raw_values = Vec::<serde_json::Value>::deserialize(deserializer)?;
 53    let models = raw_values
 54        .into_iter()
 55        .filter_map(|value| match serde_json::from_value::<Model>(value) {
 56            Ok(model) => Some(model),
 57            Err(err) => {
 58                log::warn!("GitHub Copilot Chat model failed to deserialize: {:?}", err);
 59                None
 60            }
 61        })
 62        .collect();
 63
 64    Ok(models)
 65}
 66
/// A model entry from the Copilot model catalog. Fields are private;
/// callers use the accessors on `impl Model`.
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct Model {
    capabilities: ModelCapabilities,
    id: String,
    name: String,
    // Present only for models that must be enabled in the GitHub dashboard.
    policy: Option<ModelPolicy>,
    vendor: ModelVendor,
    model_picker_enabled: bool,
}
 76
/// Capability block of a model: its family (used for deduplication),
/// token limits, and supported features.
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelCapabilities {
    family: String,
    #[serde(default)]
    limits: ModelLimits,
    supports: ModelSupportedFeatures,
}
 84
/// Token limits for a model; every field defaults to 0 when the API omits it.
#[derive(Default, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelLimits {
    #[serde(default)]
    max_context_window_tokens: usize,
    #[serde(default)]
    max_output_tokens: usize,
    #[serde(default)]
    max_prompt_tokens: usize,
}
 94
/// Dashboard opt-in state for gated models; `get_models` keeps only models
/// whose state is "enabled".
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelPolicy {
    state: String,
}
 99
/// Feature flags for a model; absent flags default to false.
#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelSupportedFeatures {
    #[serde(default)]
    streaming: bool,
    #[serde(default)]
    tool_calls: bool,
    #[serde(default)]
    parallel_tool_calls: bool,
    #[serde(default)]
    vision: bool,
}
111
/// Upstream vendor of a Copilot-served model.
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub enum ModelVendor {
    // Azure OpenAI should have no functional difference from OpenAI in Copilot Chat
    #[serde(alias = "Azure OpenAI")]
    OpenAI,
    Google,
    Anthropic,
}
120
/// One part of a multipart message body, discriminated by the JSON `type` field.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
pub enum ChatMessagePart {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "image_url")]
    Image { image_url: ImageUrl },
}
129
/// Wrapper for an image reference inside a multipart message.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ImageUrl {
    pub url: String,
}
134
135impl Model {
136    pub fn uses_streaming(&self) -> bool {
137        self.capabilities.supports.streaming
138    }
139
140    pub fn id(&self) -> &str {
141        self.id.as_str()
142    }
143
144    pub fn display_name(&self) -> &str {
145        self.name.as_str()
146    }
147
148    pub fn max_token_count(&self) -> usize {
149        self.capabilities.limits.max_prompt_tokens
150    }
151
152    pub fn supports_tools(&self) -> bool {
153        self.capabilities.supports.tool_calls
154    }
155
156    pub fn vendor(&self) -> ModelVendor {
157        self.vendor
158    }
159
160    pub fn supports_vision(&self) -> bool {
161        self.capabilities.supports.vision
162    }
163
164    pub fn supports_parallel_tool_calls(&self) -> bool {
165        self.capabilities.supports.parallel_tool_calls
166    }
167}
168
/// Request body for the chat completions endpoint.
#[derive(Serialize, Deserialize)]
pub struct Request {
    pub intent: bool,
    pub n: usize,
    pub stream: bool,
    pub temperature: f32,
    pub model: String,
    pub messages: Vec<ChatMessage>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<Tool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
}
182
/// A callable function declaration offered to the model; `parameters` is a
/// JSON schema describing the arguments.
#[derive(Serialize, Deserialize)]
pub struct Function {
    pub name: String,
    pub description: String,
    pub parameters: serde_json::Value,
}
189
/// A tool the model may invoke; currently only function tools exist.
#[derive(Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Tool {
    Function { function: Function },
}
195
/// Constraint on whether/which tool the model should call.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ToolChoice {
    Auto,
    Any,
    None,
}
203
/// A conversation message, discriminated on the wire by the `role` field.
#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum ChatMessage {
    Assistant {
        content: ChatMessageContent,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        tool_calls: Vec<ToolCall>,
    },
    User {
        content: ChatMessageContent,
    },
    System {
        content: String,
    },
    Tool {
        content: ChatMessageContent,
        tool_call_id: String,
    },
}
223
/// Message body: either a bare string or a list of typed parts.
/// `untagged` means serde picks whichever variant matches the JSON shape.
#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ChatMessageContent {
    Plain(String),
    Multipart(Vec<ChatMessagePart>),
}
230
231impl ChatMessageContent {
232    pub fn empty() -> Self {
233        ChatMessageContent::Multipart(vec![])
234    }
235}
236
237impl From<Vec<ChatMessagePart>> for ChatMessageContent {
238    fn from(mut parts: Vec<ChatMessagePart>) -> Self {
239        if let [ChatMessagePart::Text { text }] = parts.as_mut_slice() {
240            ChatMessageContent::Plain(std::mem::take(text))
241        } else {
242            ChatMessageContent::Multipart(parts)
243        }
244    }
245}
246
247impl From<String> for ChatMessageContent {
248    fn from(text: String) -> Self {
249        ChatMessageContent::Plain(text)
250    }
251}
252
/// A completed tool call emitted by the assistant; `content` is flattened
/// so its fields sit alongside `id` in the JSON object.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCall {
    pub id: String,
    #[serde(flatten)]
    pub content: ToolCallContent,
}
259
/// Payload of a tool call, discriminated by the JSON `type` field.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolCallContent {
    Function { function: FunctionContent },
}
265
/// A function invocation: name plus raw JSON-encoded argument string.
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionContent {
    pub name: String,
    pub arguments: String,
}
271
272#[derive(Deserialize, Debug)]
273#[serde(tag = "type", rename_all = "snake_case")]
274pub struct ResponseEvent {
275    pub choices: Vec<ResponseChoice>,
276    pub id: String,
277}
278
/// One choice within a response event. Streaming responses populate `delta`;
/// non-streaming responses populate `message` (see `stream_completion`).
#[derive(Debug, Deserialize)]
pub struct ResponseChoice {
    pub index: usize,
    pub finish_reason: Option<String>,
    pub delta: Option<ResponseDelta>,
    pub message: Option<ResponseDelta>,
}
286
/// Incremental (or complete) message content within a choice.
#[derive(Debug, Deserialize)]
pub struct ResponseDelta {
    pub content: Option<String>,
    pub role: Option<Role>,
    #[serde(default)]
    pub tool_calls: Vec<ToolCallChunk>,
}
294
/// A fragment of a tool call; chunks with the same `index` belong to the
/// same call and are accumulated by the consumer.
#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCallChunk {
    pub index: usize,
    pub id: Option<String>,
    pub function: Option<FunctionChunk>,
}
301
/// Partial function-call data inside a tool-call chunk; `arguments` arrives
/// as concatenatable string fragments.
#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionChunk {
    pub name: Option<String>,
    pub arguments: Option<String>,
}
307
/// Wire shape of the token-exchange response; `expires_at` is a Unix
/// timestamp in seconds.
#[derive(Deserialize)]
struct ApiTokenResponse {
    token: String,
    expires_at: i64,
}
313
/// A short-lived Copilot API token with its parsed expiry time.
#[derive(Clone)]
struct ApiToken {
    api_key: String,
    expires_at: DateTime<chrono::Utc>,
}
319
320impl ApiToken {
321    pub fn remaining_seconds(&self) -> i64 {
322        self.expires_at
323            .timestamp()
324            .saturating_sub(chrono::Utc::now().timestamp())
325    }
326}
327
328impl TryFrom<ApiTokenResponse> for ApiToken {
329    type Error = anyhow::Error;
330
331    fn try_from(response: ApiTokenResponse) -> Result<Self, Self::Error> {
332        let expires_at =
333            DateTime::from_timestamp(response.expires_at, 0).context("invalid expires_at")?;
334
335        Ok(Self {
336            api_key: response.token,
337            expires_at,
338        })
339    }
340}
341
/// Newtype registered as the app-wide global holding the CopilotChat entity.
struct GlobalCopilotChat(gpui::Entity<CopilotChat>);

impl Global for GlobalCopilotChat {}
345
/// Client state for Copilot Chat: the long-lived OAuth token, the cached
/// short-lived API token, endpoint settings, and the fetched model list.
pub struct CopilotChat {
    oauth_token: Option<String>,
    api_token: Option<ApiToken>,
    settings: CopilotChatSettings,
    models: Option<Vec<Model>>,
    client: Arc<dyn HttpClient>,
}
353
354pub fn init(fs: Arc<dyn Fs>, client: Arc<dyn HttpClient>, cx: &mut App) {
355    let copilot_chat = cx.new(|cx| CopilotChat::new(fs, client, cx));
356    cx.set_global(GlobalCopilotChat(copilot_chat));
357}
358
359pub fn copilot_chat_config_dir() -> &'static PathBuf {
360    static COPILOT_CHAT_CONFIG_DIR: OnceLock<PathBuf> = OnceLock::new();
361
362    COPILOT_CHAT_CONFIG_DIR.get_or_init(|| {
363        if cfg!(target_os = "windows") {
364            home_dir().join("AppData").join("Local")
365        } else {
366            home_dir().join(".config")
367        }
368        .join("github-copilot")
369    })
370}
371
372fn copilot_chat_config_paths() -> [PathBuf; 2] {
373    let base_dir = copilot_chat_config_dir();
374    [base_dir.join("hosts.json"), base_dir.join("apps.json")]
375}
376
impl CopilotChat {
    /// Returns the app-wide CopilotChat entity, if `init` has been called.
    pub fn global(cx: &App) -> Option<gpui::Entity<Self>> {
        cx.try_global::<GlobalCopilotChat>()
            .map(|model| model.0.clone())
    }

    /// Builds the entity and spawns a watcher over Copilot's config
    /// directory so OAuth credentials written by other Copilot clients
    /// (hosts.json / apps.json) are picked up automatically.
    fn new(fs: Arc<dyn Fs>, client: Arc<dyn HttpClient>, cx: &mut Context<Self>) -> Self {
        let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
        let dir_path = copilot_chat_config_dir();
        let settings = CopilotChatSettings::default();
        cx.spawn(async move |this, cx| {
            let mut parent_watch_rx = watch_config_dir(
                cx.background_executor(),
                fs.clone(),
                dir_path.clone(),
                config_paths,
            );
            // On every config change: re-extract the OAuth token, store it,
            // and (if present) refresh the API token and model list.
            while let Some(contents) = parent_watch_rx.next().await {
                let oauth_token = extract_oauth_token(contents);

                this.update(cx, |this, cx| {
                    this.oauth_token = oauth_token.clone();
                    cx.notify();
                })?;

                if oauth_token.is_some() {
                    Self::update_models(&this, cx).await?;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        // An env-var token takes effect immediately, before any config file
        // event arrives.
        let this = Self {
            oauth_token: std::env::var(COPILOT_OAUTH_ENV_VAR).ok(),
            api_token: None,
            models: None,
            settings,
            client,
        };
        if this.oauth_token.is_some() {
            cx.spawn(async move |this, mut cx| Self::update_models(&this, &mut cx).await)
                .detach_and_log_err(cx);
        }

        this
    }

    /// Exchanges the stored OAuth token for an API token, then fetches and
    /// stores the model list. Errors if no OAuth token is present.
    async fn update_models(this: &WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
        let (oauth_token, client, auth_url) = this.read_with(cx, |this, _| {
            (
                this.oauth_token.clone(),
                this.client.clone(),
                this.settings.auth_url.clone(),
            )
        })?;
        let api_token = request_api_token(
            &oauth_token.ok_or_else(|| {
                anyhow!("OAuth token is missing while updating Copilot Chat models")
            })?,
            auth_url,
            client.clone(),
        )
        .await?;

        // Store the fresh API token before the (potentially slow) model
        // fetch so other callers can reuse it.
        let models_url = this.update(cx, |this, cx| {
            this.api_token = Some(api_token.clone());
            cx.notify();
            this.settings.models_url.clone()
        })?;
        let models = get_models(models_url, api_token.api_key, client.clone()).await?;

        this.update(cx, |this, cx| {
            this.models = Some(models);
            cx.notify();
        })?;
        anyhow::Ok(())
    }

    /// True when an OAuth token is available (env var or config file).
    pub fn is_authenticated(&self) -> bool {
        self.oauth_token.is_some()
    }

    /// The fetched model list, or None if it hasn't loaded yet.
    pub fn models(&self) -> Option<&[Model]> {
        self.models.as_deref()
    }

    /// Sends a completion request via the global entity, refreshing the API
    /// token first when it is missing or within 5 minutes of expiry.
    pub async fn stream_completion(
        request: Request,
        mut cx: AsyncApp,
    ) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
        let this = cx
            .update(|cx| Self::global(cx))
            .ok()
            .flatten()
            .context("Copilot chat is not enabled")?;

        let (oauth_token, api_token, client, api_url, auth_url) =
            this.read_with(&cx, |this, _| {
                (
                    this.oauth_token.clone(),
                    this.api_token.clone(),
                    this.client.clone(),
                    this.settings.api_url.clone(),
                    this.settings.auth_url.clone(),
                )
            })?;

        let oauth_token = oauth_token.context("No OAuth token available")?;

        // Reuse the cached token only while it has more than 5 minutes left;
        // otherwise exchange the OAuth token for a new one and cache it.
        let token = match api_token {
            Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
            _ => {
                let token = request_api_token(&oauth_token, auth_url, client.clone()).await?;
                this.update(&mut cx, |this, cx| {
                    this.api_token = Some(token.clone());
                    cx.notify();
                })?;
                token
            }
        };

        stream_completion(client.clone(), token.api_key, api_url, request).await
    }

    /// Replaces endpoint settings; a change triggers a background model
    /// refresh (errors are dropped — update_models fails if no OAuth token).
    pub fn set_settings(&mut self, settings: CopilotChatSettings, cx: &mut Context<Self>) {
        let same_settings = self.settings == settings;
        self.settings = settings;
        if !same_settings {
            cx.spawn(async move |this, cx| {
                Self::update_models(&this, cx).await?;
                Ok::<_, anyhow::Error>(())
            })
            .detach();
        }
    }
}
514
515async fn get_models(
516    models_url: Arc<str>,
517    api_token: String,
518    client: Arc<dyn HttpClient>,
519) -> Result<Vec<Model>> {
520    let all_models = request_models(models_url, api_token, client).await?;
521
522    let mut models: Vec<Model> = all_models
523        .into_iter()
524        .filter(|model| {
525            // Ensure user has access to the model; Policy is present only for models that must be
526            // enabled in the GitHub dashboard
527            model.model_picker_enabled
528                && model
529                    .policy
530                    .as_ref()
531                    .is_none_or(|policy| policy.state == "enabled")
532        })
533        // The first model from the API response, in any given family, appear to be the non-tagged
534        // models, which are likely the best choice (e.g. gpt-4o rather than gpt-4o-2024-11-20)
535        .dedup_by(|a, b| a.capabilities.family == b.capabilities.family)
536        .collect();
537
538    if let Some(default_model_position) =
539        models.iter().position(|model| model.id == DEFAULT_MODEL_ID)
540    {
541        let default_model = models.remove(default_model_position);
542        models.insert(0, default_model);
543    }
544
545    Ok(models)
546}
547
548async fn request_models(
549    models_url: Arc<str>,
550    api_token: String,
551    client: Arc<dyn HttpClient>,
552) -> Result<Vec<Model>> {
553    let request_builder = HttpRequest::builder()
554        .method(Method::GET)
555        .uri(models_url.as_ref())
556        .header("Authorization", format!("Bearer {}", api_token))
557        .header("Content-Type", "application/json")
558        .header("Copilot-Integration-Id", "vscode-chat");
559
560    let request = request_builder.body(AsyncBody::empty())?;
561
562    let mut response = client.send(request).await?;
563
564    anyhow::ensure!(
565        response.status().is_success(),
566        "Failed to request models: {}",
567        response.status()
568    );
569    let mut body = Vec::new();
570    response.body_mut().read_to_end(&mut body).await?;
571
572    let body_str = std::str::from_utf8(&body)?;
573
574    let models = serde_json::from_str::<ModelSchema>(body_str)?.data;
575
576    Ok(models)
577}
578
579async fn request_api_token(
580    oauth_token: &str,
581    auth_url: Arc<str>,
582    client: Arc<dyn HttpClient>,
583) -> Result<ApiToken> {
584    let request_builder = HttpRequest::builder()
585        .method(Method::GET)
586        .uri(auth_url.as_ref())
587        .header("Authorization", format!("token {}", oauth_token))
588        .header("Accept", "application/json");
589
590    let request = request_builder.body(AsyncBody::empty())?;
591
592    let mut response = client.send(request).await?;
593
594    if response.status().is_success() {
595        let mut body = Vec::new();
596        response.body_mut().read_to_end(&mut body).await?;
597
598        let body_str = std::str::from_utf8(&body)?;
599
600        let parsed: ApiTokenResponse = serde_json::from_str(body_str)?;
601        ApiToken::try_from(parsed)
602    } else {
603        let mut body = Vec::new();
604        response.body_mut().read_to_end(&mut body).await?;
605
606        let body_str = std::str::from_utf8(&body)?;
607        anyhow::bail!("Failed to request API token: {body_str}");
608    }
609}
610
611fn extract_oauth_token(contents: String) -> Option<String> {
612    serde_json::from_str::<serde_json::Value>(&contents)
613        .map(|v| {
614            v.as_object().and_then(|obj| {
615                obj.iter().find_map(|(key, value)| {
616                    if key.starts_with("github.com") {
617                        value["oauth_token"].as_str().map(|v| v.to_string())
618                    } else {
619                        None
620                    }
621                })
622            })
623        })
624        .ok()
625        .flatten()
626}
627
/// POSTs a chat completion request. When `request.stream` is true, returns
/// the parsed SSE event stream; otherwise wraps the single response in a
/// one-element stream so callers handle both modes uniformly.
async fn stream_completion(
    client: Arc<dyn HttpClient>,
    api_key: String,
    completion_url: Arc<str>,
    request: Request,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
    // NOTE(review): only the *last* message is inspected for image parts when
    // deciding to send the vision header — confirm earlier image-bearing
    // messages in the conversation don't also require it.
    let is_vision_request = request.messages.last().map_or(false, |message| match message {
        ChatMessage::User { content }
        | ChatMessage::Assistant { content, .. }
        | ChatMessage::Tool { content, .. } => {
            matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
        }
        // System messages carry plain strings and can never hold images.
        _ => false,
    });

    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(completion_url.as_ref())
        .header(
            "Editor-Version",
            format!(
                "Zed/{}",
                option_env!("CARGO_PKG_VERSION").unwrap_or("unknown")
            ),
        )
        .header("Authorization", format!("Bearer {}", api_key))
        .header("Content-Type", "application/json")
        .header("Copilot-Integration-Id", "vscode-chat")
        .header("Copilot-Vision-Request", is_vision_request.to_string());

    // Captured before `request` is consumed by serialization below.
    let is_streaming = request.stream;

    let json = serde_json::to_string(&request)?;
    let request = request_builder.body(AsyncBody::from(json))?;
    let mut response = client.send(request).await?;

    if !response.status().is_success() {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;
        let body_str = std::str::from_utf8(&body)?;
        anyhow::bail!(
            "Failed to connect to API: {} {}",
            response.status(),
            body_str
        );
    }

    if is_streaming {
        // Parse the SSE stream line by line: strip the "data: " prefix,
        // stop at the "[DONE]" sentinel, and drop empty-choice keep-alives.
        let reader = BufReader::new(response.into_body());
        Ok(reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        // Non-data lines (comments, blank keep-alives) yield
                        // None here and are filtered out.
                        let line = line.strip_prefix("data: ")?;
                        if line.starts_with("[DONE]") {
                            return None;
                        }

                        match serde_json::from_str::<ResponseEvent>(line) {
                            Ok(response) => {
                                if response.choices.is_empty() {
                                    None
                                } else {
                                    Some(Ok(response))
                                }
                            }
                            Err(error) => Some(Err(anyhow!(error))),
                        }
                    }
                    Err(error) => Some(Err(anyhow!(error))),
                }
            })
            .boxed())
    } else {
        // Non-streaming: read the whole body and emit it as one event.
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;
        let body_str = std::str::from_utf8(&body)?;
        let response: ResponseEvent = serde_json::from_str(body_str)?;

        Ok(futures::stream::once(async move { Ok(response) }).boxed())
    }
}
711
#[cfg(test)]
mod tests {
    use super::*;

    /// Verifies that one malformed entry in the model list is skipped while
    /// the valid entries (including unknown extra fields) still parse.
    #[test]
    fn test_resilient_model_schema_deserialize() {
        let json = r#"{
              "data": [
                {
                  "capabilities": {
                    "family": "gpt-4",
                    "limits": {
                      "max_context_window_tokens": 32768,
                      "max_output_tokens": 4096,
                      "max_prompt_tokens": 32768
                    },
                    "object": "model_capabilities",
                    "supports": { "streaming": true, "tool_calls": true },
                    "tokenizer": "cl100k_base",
                    "type": "chat"
                  },
                  "id": "gpt-4",
                  "model_picker_enabled": false,
                  "name": "GPT 4",
                  "object": "model",
                  "preview": false,
                  "vendor": "Azure OpenAI",
                  "version": "gpt-4-0613"
                },
                {
                    "some-unknown-field": 123
                },
                {
                  "capabilities": {
                    "family": "claude-3.7-sonnet",
                    "limits": {
                      "max_context_window_tokens": 200000,
                      "max_output_tokens": 16384,
                      "max_prompt_tokens": 90000,
                      "vision": {
                        "max_prompt_image_size": 3145728,
                        "max_prompt_images": 1,
                        "supported_media_types": ["image/jpeg", "image/png", "image/webp"]
                      }
                    },
                    "object": "model_capabilities",
                    "supports": {
                      "parallel_tool_calls": true,
                      "streaming": true,
                      "tool_calls": true,
                      "vision": true
                    },
                    "tokenizer": "o200k_base",
                    "type": "chat"
                  },
                  "id": "claude-3.7-sonnet",
                  "model_picker_enabled": true,
                  "name": "Claude 3.7 Sonnet",
                  "object": "model",
                  "policy": {
                    "state": "enabled",
                    "terms": "Enable access to the latest Claude 3.7 Sonnet model from Anthropic. [Learn more about how GitHub Copilot serves Claude 3.7 Sonnet](https://docs.github.com/copilot/using-github-copilot/using-claude-sonnet-in-github-copilot)."
                  },
                  "preview": false,
                  "vendor": "Anthropic",
                  "version": "claude-3.7-sonnet"
                }
              ],
              "object": "list"
            }"#;

        let schema: ModelSchema = serde_json::from_str(&json).unwrap();

        // The middle (malformed) entry is dropped; the other two survive.
        assert_eq!(schema.data.len(), 2);
        assert_eq!(schema.data[0].id, "gpt-4");
        assert_eq!(schema.data[1].id, "claude-3.7-sonnet");
    }
}