copilot_chat.rs

  1use std::path::PathBuf;
  2use std::sync::Arc;
  3use std::sync::OnceLock;
  4
  5use anyhow::Context as _;
  6use anyhow::{Result, anyhow};
  7use chrono::DateTime;
  8use collections::HashSet;
  9use fs::Fs;
 10use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
 11use gpui::WeakEntity;
 12use gpui::{App, AsyncApp, Global, prelude::*};
 13use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
 14use itertools::Itertools;
 15use paths::home_dir;
 16use serde::{Deserialize, Serialize};
 17use settings::watch_config_dir;
 18
 19pub const COPILOT_OAUTH_ENV_VAR: &str = "GH_COPILOT_TOKEN";
 20
 21#[derive(Default, Clone, Debug, PartialEq)]
 22pub struct CopilotChatConfiguration {
 23    pub enterprise_uri: Option<String>,
 24}
 25
 26impl CopilotChatConfiguration {
 27    pub fn token_url(&self) -> String {
 28        if let Some(enterprise_uri) = &self.enterprise_uri {
 29            let domain = Self::parse_domain(enterprise_uri);
 30            format!("https://api.{}/copilot_internal/v2/token", domain)
 31        } else {
 32            "https://api.github.com/copilot_internal/v2/token".to_string()
 33        }
 34    }
 35
 36    pub fn oauth_domain(&self) -> String {
 37        if let Some(enterprise_uri) = &self.enterprise_uri {
 38            Self::parse_domain(enterprise_uri)
 39        } else {
 40            "github.com".to_string()
 41        }
 42    }
 43
 44    pub fn api_url_from_endpoint(&self, endpoint: &str) -> String {
 45        format!("{}/chat/completions", endpoint)
 46    }
 47
 48    pub fn models_url_from_endpoint(&self, endpoint: &str) -> String {
 49        format!("{}/models", endpoint)
 50    }
 51
 52    fn parse_domain(enterprise_uri: &str) -> String {
 53        let uri = enterprise_uri.trim_end_matches('/');
 54
 55        if let Some(domain) = uri.strip_prefix("https://") {
 56            domain.split('/').next().unwrap_or(domain).to_string()
 57        } else if let Some(domain) = uri.strip_prefix("http://") {
 58            domain.split('/').next().unwrap_or(domain).to_string()
 59        } else {
 60            uri.split('/').next().unwrap_or(uri).to_string()
 61        }
 62    }
 63}
 64
 65// Copilot's base model; defined by Microsoft in premium requests table
 66// This will be moved to the front of the Copilot model list, and will be used for
 67// 'fast' requests (e.g. title generation)
 68// https://docs.github.com/en/copilot/managing-copilot/monitoring-usage-and-entitlements/about-premium-requests
 69const DEFAULT_MODEL_ID: &str = "gpt-4.1";
 70
 71#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
 72#[serde(rename_all = "lowercase")]
 73pub enum Role {
 74    User,
 75    Assistant,
 76    System,
 77}
 78
 79#[derive(Deserialize)]
 80struct ModelSchema {
 81    #[serde(deserialize_with = "deserialize_models_skip_errors")]
 82    data: Vec<Model>,
 83}
 84
 85fn deserialize_models_skip_errors<'de, D>(deserializer: D) -> Result<Vec<Model>, D::Error>
 86where
 87    D: serde::Deserializer<'de>,
 88{
 89    let raw_values = Vec::<serde_json::Value>::deserialize(deserializer)?;
 90    let models = raw_values
 91        .into_iter()
 92        .filter_map(|value| match serde_json::from_value::<Model>(value) {
 93            Ok(model) => Some(model),
 94            Err(err) => {
 95                log::warn!("GitHub Copilot Chat model failed to deserialize: {:?}", err);
 96                None
 97            }
 98        })
 99        .collect();
100
101    Ok(models)
102}
103
104#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
105pub struct Model {
106    capabilities: ModelCapabilities,
107    id: String,
108    name: String,
109    policy: Option<ModelPolicy>,
110    vendor: ModelVendor,
111    model_picker_enabled: bool,
112}
113
114#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
115struct ModelCapabilities {
116    family: String,
117    #[serde(default)]
118    limits: ModelLimits,
119    supports: ModelSupportedFeatures,
120}
121
122#[derive(Default, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
123struct ModelLimits {
124    #[serde(default)]
125    max_context_window_tokens: usize,
126    #[serde(default)]
127    max_output_tokens: usize,
128    #[serde(default)]
129    max_prompt_tokens: usize,
130}
131
132#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
133struct ModelPolicy {
134    state: String,
135}
136
137#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
138struct ModelSupportedFeatures {
139    #[serde(default)]
140    streaming: bool,
141    #[serde(default)]
142    tool_calls: bool,
143    #[serde(default)]
144    parallel_tool_calls: bool,
145    #[serde(default)]
146    vision: bool,
147}
148
149#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
150pub enum ModelVendor {
151    // Azure OpenAI should have no functional difference from OpenAI in Copilot Chat
152    #[serde(alias = "Azure OpenAI")]
153    OpenAI,
154    Google,
155    Anthropic,
156}
157
158#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
159#[serde(tag = "type")]
160pub enum ChatMessagePart {
161    #[serde(rename = "text")]
162    Text { text: String },
163    #[serde(rename = "image_url")]
164    Image { image_url: ImageUrl },
165}
166
167#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
168pub struct ImageUrl {
169    pub url: String,
170}
171
172impl Model {
173    pub fn uses_streaming(&self) -> bool {
174        self.capabilities.supports.streaming
175    }
176
177    pub fn id(&self) -> &str {
178        self.id.as_str()
179    }
180
181    pub fn display_name(&self) -> &str {
182        self.name.as_str()
183    }
184
185    pub fn max_token_count(&self) -> usize {
186        self.capabilities.limits.max_prompt_tokens
187    }
188
189    pub fn supports_tools(&self) -> bool {
190        self.capabilities.supports.tool_calls
191    }
192
193    pub fn vendor(&self) -> ModelVendor {
194        self.vendor
195    }
196
197    pub fn supports_vision(&self) -> bool {
198        self.capabilities.supports.vision
199    }
200
201    pub fn supports_parallel_tool_calls(&self) -> bool {
202        self.capabilities.supports.parallel_tool_calls
203    }
204}
205
206#[derive(Serialize, Deserialize)]
207pub struct Request {
208    pub intent: bool,
209    pub n: usize,
210    pub stream: bool,
211    pub temperature: f32,
212    pub model: String,
213    pub messages: Vec<ChatMessage>,
214    #[serde(default, skip_serializing_if = "Vec::is_empty")]
215    pub tools: Vec<Tool>,
216    #[serde(default, skip_serializing_if = "Option::is_none")]
217    pub tool_choice: Option<ToolChoice>,
218}
219
220#[derive(Serialize, Deserialize)]
221pub struct Function {
222    pub name: String,
223    pub description: String,
224    pub parameters: serde_json::Value,
225}
226
227#[derive(Serialize, Deserialize)]
228#[serde(tag = "type", rename_all = "snake_case")]
229pub enum Tool {
230    Function { function: Function },
231}
232
233#[derive(Serialize, Deserialize)]
234#[serde(rename_all = "lowercase")]
235pub enum ToolChoice {
236    Auto,
237    Any,
238    None,
239}
240
241#[derive(Serialize, Deserialize, Debug)]
242#[serde(tag = "role", rename_all = "lowercase")]
243pub enum ChatMessage {
244    Assistant {
245        content: ChatMessageContent,
246        #[serde(default, skip_serializing_if = "Vec::is_empty")]
247        tool_calls: Vec<ToolCall>,
248    },
249    User {
250        content: ChatMessageContent,
251    },
252    System {
253        content: String,
254    },
255    Tool {
256        content: ChatMessageContent,
257        tool_call_id: String,
258    },
259}
260
261#[derive(Debug, Serialize, Deserialize)]
262#[serde(untagged)]
263pub enum ChatMessageContent {
264    Plain(String),
265    Multipart(Vec<ChatMessagePart>),
266}
267
268impl ChatMessageContent {
269    pub fn empty() -> Self {
270        ChatMessageContent::Multipart(vec![])
271    }
272}
273
274impl From<Vec<ChatMessagePart>> for ChatMessageContent {
275    fn from(mut parts: Vec<ChatMessagePart>) -> Self {
276        if let [ChatMessagePart::Text { text }] = parts.as_mut_slice() {
277            ChatMessageContent::Plain(std::mem::take(text))
278        } else {
279            ChatMessageContent::Multipart(parts)
280        }
281    }
282}
283
284impl From<String> for ChatMessageContent {
285    fn from(text: String) -> Self {
286        ChatMessageContent::Plain(text)
287    }
288}
289
290#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
291pub struct ToolCall {
292    pub id: String,
293    #[serde(flatten)]
294    pub content: ToolCallContent,
295}
296
297#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
298#[serde(tag = "type", rename_all = "lowercase")]
299pub enum ToolCallContent {
300    Function { function: FunctionContent },
301}
302
303#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
304pub struct FunctionContent {
305    pub name: String,
306    pub arguments: String,
307}
308
309#[derive(Deserialize, Debug)]
310#[serde(tag = "type", rename_all = "snake_case")]
311pub struct ResponseEvent {
312    pub choices: Vec<ResponseChoice>,
313    pub id: String,
314}
315
316#[derive(Debug, Deserialize)]
317pub struct ResponseChoice {
318    pub index: usize,
319    pub finish_reason: Option<String>,
320    pub delta: Option<ResponseDelta>,
321    pub message: Option<ResponseDelta>,
322}
323
324#[derive(Debug, Deserialize)]
325pub struct ResponseDelta {
326    pub content: Option<String>,
327    pub role: Option<Role>,
328    #[serde(default)]
329    pub tool_calls: Vec<ToolCallChunk>,
330}
331
332#[derive(Deserialize, Debug, Eq, PartialEq)]
333pub struct ToolCallChunk {
334    pub index: usize,
335    pub id: Option<String>,
336    pub function: Option<FunctionChunk>,
337}
338
339#[derive(Deserialize, Debug, Eq, PartialEq)]
340pub struct FunctionChunk {
341    pub name: Option<String>,
342    pub arguments: Option<String>,
343}
344
345#[derive(Deserialize)]
346struct ApiTokenResponse {
347    token: String,
348    expires_at: i64,
349    endpoints: ApiTokenResponseEndpoints,
350}
351
352#[derive(Deserialize)]
353struct ApiTokenResponseEndpoints {
354    api: String,
355}
356
357#[derive(Clone)]
358struct ApiToken {
359    api_key: String,
360    expires_at: DateTime<chrono::Utc>,
361    api_endpoint: String,
362}
363
364impl ApiToken {
365    pub fn remaining_seconds(&self) -> i64 {
366        self.expires_at
367            .timestamp()
368            .saturating_sub(chrono::Utc::now().timestamp())
369    }
370}
371
372impl TryFrom<ApiTokenResponse> for ApiToken {
373    type Error = anyhow::Error;
374
375    fn try_from(response: ApiTokenResponse) -> Result<Self, Self::Error> {
376        let expires_at =
377            DateTime::from_timestamp(response.expires_at, 0).context("invalid expires_at")?;
378
379        Ok(Self {
380            api_key: response.token,
381            expires_at,
382            api_endpoint: response.endpoints.api,
383        })
384    }
385}
386
387struct GlobalCopilotChat(gpui::Entity<CopilotChat>);
388
389impl Global for GlobalCopilotChat {}
390
391pub struct CopilotChat {
392    oauth_token: Option<String>,
393    api_token: Option<ApiToken>,
394    configuration: CopilotChatConfiguration,
395    models: Option<Vec<Model>>,
396    client: Arc<dyn HttpClient>,
397}
398
399pub fn init(
400    fs: Arc<dyn Fs>,
401    client: Arc<dyn HttpClient>,
402    configuration: CopilotChatConfiguration,
403    cx: &mut App,
404) {
405    let copilot_chat = cx.new(|cx| CopilotChat::new(fs, client, configuration, cx));
406    cx.set_global(GlobalCopilotChat(copilot_chat));
407}
408
409pub fn copilot_chat_config_dir() -> &'static PathBuf {
410    static COPILOT_CHAT_CONFIG_DIR: OnceLock<PathBuf> = OnceLock::new();
411
412    COPILOT_CHAT_CONFIG_DIR.get_or_init(|| {
413        if cfg!(target_os = "windows") {
414            home_dir().join("AppData").join("Local")
415        } else {
416            home_dir().join(".config")
417        }
418        .join("github-copilot")
419    })
420}
421
422fn copilot_chat_config_paths() -> [PathBuf; 2] {
423    let base_dir = copilot_chat_config_dir();
424    [base_dir.join("hosts.json"), base_dir.join("apps.json")]
425}
426
427impl CopilotChat {
428    pub fn global(cx: &App) -> Option<gpui::Entity<Self>> {
429        cx.try_global::<GlobalCopilotChat>()
430            .map(|model| model.0.clone())
431    }
432
433    fn new(
434        fs: Arc<dyn Fs>,
435        client: Arc<dyn HttpClient>,
436        configuration: CopilotChatConfiguration,
437        cx: &mut Context<Self>,
438    ) -> Self {
439        let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
440        let dir_path = copilot_chat_config_dir();
441
442        cx.spawn(async move |this, cx| {
443            let mut parent_watch_rx = watch_config_dir(
444                cx.background_executor(),
445                fs.clone(),
446                dir_path.clone(),
447                config_paths,
448            );
449            while let Some(contents) = parent_watch_rx.next().await {
450                let oauth_domain =
451                    this.read_with(cx, |this, _| this.configuration.oauth_domain())?;
452                let oauth_token = extract_oauth_token(contents, &oauth_domain);
453
454                this.update(cx, |this, cx| {
455                    this.oauth_token = oauth_token.clone();
456                    cx.notify();
457                })?;
458
459                if oauth_token.is_some() {
460                    Self::update_models(&this, cx).await?;
461                }
462            }
463            anyhow::Ok(())
464        })
465        .detach_and_log_err(cx);
466
467        let this = Self {
468            oauth_token: std::env::var(COPILOT_OAUTH_ENV_VAR).ok(),
469            api_token: None,
470            models: None,
471            configuration,
472            client,
473        };
474
475        if this.oauth_token.is_some() {
476            cx.spawn(async move |this, mut cx| Self::update_models(&this, &mut cx).await)
477                .detach_and_log_err(cx);
478        }
479
480        this
481    }
482
483    async fn update_models(this: &WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
484        let (oauth_token, client, configuration) = this.read_with(cx, |this, _| {
485            (
486                this.oauth_token.clone(),
487                this.client.clone(),
488                this.configuration.clone(),
489            )
490        })?;
491
492        let oauth_token = oauth_token
493            .ok_or_else(|| anyhow!("OAuth token is missing while updating Copilot Chat models"))?;
494
495        let token_url = configuration.token_url();
496        let api_token = request_api_token(&oauth_token, token_url.into(), client.clone()).await?;
497
498        let models_url = configuration.models_url_from_endpoint(&api_token.api_endpoint);
499        let models =
500            get_models(models_url.into(), api_token.api_key.clone(), client.clone()).await?;
501
502        this.update(cx, |this, cx| {
503            this.api_token = Some(api_token);
504            this.models = Some(models);
505            cx.notify();
506        })?;
507        anyhow::Ok(())
508    }
509
510    pub fn is_authenticated(&self) -> bool {
511        self.oauth_token.is_some()
512    }
513
514    pub fn models(&self) -> Option<&[Model]> {
515        self.models.as_deref()
516    }
517
518    pub async fn stream_completion(
519        request: Request,
520        mut cx: AsyncApp,
521    ) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
522        let this = cx
523            .update(|cx| Self::global(cx))
524            .ok()
525            .flatten()
526            .context("Copilot chat is not enabled")?;
527
528        let (oauth_token, api_token, client, configuration) = this.read_with(&cx, |this, _| {
529            (
530                this.oauth_token.clone(),
531                this.api_token.clone(),
532                this.client.clone(),
533                this.configuration.clone(),
534            )
535        })?;
536
537        let oauth_token = oauth_token.context("No OAuth token available")?;
538
539        let token = match api_token {
540            Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
541            _ => {
542                let token_url = configuration.token_url();
543                let token =
544                    request_api_token(&oauth_token, token_url.into(), client.clone()).await?;
545                this.update(&mut cx, |this, cx| {
546                    this.api_token = Some(token.clone());
547                    cx.notify();
548                })?;
549                token
550            }
551        };
552
553        let api_url = configuration.api_url_from_endpoint(&token.api_endpoint);
554        stream_completion(client.clone(), token.api_key, api_url.into(), request).await
555    }
556
557    pub fn set_configuration(
558        &mut self,
559        configuration: CopilotChatConfiguration,
560        cx: &mut Context<Self>,
561    ) {
562        let same_configuration = self.configuration == configuration;
563        self.configuration = configuration;
564        if !same_configuration {
565            self.api_token = None;
566            cx.spawn(async move |this, cx| {
567                Self::update_models(&this, cx).await?;
568                Ok::<_, anyhow::Error>(())
569            })
570            .detach();
571        }
572    }
573}
574
575async fn get_models(
576    models_url: Arc<str>,
577    api_token: String,
578    client: Arc<dyn HttpClient>,
579) -> Result<Vec<Model>> {
580    let all_models = request_models(models_url, api_token, client).await?;
581
582    let mut models: Vec<Model> = all_models
583        .into_iter()
584        .filter(|model| {
585            model.model_picker_enabled
586                && model
587                    .policy
588                    .as_ref()
589                    .is_none_or(|policy| policy.state == "enabled")
590        })
591        .dedup_by(|a, b| a.capabilities.family == b.capabilities.family)
592        .collect();
593
594    if let Some(default_model_position) =
595        models.iter().position(|model| model.id == DEFAULT_MODEL_ID)
596    {
597        let default_model = models.remove(default_model_position);
598        models.insert(0, default_model);
599    }
600
601    Ok(models)
602}
603
604async fn request_models(
605    models_url: Arc<str>,
606    api_token: String,
607    client: Arc<dyn HttpClient>,
608) -> Result<Vec<Model>> {
609    let request_builder = HttpRequest::builder()
610        .method(Method::GET)
611        .uri(models_url.as_ref())
612        .header("Authorization", format!("Bearer {}", api_token))
613        .header("Content-Type", "application/json")
614        .header("Copilot-Integration-Id", "vscode-chat");
615
616    let request = request_builder.body(AsyncBody::empty())?;
617
618    let mut response = client.send(request).await?;
619
620    anyhow::ensure!(
621        response.status().is_success(),
622        "Failed to request models: {}",
623        response.status()
624    );
625    let mut body = Vec::new();
626    response.body_mut().read_to_end(&mut body).await?;
627
628    let body_str = std::str::from_utf8(&body)?;
629
630    let models = serde_json::from_str::<ModelSchema>(body_str)?.data;
631
632    Ok(models)
633}
634
635async fn request_api_token(
636    oauth_token: &str,
637    auth_url: Arc<str>,
638    client: Arc<dyn HttpClient>,
639) -> Result<ApiToken> {
640    let request_builder = HttpRequest::builder()
641        .method(Method::GET)
642        .uri(auth_url.as_ref())
643        .header("Authorization", format!("token {}", oauth_token))
644        .header("Accept", "application/json");
645
646    let request = request_builder.body(AsyncBody::empty())?;
647
648    let mut response = client.send(request).await?;
649
650    if response.status().is_success() {
651        let mut body = Vec::new();
652        response.body_mut().read_to_end(&mut body).await?;
653
654        let body_str = std::str::from_utf8(&body)?;
655
656        let parsed: ApiTokenResponse = serde_json::from_str(body_str)?;
657        ApiToken::try_from(parsed)
658    } else {
659        let mut body = Vec::new();
660        response.body_mut().read_to_end(&mut body).await?;
661
662        let body_str = std::str::from_utf8(&body)?;
663        anyhow::bail!("Failed to request API token: {body_str}");
664    }
665}
666
667fn extract_oauth_token(contents: String, domain: &str) -> Option<String> {
668    serde_json::from_str::<serde_json::Value>(&contents)
669        .map(|v| {
670            v.as_object().and_then(|obj| {
671                obj.iter().find_map(|(key, value)| {
672                    if key.starts_with(domain) {
673                        value["oauth_token"].as_str().map(|v| v.to_string())
674                    } else {
675                        None
676                    }
677                })
678            })
679        })
680        .ok()
681        .flatten()
682}
683
684async fn stream_completion(
685    client: Arc<dyn HttpClient>,
686    api_key: String,
687    completion_url: Arc<str>,
688    request: Request,
689) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
690    let is_vision_request = request.messages.last().map_or(false, |message| match message {
691        ChatMessage::User { content }
692        | ChatMessage::Assistant { content, .. }
693        | ChatMessage::Tool { content, .. } => {
694            matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
695        }
696        _ => false,
697    });
698
699    let request_builder = HttpRequest::builder()
700        .method(Method::POST)
701        .uri(completion_url.as_ref())
702        .header(
703            "Editor-Version",
704            format!(
705                "Zed/{}",
706                option_env!("CARGO_PKG_VERSION").unwrap_or("unknown")
707            ),
708        )
709        .header("Authorization", format!("Bearer {}", api_key))
710        .header("Content-Type", "application/json")
711        .header("Copilot-Integration-Id", "vscode-chat")
712        .header("Copilot-Vision-Request", is_vision_request.to_string());
713
714    let is_streaming = request.stream;
715
716    let json = serde_json::to_string(&request)?;
717    let request = request_builder.body(AsyncBody::from(json))?;
718    let mut response = client.send(request).await?;
719
720    if !response.status().is_success() {
721        let mut body = Vec::new();
722        response.body_mut().read_to_end(&mut body).await?;
723        let body_str = std::str::from_utf8(&body)?;
724        anyhow::bail!(
725            "Failed to connect to API: {} {}",
726            response.status(),
727            body_str
728        );
729    }
730
731    if is_streaming {
732        let reader = BufReader::new(response.into_body());
733        Ok(reader
734            .lines()
735            .filter_map(|line| async move {
736                match line {
737                    Ok(line) => {
738                        let line = line.strip_prefix("data: ")?;
739                        if line.starts_with("[DONE]") {
740                            return None;
741                        }
742
743                        match serde_json::from_str::<ResponseEvent>(line) {
744                            Ok(response) => {
745                                if response.choices.is_empty() {
746                                    None
747                                } else {
748                                    Some(Ok(response))
749                                }
750                            }
751                            Err(error) => Some(Err(anyhow!(error))),
752                        }
753                    }
754                    Err(error) => Some(Err(anyhow!(error))),
755                }
756            })
757            .boxed())
758    } else {
759        let mut body = Vec::new();
760        response.body_mut().read_to_end(&mut body).await?;
761        let body_str = std::str::from_utf8(&body)?;
762        let response: ResponseEvent = serde_json::from_str(body_str)?;
763
764        Ok(futures::stream::once(async move { Ok(response) }).boxed())
765    }
766}
767
768#[cfg(test)]
769mod tests {
770    use super::*;
771
772    #[test]
773    fn test_resilient_model_schema_deserialize() {
774        let json = r#"{
775              "data": [
776                {
777                  "capabilities": {
778                    "family": "gpt-4",
779                    "limits": {
780                      "max_context_window_tokens": 32768,
781                      "max_output_tokens": 4096,
782                      "max_prompt_tokens": 32768
783                    },
784                    "object": "model_capabilities",
785                    "supports": { "streaming": true, "tool_calls": true },
786                    "tokenizer": "cl100k_base",
787                    "type": "chat"
788                  },
789                  "id": "gpt-4",
790                  "model_picker_enabled": false,
791                  "name": "GPT 4",
792                  "object": "model",
793                  "preview": false,
794                  "vendor": "Azure OpenAI",
795                  "version": "gpt-4-0613"
796                },
797                {
798                    "some-unknown-field": 123
799                },
800                {
801                  "capabilities": {
802                    "family": "claude-3.7-sonnet",
803                    "limits": {
804                      "max_context_window_tokens": 200000,
805                      "max_output_tokens": 16384,
806                      "max_prompt_tokens": 90000,
807                      "vision": {
808                        "max_prompt_image_size": 3145728,
809                        "max_prompt_images": 1,
810                        "supported_media_types": ["image/jpeg", "image/png", "image/webp"]
811                      }
812                    },
813                    "object": "model_capabilities",
814                    "supports": {
815                      "parallel_tool_calls": true,
816                      "streaming": true,
817                      "tool_calls": true,
818                      "vision": true
819                    },
820                    "tokenizer": "o200k_base",
821                    "type": "chat"
822                  },
823                  "id": "claude-3.7-sonnet",
824                  "model_picker_enabled": true,
825                  "name": "Claude 3.7 Sonnet",
826                  "object": "model",
827                  "policy": {
828                    "state": "enabled",
829                    "terms": "Enable access to the latest Claude 3.7 Sonnet model from Anthropic. [Learn more about how GitHub Copilot serves Claude 3.7 Sonnet](https://docs.github.com/copilot/using-github-copilot/using-claude-sonnet-in-github-copilot)."
830                  },
831                  "preview": false,
832                  "vendor": "Anthropic",
833                  "version": "claude-3.7-sonnet"
834                }
835              ],
836              "object": "list"
837            }"#;
838
839        let schema: ModelSchema = serde_json::from_str(&json).unwrap();
840
841        assert_eq!(schema.data.len(), 2);
842        assert_eq!(schema.data[0].id, "gpt-4");
843        assert_eq!(schema.data[1].id, "claude-3.7-sonnet");
844    }
845}