copilot_chat.rs

use std::path::PathBuf;
use std::sync::Arc;
use std::sync::OnceLock;

use anyhow::Context as _;
use anyhow::{Result, anyhow};
use chrono::DateTime;
use collections::HashSet;
use fs::Fs;
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use gpui::WeakEntity;
use gpui::{App, AsyncApp, Global, prelude::*};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use itertools::Itertools;
use paths::home_dir;
use serde::{Deserialize, Serialize};
use settings::watch_config_dir;

pub const COPILOT_OAUTH_ENV_VAR: &str = "GH_COPILOT_TOKEN";

/// Configuration for GitHub Copilot Chat, including an optional GitHub Enterprise host.
/// URL construction from this configuration is exercised in `mod tests` at the bottom of
/// this file.
#[derive(Default, Clone, Debug, PartialEq)]
pub struct CopilotChatConfiguration {
    pub enterprise_uri: Option<String>,
}

impl CopilotChatConfiguration {
    /// The URL used to exchange the OAuth token for a short-lived Copilot API token.
    pub fn token_url(&self) -> String {
        if let Some(enterprise_uri) = &self.enterprise_uri {
            let domain = Self::parse_domain(enterprise_uri);
            format!("https://api.{}/copilot_internal/v2/token", domain)
        } else {
            "https://api.github.com/copilot_internal/v2/token".to_string()
        }
    }

    /// The domain whose entry in the Copilot configuration files holds the OAuth token.
    pub fn oauth_domain(&self) -> String {
        if let Some(enterprise_uri) = &self.enterprise_uri {
            Self::parse_domain(enterprise_uri)
        } else {
            "github.com".to_string()
        }
    }

    pub fn api_url_from_endpoint(&self, endpoint: &str) -> String {
        format!("{}/chat/completions", endpoint)
    }

    pub fn models_url_from_endpoint(&self, endpoint: &str) -> String {
        format!("{}/models", endpoint)
    }

    /// Extracts the bare domain from an enterprise URI, tolerating an optional scheme,
    /// trailing slashes, and a path.
    fn parse_domain(enterprise_uri: &str) -> String {
        let uri = enterprise_uri.trim_end_matches('/');

        if let Some(domain) = uri.strip_prefix("https://") {
            domain.split('/').next().unwrap_or(domain).to_string()
        } else if let Some(domain) = uri.strip_prefix("http://") {
            domain.split('/').next().unwrap_or(domain).to_string()
        } else {
            uri.split('/').next().unwrap_or(uri).to_string()
        }
    }
}

// Copilot's base model, as defined by Microsoft in the premium requests table.
// It is moved to the front of the Copilot model list and is used for 'fast'
// requests (e.g. title generation).
// https://docs.github.com/en/copilot/managing-copilot/monitoring-usage-and-entitlements/about-premium-requests
const DEFAULT_MODEL_ID: &str = "gpt-4.1";

#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
    System,
}

#[derive(Deserialize)]
struct ModelSchema {
    #[serde(deserialize_with = "deserialize_models_skip_errors")]
    data: Vec<Model>,
}

/// Deserializes the model list while skipping entries that fail to parse, so a single
/// unknown model does not invalidate the whole response.
fn deserialize_models_skip_errors<'de, D>(deserializer: D) -> Result<Vec<Model>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let raw_values = Vec::<serde_json::Value>::deserialize(deserializer)?;
    let models = raw_values
        .into_iter()
        .filter_map(|value| match serde_json::from_value::<Model>(value) {
            Ok(model) => Some(model),
            Err(err) => {
                log::warn!("GitHub Copilot Chat model failed to deserialize: {:?}", err);
                None
            }
        })
        .collect();

    Ok(models)
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct Model {
    capabilities: ModelCapabilities,
    id: String,
    name: String,
    policy: Option<ModelPolicy>,
    vendor: ModelVendor,
    model_picker_enabled: bool,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelCapabilities {
    family: String,
    #[serde(default)]
    limits: ModelLimits,
    supports: ModelSupportedFeatures,
}

#[derive(Default, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelLimits {
    #[serde(default)]
    max_context_window_tokens: usize,
    #[serde(default)]
    max_output_tokens: usize,
    #[serde(default)]
    max_prompt_tokens: u64,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelPolicy {
    state: String,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelSupportedFeatures {
    #[serde(default)]
    streaming: bool,
    #[serde(default)]
    tool_calls: bool,
    #[serde(default)]
    parallel_tool_calls: bool,
    #[serde(default)]
    vision: bool,
}

#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub enum ModelVendor {
    // Azure OpenAI should have no functional difference from OpenAI in Copilot Chat
    #[serde(alias = "Azure OpenAI")]
    OpenAI,
    Google,
    Anthropic,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
pub enum ChatMessagePart {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "image_url")]
    Image { image_url: ImageUrl },
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ImageUrl {
    pub url: String,
}

impl Model {
    pub fn uses_streaming(&self) -> bool {
        self.capabilities.supports.streaming
    }

    pub fn id(&self) -> &str {
        self.id.as_str()
    }

    pub fn display_name(&self) -> &str {
        self.name.as_str()
    }

    pub fn max_token_count(&self) -> u64 {
        self.capabilities.limits.max_prompt_tokens
    }

    pub fn supports_tools(&self) -> bool {
        self.capabilities.supports.tool_calls
    }

    pub fn vendor(&self) -> ModelVendor {
        self.vendor
    }

    pub fn supports_vision(&self) -> bool {
        self.capabilities.supports.vision
    }

    pub fn supports_parallel_tool_calls(&self) -> bool {
        self.capabilities.supports.parallel_tool_calls
    }
}

#[derive(Serialize, Deserialize)]
pub struct Request {
    pub intent: bool,
    pub n: usize,
    pub stream: bool,
    pub temperature: f32,
    pub model: String,
    pub messages: Vec<ChatMessage>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<Tool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
}

#[derive(Serialize, Deserialize)]
pub struct Function {
    pub name: String,
    pub description: String,
    pub parameters: serde_json::Value,
}

#[derive(Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Tool {
    Function { function: Function },
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ToolChoice {
    Auto,
    Any,
    None,
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum ChatMessage {
    Assistant {
        content: ChatMessageContent,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        tool_calls: Vec<ToolCall>,
    },
    User {
        content: ChatMessageContent,
    },
    System {
        content: String,
    },
    Tool {
        content: ChatMessageContent,
        tool_call_id: String,
    },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ChatMessageContent {
    Plain(String),
    Multipart(Vec<ChatMessagePart>),
}

impl ChatMessageContent {
    pub fn empty() -> Self {
        ChatMessageContent::Multipart(vec![])
    }
}

impl From<Vec<ChatMessagePart>> for ChatMessageContent {
    fn from(mut parts: Vec<ChatMessagePart>) -> Self {
        if let [ChatMessagePart::Text { text }] = parts.as_mut_slice() {
            ChatMessageContent::Plain(std::mem::take(text))
        } else {
            ChatMessageContent::Multipart(parts)
        }
    }
}

impl From<String> for ChatMessageContent {
    fn from(text: String) -> Self {
        ChatMessageContent::Plain(text)
    }
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCall {
    pub id: String,
    #[serde(flatten)]
    pub content: ToolCallContent,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolCallContent {
    Function { function: FunctionContent },
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionContent {
    pub name: String,
    pub arguments: String,
}

#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub struct ResponseEvent {
    pub choices: Vec<ResponseChoice>,
    pub id: String,
    pub usage: Option<Usage>,
}

#[derive(Deserialize, Debug)]
pub struct Usage {
    pub completion_tokens: u64,
    pub prompt_tokens: u64,
    pub prompt_tokens_details: PromptTokensDetails,
    pub total_tokens: u64,
}

#[derive(Deserialize, Debug)]
pub struct PromptTokensDetails {
    pub cached_tokens: u64,
}

#[derive(Debug, Deserialize)]
pub struct ResponseChoice {
    pub index: usize,
    pub finish_reason: Option<String>,
    pub delta: Option<ResponseDelta>,
    pub message: Option<ResponseDelta>,
}

#[derive(Debug, Deserialize)]
pub struct ResponseDelta {
    pub content: Option<String>,
    pub role: Option<Role>,
    #[serde(default)]
    pub tool_calls: Vec<ToolCallChunk>,
}

#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCallChunk {
    pub index: usize,
    pub id: Option<String>,
    pub function: Option<FunctionChunk>,
}

#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionChunk {
    pub name: Option<String>,
    pub arguments: Option<String>,
}

#[derive(Deserialize)]
struct ApiTokenResponse {
    token: String,
    expires_at: i64,
    endpoints: ApiTokenResponseEndpoints,
}

#[derive(Deserialize)]
struct ApiTokenResponseEndpoints {
    api: String,
}

#[derive(Clone)]
struct ApiToken {
    api_key: String,
    expires_at: DateTime<chrono::Utc>,
    api_endpoint: String,
}

impl ApiToken {
    pub fn remaining_seconds(&self) -> i64 {
        self.expires_at
            .timestamp()
            .saturating_sub(chrono::Utc::now().timestamp())
    }
}

impl TryFrom<ApiTokenResponse> for ApiToken {
    type Error = anyhow::Error;

    fn try_from(response: ApiTokenResponse) -> Result<Self, Self::Error> {
        let expires_at =
            DateTime::from_timestamp(response.expires_at, 0).context("invalid expires_at")?;

        Ok(Self {
            api_key: response.token,
            expires_at,
            api_endpoint: response.endpoints.api,
        })
    }
}

struct GlobalCopilotChat(gpui::Entity<CopilotChat>);

impl Global for GlobalCopilotChat {}

pub struct CopilotChat {
    oauth_token: Option<String>,
    api_token: Option<ApiToken>,
    configuration: CopilotChatConfiguration,
    models: Option<Vec<Model>>,
    client: Arc<dyn HttpClient>,
}

pub fn init(
    fs: Arc<dyn Fs>,
    client: Arc<dyn HttpClient>,
    configuration: CopilotChatConfiguration,
    cx: &mut App,
) {
    let copilot_chat = cx.new(|cx| CopilotChat::new(fs, client, configuration, cx));
    cx.set_global(GlobalCopilotChat(copilot_chat));
}

/// Directory holding the GitHub Copilot configuration files (`hosts.json` and
/// `apps.json`) from which the OAuth token is read.
pub fn copilot_chat_config_dir() -> &'static PathBuf {
    static COPILOT_CHAT_CONFIG_DIR: OnceLock<PathBuf> = OnceLock::new();

    COPILOT_CHAT_CONFIG_DIR.get_or_init(|| {
        if cfg!(target_os = "windows") {
            home_dir().join("AppData").join("Local")
        } else {
            home_dir().join(".config")
        }
        .join("github-copilot")
    })
}

fn copilot_chat_config_paths() -> [PathBuf; 2] {
    let base_dir = copilot_chat_config_dir();
    [base_dir.join("hosts.json"), base_dir.join("apps.json")]
}

impl CopilotChat {
    pub fn global(cx: &App) -> Option<gpui::Entity<Self>> {
        cx.try_global::<GlobalCopilotChat>()
            .map(|model| model.0.clone())
    }

    fn new(
        fs: Arc<dyn Fs>,
        client: Arc<dyn HttpClient>,
        configuration: CopilotChatConfiguration,
        cx: &mut Context<Self>,
    ) -> Self {
        let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
        let dir_path = copilot_chat_config_dir();

        // Watch the Copilot config directory so the OAuth token is picked up whenever it
        // appears or changes, then refresh the model list.
        cx.spawn(async move |this, cx| {
            let mut parent_watch_rx = watch_config_dir(
                cx.background_executor(),
                fs.clone(),
                dir_path.clone(),
                config_paths,
            );
            while let Some(contents) = parent_watch_rx.next().await {
                let oauth_domain =
                    this.read_with(cx, |this, _| this.configuration.oauth_domain())?;
                let oauth_token = extract_oauth_token(contents, &oauth_domain);

                this.update(cx, |this, cx| {
                    this.oauth_token = oauth_token.clone();
                    cx.notify();
                })?;

                if oauth_token.is_some() {
                    Self::update_models(&this, cx).await?;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        let this = Self {
            // An OAuth token provided via the environment takes effect immediately;
            // otherwise the config watcher above supplies it.
            oauth_token: std::env::var(COPILOT_OAUTH_ENV_VAR).ok(),
            api_token: None,
            models: None,
            configuration,
            client,
        };

        if this.oauth_token.is_some() {
            cx.spawn(async move |this, mut cx| Self::update_models(&this, &mut cx).await)
                .detach_and_log_err(cx);
        }

        this
    }

    async fn update_models(this: &WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
        let (oauth_token, client, configuration) = this.read_with(cx, |this, _| {
            (
                this.oauth_token.clone(),
                this.client.clone(),
                this.configuration.clone(),
            )
        })?;

        let oauth_token = oauth_token
            .ok_or_else(|| anyhow!("OAuth token is missing while updating Copilot Chat models"))?;

        let token_url = configuration.token_url();
        let api_token = request_api_token(&oauth_token, token_url.into(), client.clone()).await?;

        let models_url = configuration.models_url_from_endpoint(&api_token.api_endpoint);
        let models =
            get_models(models_url.into(), api_token.api_key.clone(), client.clone()).await?;

        this.update(cx, |this, cx| {
            this.api_token = Some(api_token);
            this.models = Some(models);
            cx.notify();
        })?;
        anyhow::Ok(())
    }

    pub fn is_authenticated(&self) -> bool {
        self.oauth_token.is_some()
    }

    pub fn models(&self) -> Option<&[Model]> {
        self.models.as_deref()
    }

    pub async fn stream_completion(
        request: Request,
        mut cx: AsyncApp,
    ) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
        let this = cx
            .update(|cx| Self::global(cx))
            .ok()
            .flatten()
            .context("Copilot chat is not enabled")?;

        let (oauth_token, api_token, client, configuration) = this.read_with(&cx, |this, _| {
            (
                this.oauth_token.clone(),
                this.api_token.clone(),
                this.client.clone(),
                this.configuration.clone(),
            )
        })?;

        let oauth_token = oauth_token.context("No OAuth token available")?;

        // Reuse the cached API token unless it expires within the next five minutes;
        // otherwise exchange the OAuth token for a fresh one.
        let token = match api_token {
            Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
            _ => {
                let token_url = configuration.token_url();
                let token =
                    request_api_token(&oauth_token, token_url.into(), client.clone()).await?;
                this.update(&mut cx, |this, cx| {
                    this.api_token = Some(token.clone());
                    cx.notify();
                })?;
                token
            }
        };

        let api_url = configuration.api_url_from_endpoint(&token.api_endpoint);
        stream_completion(client.clone(), token.api_key, api_url.into(), request).await
    }

    pub fn set_configuration(
        &mut self,
        configuration: CopilotChatConfiguration,
        cx: &mut Context<Self>,
    ) {
        let same_configuration = self.configuration == configuration;
        self.configuration = configuration;
        if !same_configuration {
            // The cached API token and model list belong to the old host, so drop the
            // token and refetch the models.
            self.api_token = None;
            cx.spawn(async move |this, cx| {
                Self::update_models(&this, cx).await?;
                Ok::<_, anyhow::Error>(())
            })
            .detach();
        }
    }
}

async fn get_models(
    models_url: Arc<str>,
    api_token: String,
    client: Arc<dyn HttpClient>,
) -> Result<Vec<Model>> {
    let all_models = request_models(models_url, api_token, client).await?;

    // Keep only models that are enabled for the model picker, collapse duplicates that
    // share a model family, and surface the default model first.
    let mut models: Vec<Model> = all_models
        .into_iter()
        .filter(|model| {
            model.model_picker_enabled
                && model
                    .policy
                    .as_ref()
                    .is_none_or(|policy| policy.state == "enabled")
        })
        .dedup_by(|a, b| a.capabilities.family == b.capabilities.family)
        .collect();

    if let Some(default_model_position) =
        models.iter().position(|model| model.id == DEFAULT_MODEL_ID)
    {
        let default_model = models.remove(default_model_position);
        models.insert(0, default_model);
    }

    Ok(models)
}

async fn request_models(
    models_url: Arc<str>,
    api_token: String,
    client: Arc<dyn HttpClient>,
) -> Result<Vec<Model>> {
    let request_builder = HttpRequest::builder()
        .method(Method::GET)
        .uri(models_url.as_ref())
        .header("Authorization", format!("Bearer {}", api_token))
        .header("Content-Type", "application/json")
        .header("Copilot-Integration-Id", "vscode-chat");

    let request = request_builder.body(AsyncBody::empty())?;

    let mut response = client.send(request).await?;

    anyhow::ensure!(
        response.status().is_success(),
        "Failed to request models: {}",
        response.status()
    );
    let mut body = Vec::new();
    response.body_mut().read_to_end(&mut body).await?;

    let body_str = std::str::from_utf8(&body)?;

    let models = serde_json::from_str::<ModelSchema>(body_str)?.data;

    Ok(models)
}

async fn request_api_token(
    oauth_token: &str,
    auth_url: Arc<str>,
    client: Arc<dyn HttpClient>,
) -> Result<ApiToken> {
    let request_builder = HttpRequest::builder()
        .method(Method::GET)
        .uri(auth_url.as_ref())
        .header("Authorization", format!("token {}", oauth_token))
        .header("Accept", "application/json");

    let request = request_builder.body(AsyncBody::empty())?;

    let mut response = client.send(request).await?;

    if response.status().is_success() {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;

        let body_str = std::str::from_utf8(&body)?;

        let parsed: ApiTokenResponse = serde_json::from_str(body_str)?;
        ApiToken::try_from(parsed)
    } else {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;

        let body_str = std::str::from_utf8(&body)?;
        anyhow::bail!("Failed to request API token: {body_str}");
    }
}

fn extract_oauth_token(contents: String, domain: &str) -> Option<String> {
    // Entries in hosts.json/apps.json are keyed by host, so match any key that starts
    // with the configured domain and pull its `oauth_token` field.
    serde_json::from_str::<serde_json::Value>(&contents)
        .map(|v| {
            v.as_object().and_then(|obj| {
                obj.iter().find_map(|(key, value)| {
                    if key.starts_with(domain) {
                        value["oauth_token"].as_str().map(|v| v.to_string())
                    } else {
                        None
                    }
                })
            })
        })
        .ok()
        .flatten()
}

async fn stream_completion(
    client: Arc<dyn HttpClient>,
    api_key: String,
    completion_url: Arc<str>,
    request: Request,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
    // The vision header is only set when the latest message actually contains an image part.
    let is_vision_request = request.messages.last().map_or(false, |message| match message {
        ChatMessage::User { content }
        | ChatMessage::Assistant { content, .. }
        | ChatMessage::Tool { content, .. } => {
            matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
        }
        _ => false,
    });

    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(completion_url.as_ref())
        .header(
            "Editor-Version",
            format!(
                "Zed/{}",
                option_env!("CARGO_PKG_VERSION").unwrap_or("unknown")
            ),
        )
        .header("Authorization", format!("Bearer {}", api_key))
        .header("Content-Type", "application/json")
        .header("Copilot-Integration-Id", "vscode-chat")
        .header("Copilot-Vision-Request", is_vision_request.to_string());

    let is_streaming = request.stream;

    let json = serde_json::to_string(&request)?;
    let request = request_builder.body(AsyncBody::from(json))?;
    let mut response = client.send(request).await?;

    if !response.status().is_success() {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;
        let body_str = std::str::from_utf8(&body)?;
        anyhow::bail!(
            "Failed to connect to API: {} {}",
            response.status(),
            body_str
        );
    }

    if is_streaming {
        // Server-sent events: each payload line is prefixed with "data: " and the stream
        // ends with a "[DONE]" sentinel, which is dropped rather than parsed.
        let reader = BufReader::new(response.into_body());
        Ok(reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        let line = line.strip_prefix("data: ")?;
                        if line.starts_with("[DONE]") {
                            return None;
                        }

                        match serde_json::from_str::<ResponseEvent>(line) {
                            Ok(response) => {
                                if response.choices.is_empty() {
                                    None
                                } else {
                                    Some(Ok(response))
                                }
                            }
                            Err(error) => Some(Err(anyhow!(error))),
                        }
                    }
                    Err(error) => Some(Err(anyhow!(error))),
                }
            })
            .boxed())
    } else {
        // Non-streaming requests return a single response object, wrapped here in a
        // one-element stream so callers handle both cases uniformly.
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;
        let body_str = std::str::from_utf8(&body)?;
        let response: ResponseEvent = serde_json::from_str(body_str)?;

        Ok(futures::stream::once(async move { Ok(response) }).boxed())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_resilient_model_schema_deserialize() {
        let json = r#"{
              "data": [
                {
                  "capabilities": {
                    "family": "gpt-4",
                    "limits": {
                      "max_context_window_tokens": 32768,
                      "max_output_tokens": 4096,
                      "max_prompt_tokens": 32768
                    },
                    "object": "model_capabilities",
                    "supports": { "streaming": true, "tool_calls": true },
                    "tokenizer": "cl100k_base",
                    "type": "chat"
                  },
                  "id": "gpt-4",
                  "model_picker_enabled": false,
                  "name": "GPT 4",
                  "object": "model",
                  "preview": false,
                  "vendor": "Azure OpenAI",
                  "version": "gpt-4-0613"
                },
                {
                    "some-unknown-field": 123
                },
                {
                  "capabilities": {
                    "family": "claude-3.7-sonnet",
                    "limits": {
                      "max_context_window_tokens": 200000,
                      "max_output_tokens": 16384,
                      "max_prompt_tokens": 90000,
                      "vision": {
                        "max_prompt_image_size": 3145728,
                        "max_prompt_images": 1,
                        "supported_media_types": ["image/jpeg", "image/png", "image/webp"]
                      }
                    },
                    "object": "model_capabilities",
                    "supports": {
                      "parallel_tool_calls": true,
                      "streaming": true,
                      "tool_calls": true,
                      "vision": true
                    },
                    "tokenizer": "o200k_base",
                    "type": "chat"
                  },
                  "id": "claude-3.7-sonnet",
                  "model_picker_enabled": true,
                  "name": "Claude 3.7 Sonnet",
                  "object": "model",
                  "policy": {
                    "state": "enabled",
                    "terms": "Enable access to the latest Claude 3.7 Sonnet model from Anthropic. [Learn more about how GitHub Copilot serves Claude 3.7 Sonnet](https://docs.github.com/copilot/using-github-copilot/using-claude-sonnet-in-github-copilot)."
                  },
                  "preview": false,
                  "vendor": "Anthropic",
                  "version": "claude-3.7-sonnet"
                }
              ],
              "object": "list"
            }"#;

        let schema: ModelSchema = serde_json::from_str(&json).unwrap();

        assert_eq!(schema.data.len(), 2);
        assert_eq!(schema.data[0].id, "gpt-4");
        assert_eq!(schema.data[1].id, "claude-3.7-sonnet");
    }
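
    // A minimal sketch of how the configuration URL helpers expand hosts; the enterprise
    // domain and endpoint below are hypothetical examples, not real deployments.
    #[test]
    fn test_configuration_urls() {
        let default_config = CopilotChatConfiguration::default();
        assert_eq!(
            default_config.token_url(),
            "https://api.github.com/copilot_internal/v2/token"
        );
        assert_eq!(default_config.oauth_domain(), "github.com");

        let enterprise_config = CopilotChatConfiguration {
            enterprise_uri: Some("https://copilot.example-corp.com/".to_string()),
        };
        assert_eq!(
            enterprise_config.token_url(),
            "https://api.copilot.example-corp.com/copilot_internal/v2/token"
        );
        assert_eq!(enterprise_config.oauth_domain(), "copilot.example-corp.com");

        // The chat and models URLs are derived from the endpoint returned by the token API.
        assert_eq!(
            enterprise_config.api_url_from_endpoint("https://api.example-corp.com"),
            "https://api.example-corp.com/chat/completions"
        );
        assert_eq!(
            enterprise_config.models_url_from_endpoint("https://api.example-corp.com"),
            "https://api.example-corp.com/models"
        );
    }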
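
    // Sketch of the `From<Vec<ChatMessagePart>>` conversion: a single text part collapses
    // into the plain-string form, while mixed parts stay multipart. The image URL is a
    // made-up placeholder.
    #[test]
    fn test_chat_message_content_from_parts() {
        let single = ChatMessageContent::from(vec![ChatMessagePart::Text {
            text: "hello".to_string(),
        }]);
        assert!(matches!(single, ChatMessageContent::Plain(text) if text == "hello"));

        let multipart = ChatMessageContent::from(vec![
            ChatMessagePart::Text {
                text: "What is in this image?".to_string(),
            },
            ChatMessagePart::Image {
                image_url: ImageUrl {
                    url: "https://example.com/image.png".to_string(),
                },
            },
        ]);
        assert!(matches!(multipart, ChatMessageContent::Multipart(parts) if parts.len() == 2));
    }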
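
    // Sketch of OAuth token extraction: entries are matched by domain prefix, so a
    // github.com key is found while an unrelated enterprise domain is not. The key
    // suffix and token value are made up.
    #[test]
    fn test_extract_oauth_token_matches_domain_prefix() {
        let contents = r#"{
            "github.com:Iv1.hypothetical": { "oauth_token": "gho_hypothetical_token" }
        }"#
        .to_string();

        assert_eq!(
            extract_oauth_token(contents.clone(), "github.com"),
            Some("gho_hypothetical_token".to_string())
        );
        assert_eq!(
            extract_oauth_token(contents, "copilot.example-corp.com"),
            None
        );
    }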
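
    // Sketch of the token-exchange response handling; the token, timestamp, and endpoint
    // are made-up values.
    #[test]
    fn test_api_token_from_response() {
        let response: ApiTokenResponse = serde_json::from_str(
            r#"{
                "token": "copilot-api-token",
                "expires_at": 1893456000,
                "endpoints": { "api": "https://api.example-corp.com" }
            }"#,
        )
        .unwrap();

        let api_token = ApiToken::try_from(response).unwrap();
        assert_eq!(api_token.api_key, "copilot-api-token");
        assert_eq!(api_token.api_endpoint, "https://api.example-corp.com");
        assert_eq!(api_token.expires_at.timestamp(), 1893456000);
    }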
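
    // Sketch of how chat messages serialize for the API: the role is carried as an
    // internal tag and an empty tool-call list is omitted entirely.
    #[test]
    fn test_chat_message_serialization() {
        let message = ChatMessage::Assistant {
            content: ChatMessageContent::Plain("hi".to_string()),
            tool_calls: Vec::new(),
        };
        let value = serde_json::to_value(&message).unwrap();

        assert_eq!(value["role"], "assistant");
        assert_eq!(value["content"], "hi");
        assert!(value.get("tool_calls").is_none());
    }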
}