copilot_chat.rs

use std::path::PathBuf;
use std::sync::Arc;
use std::sync::OnceLock;

use anyhow::Context as _;
use anyhow::{Result, anyhow};
use chrono::DateTime;
use collections::HashSet;
use fs::Fs;
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use gpui::WeakEntity;
use gpui::{App, AsyncApp, Global, prelude::*};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use itertools::Itertools;
use paths::home_dir;
use serde::{Deserialize, Serialize};
use settings::watch_config_dir;

pub const COPILOT_OAUTH_ENV_VAR: &str = "GH_COPILOT_TOKEN";

#[derive(Default, Clone, Debug, PartialEq)]
pub struct CopilotChatConfiguration {
    pub enterprise_uri: Option<String>,
}

impl CopilotChatConfiguration {
    pub fn token_url(&self) -> String {
        if let Some(enterprise_uri) = &self.enterprise_uri {
            let domain = Self::parse_domain(enterprise_uri);
            format!("https://api.{}/copilot_internal/v2/token", domain)
        } else {
            "https://api.github.com/copilot_internal/v2/token".to_string()
        }
    }

    pub fn oauth_domain(&self) -> String {
        if let Some(enterprise_uri) = &self.enterprise_uri {
            Self::parse_domain(enterprise_uri)
        } else {
            "github.com".to_string()
        }
    }

    pub fn api_url_from_endpoint(&self, endpoint: &str) -> String {
        format!("{}/chat/completions", endpoint)
    }

    pub fn models_url_from_endpoint(&self, endpoint: &str) -> String {
        format!("{}/models", endpoint)
    }

    fn parse_domain(enterprise_uri: &str) -> String {
        let uri = enterprise_uri.trim_end_matches('/');

        if let Some(domain) = uri.strip_prefix("https://") {
            domain.split('/').next().unwrap_or(domain).to_string()
        } else if let Some(domain) = uri.strip_prefix("http://") {
            domain.split('/').next().unwrap_or(domain).to_string()
        } else {
            uri.split('/').next().unwrap_or(uri).to_string()
        }
    }
}
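
// Illustrative sketch of how the configuration above resolves endpoints; the
// enterprise host used here is hypothetical.
#[cfg(test)]
mod configuration_examples {
    use super::*;

    #[test]
    fn enterprise_uri_is_reduced_to_a_domain_for_endpoint_urls() {
        let configuration = CopilotChatConfiguration {
            enterprise_uri: Some("https://ghe.example.com/".to_string()),
        };
        // The scheme, trailing slash, and any path are stripped before the
        // domain is substituted into the token endpoint.
        assert_eq!(
            configuration.token_url(),
            "https://api.ghe.example.com/copilot_internal/v2/token"
        );
        assert_eq!(configuration.oauth_domain(), "ghe.example.com");

        // Without an enterprise URI, the public github.com endpoints are used.
        let default_configuration = CopilotChatConfiguration::default();
        assert_eq!(
            default_configuration.token_url(),
            "https://api.github.com/copilot_internal/v2/token"
        );
        assert_eq!(default_configuration.oauth_domain(), "github.com");
    }
}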

// Copilot's base model; defined by Microsoft in premium requests table
// This will be moved to the front of the Copilot model list, and will be used for
// 'fast' requests (e.g. title generation)
// https://docs.github.com/en/copilot/managing-copilot/monitoring-usage-and-entitlements/about-premium-requests
const DEFAULT_MODEL_ID: &str = "gpt-4.1";

#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
    System,
}

#[derive(Deserialize)]
struct ModelSchema {
    #[serde(deserialize_with = "deserialize_models_skip_errors")]
    data: Vec<Model>,
}

fn deserialize_models_skip_errors<'de, D>(deserializer: D) -> Result<Vec<Model>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let raw_values = Vec::<serde_json::Value>::deserialize(deserializer)?;
    let models = raw_values
        .into_iter()
        .filter_map(|value| match serde_json::from_value::<Model>(value) {
            Ok(model) => Some(model),
            Err(err) => {
                log::warn!("GitHub Copilot Chat model failed to deserialize: {:?}", err);
                None
            }
        })
        .collect();

    Ok(models)
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct Model {
    capabilities: ModelCapabilities,
    id: String,
    name: String,
    policy: Option<ModelPolicy>,
    vendor: ModelVendor,
    model_picker_enabled: bool,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelCapabilities {
    family: String,
    #[serde(default)]
    limits: ModelLimits,
    supports: ModelSupportedFeatures,
}

#[derive(Default, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelLimits {
    #[serde(default)]
    max_context_window_tokens: usize,
    #[serde(default)]
    max_output_tokens: usize,
    #[serde(default)]
    max_prompt_tokens: u64,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelPolicy {
    state: String,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelSupportedFeatures {
    #[serde(default)]
    streaming: bool,
    #[serde(default)]
    tool_calls: bool,
    #[serde(default)]
    parallel_tool_calls: bool,
    #[serde(default)]
    vision: bool,
}

#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub enum ModelVendor {
    // Azure OpenAI should have no functional difference from OpenAI in Copilot Chat
    #[serde(alias = "Azure OpenAI")]
    OpenAI,
    Google,
    Anthropic,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
pub enum ChatMessagePart {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "image_url")]
    Image { image_url: ImageUrl },
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ImageUrl {
    pub url: String,
}

impl Model {
    pub fn uses_streaming(&self) -> bool {
        self.capabilities.supports.streaming
    }

    pub fn id(&self) -> &str {
        self.id.as_str()
    }

    pub fn display_name(&self) -> &str {
        self.name.as_str()
    }

    pub fn max_token_count(&self) -> u64 {
        self.capabilities.limits.max_prompt_tokens
    }

    pub fn supports_tools(&self) -> bool {
        self.capabilities.supports.tool_calls
    }

    pub fn vendor(&self) -> ModelVendor {
        self.vendor
    }

    pub fn supports_vision(&self) -> bool {
        self.capabilities.supports.vision
    }

    pub fn supports_parallel_tool_calls(&self) -> bool {
        self.capabilities.supports.parallel_tool_calls
    }
}

#[derive(Serialize, Deserialize)]
pub struct Request {
    pub intent: bool,
    pub n: usize,
    pub stream: bool,
    pub temperature: f32,
    pub model: String,
    pub messages: Vec<ChatMessage>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<Tool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
}

#[derive(Serialize, Deserialize)]
pub struct Function {
    pub name: String,
    pub description: String,
    pub parameters: serde_json::Value,
}

#[derive(Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Tool {
    Function { function: Function },
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ToolChoice {
    Auto,
    Any,
    None,
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum ChatMessage {
    Assistant {
        content: ChatMessageContent,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        tool_calls: Vec<ToolCall>,
    },
    User {
        content: ChatMessageContent,
    },
    System {
        content: String,
    },
    Tool {
        content: ChatMessageContent,
        tool_call_id: String,
    },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ChatMessageContent {
    Plain(String),
    Multipart(Vec<ChatMessagePart>),
}

impl ChatMessageContent {
    pub fn empty() -> Self {
        ChatMessageContent::Multipart(vec![])
    }
}

impl From<Vec<ChatMessagePart>> for ChatMessageContent {
    fn from(mut parts: Vec<ChatMessagePart>) -> Self {
        if let [ChatMessagePart::Text { text }] = parts.as_mut_slice() {
            ChatMessageContent::Plain(std::mem::take(text))
        } else {
            ChatMessageContent::Multipart(parts)
        }
    }
}

impl From<String> for ChatMessageContent {
    fn from(text: String) -> Self {
        ChatMessageContent::Plain(text)
    }
}
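
// Illustrative sketch of the conversions above: a single text part collapses to
// the plain string form, while mixed parts stay multipart. The image URL below
// is a placeholder.
#[cfg(test)]
mod chat_message_content_examples {
    use super::*;

    #[test]
    fn single_text_part_collapses_to_plain_content() {
        let content: ChatMessageContent = vec![ChatMessagePart::Text {
            text: "hello".to_string(),
        }]
        .into();
        assert!(matches!(&content, ChatMessageContent::Plain(text) if text == "hello"));

        let content: ChatMessageContent = vec![
            ChatMessagePart::Text {
                text: "describe this image".to_string(),
            },
            ChatMessagePart::Image {
                image_url: ImageUrl {
                    url: "https://example.com/screenshot.png".to_string(),
                },
            },
        ]
        .into();
        assert!(matches!(content, ChatMessageContent::Multipart(parts) if parts.len() == 2));
    }
}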

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCall {
    pub id: String,
    #[serde(flatten)]
    pub content: ToolCallContent,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolCallContent {
    Function { function: FunctionContent },
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionContent {
    pub name: String,
    pub arguments: String,
}
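
// Illustrative sketch of the request body these types serialize to; the field
// values here are invented for the example.
#[cfg(test)]
mod request_serialization_examples {
    use super::*;

    #[test]
    fn request_serializes_to_an_openai_style_body() {
        let request = Request {
            intent: true,
            n: 1,
            stream: true,
            temperature: 0.0,
            model: "gpt-4.1".to_string(),
            messages: vec![
                ChatMessage::System {
                    content: "You are a helpful assistant.".to_string(),
                },
                ChatMessage::User {
                    content: ChatMessageContent::Plain("Hello!".to_string()),
                },
            ],
            tools: Vec::new(),
            tool_choice: None,
        };

        let json = serde_json::to_value(&request).unwrap();
        assert_eq!(json["model"], "gpt-4.1");
        assert_eq!(json["messages"][0]["role"], "system");
        assert_eq!(json["messages"][1]["content"], "Hello!");
        // Empty tools and an unset tool_choice are omitted entirely, thanks to
        // the skip_serializing_if attributes.
        assert!(json.get("tools").is_none());
        assert!(json.get("tool_choice").is_none());
    }
}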

#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub struct ResponseEvent {
    pub choices: Vec<ResponseChoice>,
    pub id: String,
    pub usage: Option<Usage>,
}

#[derive(Deserialize, Debug)]
pub struct Usage {
    pub completion_tokens: u64,
    pub prompt_tokens: u64,
    pub prompt_tokens_details: PromptTokensDetails,
    pub total_tokens: u64,
}

#[derive(Deserialize, Debug)]
pub struct PromptTokensDetails {
    pub cached_tokens: u64,
}

#[derive(Debug, Deserialize)]
pub struct ResponseChoice {
    pub index: usize,
    pub finish_reason: Option<String>,
    pub delta: Option<ResponseDelta>,
    pub message: Option<ResponseDelta>,
}

#[derive(Debug, Deserialize)]
pub struct ResponseDelta {
    pub content: Option<String>,
    pub role: Option<Role>,
    #[serde(default)]
    pub tool_calls: Vec<ToolCallChunk>,
}

#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCallChunk {
    pub index: usize,
    pub id: Option<String>,
    pub function: Option<FunctionChunk>,
}

#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionChunk {
    pub name: Option<String>,
    pub arguments: Option<String>,
}

#[derive(Deserialize)]
struct ApiTokenResponse {
    token: String,
    expires_at: i64,
    endpoints: ApiTokenResponseEndpoints,
}

#[derive(Deserialize)]
struct ApiTokenResponseEndpoints {
    api: String,
}

#[derive(Clone)]
struct ApiToken {
    api_key: String,
    expires_at: DateTime<chrono::Utc>,
    api_endpoint: String,
}

impl ApiToken {
    pub fn remaining_seconds(&self) -> i64 {
        self.expires_at
            .timestamp()
            .saturating_sub(chrono::Utc::now().timestamp())
    }
}

impl TryFrom<ApiTokenResponse> for ApiToken {
    type Error = anyhow::Error;

    fn try_from(response: ApiTokenResponse) -> Result<Self, Self::Error> {
        let expires_at =
            DateTime::from_timestamp(response.expires_at, 0).context("invalid expires_at")?;

        Ok(Self {
            api_key: response.token,
            expires_at,
            api_endpoint: response.endpoints.api,
        })
    }
}
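
// Illustrative sketch of the token lifetime check: `stream_completion` reuses a
// cached token only while more than five minutes remain. The endpoint and token
// strings are placeholders.
#[cfg(test)]
mod api_token_examples {
    use super::*;

    #[test]
    fn token_expiring_soon_fails_the_reuse_threshold() {
        let response = ApiTokenResponse {
            token: "placeholder-api-token".to_string(),
            // Expires two minutes from now, well under the 5 * 60 second margin.
            expires_at: chrono::Utc::now().timestamp() + 120,
            endpoints: ApiTokenResponseEndpoints {
                api: "https://api.enterprise.example.com".to_string(),
            },
        };

        let token = ApiToken::try_from(response).unwrap();
        assert_eq!(token.api_endpoint, "https://api.enterprise.example.com");
        assert!(token.remaining_seconds() <= 5 * 60);
    }
}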

struct GlobalCopilotChat(gpui::Entity<CopilotChat>);

impl Global for GlobalCopilotChat {}

pub struct CopilotChat {
    oauth_token: Option<String>,
    api_token: Option<ApiToken>,
    configuration: CopilotChatConfiguration,
    models: Option<Vec<Model>>,
    client: Arc<dyn HttpClient>,
}

pub fn init(
    fs: Arc<dyn Fs>,
    client: Arc<dyn HttpClient>,
    configuration: CopilotChatConfiguration,
    cx: &mut App,
) {
    let copilot_chat = cx.new(|cx| CopilotChat::new(fs, client, configuration, cx));
    cx.set_global(GlobalCopilotChat(copilot_chat));
}

pub fn copilot_chat_config_dir() -> &'static PathBuf {
    static COPILOT_CHAT_CONFIG_DIR: OnceLock<PathBuf> = OnceLock::new();

    COPILOT_CHAT_CONFIG_DIR.get_or_init(|| {
        let config_dir = if cfg!(target_os = "windows") {
            dirs::data_local_dir().expect("failed to determine LocalAppData directory")
        } else {
            std::env::var("XDG_CONFIG_HOME")
                .map(PathBuf::from)
                .unwrap_or_else(|_| home_dir().join(".config"))
        };

        config_dir.join("github-copilot")
    })
}

fn copilot_chat_config_paths() -> [PathBuf; 2] {
    let base_dir = copilot_chat_config_dir();
    [base_dir.join("hosts.json"), base_dir.join("apps.json")]
}

impl CopilotChat {
    pub fn global(cx: &App) -> Option<gpui::Entity<Self>> {
        cx.try_global::<GlobalCopilotChat>()
            .map(|model| model.0.clone())
    }

    fn new(
        fs: Arc<dyn Fs>,
        client: Arc<dyn HttpClient>,
        configuration: CopilotChatConfiguration,
        cx: &mut Context<Self>,
    ) -> Self {
        let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
        let dir_path = copilot_chat_config_dir();

        cx.spawn(async move |this, cx| {
            let mut parent_watch_rx = watch_config_dir(
                cx.background_executor(),
                fs.clone(),
                dir_path.clone(),
                config_paths,
            );
            while let Some(contents) = parent_watch_rx.next().await {
                let oauth_domain =
                    this.read_with(cx, |this, _| this.configuration.oauth_domain())?;
                let oauth_token = extract_oauth_token(contents, &oauth_domain);

                this.update(cx, |this, cx| {
                    this.oauth_token = oauth_token.clone();
                    cx.notify();
                })?;

                if oauth_token.is_some() {
                    Self::update_models(&this, cx).await?;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        let this = Self {
            oauth_token: std::env::var(COPILOT_OAUTH_ENV_VAR).ok(),
            api_token: None,
            models: None,
            configuration,
            client,
        };

        if this.oauth_token.is_some() {
            cx.spawn(async move |this, mut cx| Self::update_models(&this, &mut cx).await)
                .detach_and_log_err(cx);
        }

        this
    }

    async fn update_models(this: &WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
        let (oauth_token, client, configuration) = this.read_with(cx, |this, _| {
            (
                this.oauth_token.clone(),
                this.client.clone(),
                this.configuration.clone(),
            )
        })?;

        let oauth_token = oauth_token
            .ok_or_else(|| anyhow!("OAuth token is missing while updating Copilot Chat models"))?;

        let token_url = configuration.token_url();
        let api_token = request_api_token(&oauth_token, token_url.into(), client.clone()).await?;

        let models_url = configuration.models_url_from_endpoint(&api_token.api_endpoint);
        let models =
            get_models(models_url.into(), api_token.api_key.clone(), client.clone()).await?;

        this.update(cx, |this, cx| {
            this.api_token = Some(api_token);
            this.models = Some(models);
            cx.notify();
        })?;
        anyhow::Ok(())
    }

    pub fn is_authenticated(&self) -> bool {
        self.oauth_token.is_some()
    }

    pub fn models(&self) -> Option<&[Model]> {
        self.models.as_deref()
    }

    pub async fn stream_completion(
        request: Request,
        mut cx: AsyncApp,
    ) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
        let this = cx
            .update(|cx| Self::global(cx))
            .ok()
            .flatten()
            .context("Copilot chat is not enabled")?;

        let (oauth_token, api_token, client, configuration) = this.read_with(&cx, |this, _| {
            (
                this.oauth_token.clone(),
                this.api_token.clone(),
                this.client.clone(),
                this.configuration.clone(),
            )
        })?;

        let oauth_token = oauth_token.context("No OAuth token available")?;

        let token = match api_token {
            Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
            _ => {
                let token_url = configuration.token_url();
                let token =
                    request_api_token(&oauth_token, token_url.into(), client.clone()).await?;
                this.update(&mut cx, |this, cx| {
                    this.api_token = Some(token.clone());
                    cx.notify();
                })?;
                token
            }
        };

        let api_url = configuration.api_url_from_endpoint(&token.api_endpoint);
        stream_completion(client.clone(), token.api_key, api_url.into(), request).await
    }

    pub fn set_configuration(
        &mut self,
        configuration: CopilotChatConfiguration,
        cx: &mut Context<Self>,
    ) {
        let same_configuration = self.configuration == configuration;
        self.configuration = configuration;
        if !same_configuration {
            self.api_token = None;
            cx.spawn(async move |this, cx| {
                Self::update_models(&this, cx).await?;
                Ok::<_, anyhow::Error>(())
            })
            .detach();
        }
    }
}

async fn get_models(
    models_url: Arc<str>,
    api_token: String,
    client: Arc<dyn HttpClient>,
) -> Result<Vec<Model>> {
    let all_models = request_models(models_url, api_token, client).await?;

    let mut models: Vec<Model> = all_models
        .into_iter()
        .filter(|model| {
            model.model_picker_enabled
                && model
                    .policy
                    .as_ref()
                    .is_none_or(|policy| policy.state == "enabled")
        })
        .dedup_by(|a, b| a.capabilities.family == b.capabilities.family)
        .collect();

    if let Some(default_model_position) =
        models.iter().position(|model| model.id == DEFAULT_MODEL_ID)
    {
        let default_model = models.remove(default_model_position);
        models.insert(0, default_model);
    }

    Ok(models)
}
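
// Note on the family dedup above: `Itertools::dedup_by` only collapses
// *consecutive* entries, so `get_models` merges models of the same family only
// when the models endpoint lists them next to each other. A minimal sketch:
#[cfg(test)]
mod model_dedup_examples {
    use itertools::Itertools;

    #[test]
    fn dedup_by_only_collapses_adjacent_entries() {
        let families = ["gpt-4", "gpt-4", "claude-3.7-sonnet", "gpt-4"];
        let deduped: Vec<_> = families.iter().dedup_by(|a, b| a == b).collect();
        // The trailing "gpt-4" survives because it is not adjacent to the first run.
        assert_eq!(deduped, [&"gpt-4", &"claude-3.7-sonnet", &"gpt-4"]);
    }
}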

async fn request_models(
    models_url: Arc<str>,
    api_token: String,
    client: Arc<dyn HttpClient>,
) -> Result<Vec<Model>> {
    let request_builder = HttpRequest::builder()
        .method(Method::GET)
        .uri(models_url.as_ref())
        .header("Authorization", format!("Bearer {}", api_token))
        .header("Content-Type", "application/json")
        .header("Copilot-Integration-Id", "vscode-chat");

    let request = request_builder.body(AsyncBody::empty())?;

    let mut response = client.send(request).await?;

    anyhow::ensure!(
        response.status().is_success(),
        "Failed to request models: {}",
        response.status()
    );
    let mut body = Vec::new();
    response.body_mut().read_to_end(&mut body).await?;

    let body_str = std::str::from_utf8(&body)?;

    let models = serde_json::from_str::<ModelSchema>(body_str)?.data;

    Ok(models)
}

async fn request_api_token(
    oauth_token: &str,
    auth_url: Arc<str>,
    client: Arc<dyn HttpClient>,
) -> Result<ApiToken> {
    let request_builder = HttpRequest::builder()
        .method(Method::GET)
        .uri(auth_url.as_ref())
        .header("Authorization", format!("token {}", oauth_token))
        .header("Accept", "application/json");

    let request = request_builder.body(AsyncBody::empty())?;

    let mut response = client.send(request).await?;

    if response.status().is_success() {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;

        let body_str = std::str::from_utf8(&body)?;

        let parsed: ApiTokenResponse = serde_json::from_str(body_str)?;
        ApiToken::try_from(parsed)
    } else {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;

        let body_str = std::str::from_utf8(&body)?;
        anyhow::bail!("Failed to request API token: {body_str}");
    }
}

fn extract_oauth_token(contents: String, domain: &str) -> Option<String> {
    serde_json::from_str::<serde_json::Value>(&contents)
        .map(|v| {
            v.as_object().and_then(|obj| {
                obj.iter().find_map(|(key, value)| {
                    if key.starts_with(domain) {
                        value["oauth_token"].as_str().map(|v| v.to_string())
                    } else {
                        None
                    }
                })
            })
        })
        .ok()
        .flatten()
}
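
// Illustrative sketch of the hosts.json shape `extract_oauth_token` expects:
// entries are keyed by host (the key may carry a suffix, hence `starts_with`),
// and the matching entry's `oauth_token` is returned. The keys and tokens below
// are made up.
#[cfg(test)]
mod oauth_token_extraction_examples {
    use super::*;

    #[test]
    fn picks_the_entry_matching_the_configured_domain() {
        let contents = r#"{
            "github.com": { "oauth_token": "gho_public_example" },
            "ghe.example.com": { "oauth_token": "gho_enterprise_example" }
        }"#
        .to_string();

        assert_eq!(
            extract_oauth_token(contents.clone(), "ghe.example.com"),
            Some("gho_enterprise_example".to_string())
        );
        assert_eq!(
            extract_oauth_token(contents, "github.com"),
            Some("gho_public_example".to_string())
        );
    }
}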

async fn stream_completion(
    client: Arc<dyn HttpClient>,
    api_key: String,
    completion_url: Arc<str>,
    request: Request,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
    let is_vision_request = request.messages.last().map_or(false, |message| match message {
        ChatMessage::User { content }
        | ChatMessage::Assistant { content, .. }
        | ChatMessage::Tool { content, .. } => {
            matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
        }
        _ => false,
    });

    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(completion_url.as_ref())
        .header(
            "Editor-Version",
            format!(
                "Zed/{}",
                option_env!("CARGO_PKG_VERSION").unwrap_or("unknown")
            ),
        )
        .header("Authorization", format!("Bearer {}", api_key))
        .header("Content-Type", "application/json")
        .header("Copilot-Integration-Id", "vscode-chat")
        .header("Copilot-Vision-Request", is_vision_request.to_string());

    let is_streaming = request.stream;

    let json = serde_json::to_string(&request)?;
    let request = request_builder.body(AsyncBody::from(json))?;
    let mut response = client.send(request).await?;

    if !response.status().is_success() {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;
        let body_str = std::str::from_utf8(&body)?;
        anyhow::bail!(
            "Failed to connect to API: {} {}",
            response.status(),
            body_str
        );
    }

    if is_streaming {
        let reader = BufReader::new(response.into_body());
        Ok(reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        let line = line.strip_prefix("data: ")?;
                        if line.starts_with("[DONE]") {
                            return None;
                        }

                        match serde_json::from_str::<ResponseEvent>(line) {
                            Ok(response) => {
                                if response.choices.is_empty() {
                                    None
                                } else {
                                    Some(Ok(response))
                                }
                            }
                            Err(error) => Some(Err(anyhow!(error))),
                        }
                    }
                    Err(error) => Some(Err(anyhow!(error))),
                }
            })
            .boxed())
    } else {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;
        let body_str = std::str::from_utf8(&body)?;
        let response: ResponseEvent = serde_json::from_str(body_str)?;

        Ok(futures::stream::once(async move { Ok(response) }).boxed())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_resilient_model_schema_deserialize() {
        let json = r#"{
              "data": [
                {
                  "capabilities": {
                    "family": "gpt-4",
                    "limits": {
                      "max_context_window_tokens": 32768,
                      "max_output_tokens": 4096,
                      "max_prompt_tokens": 32768
                    },
                    "object": "model_capabilities",
                    "supports": { "streaming": true, "tool_calls": true },
                    "tokenizer": "cl100k_base",
                    "type": "chat"
                  },
                  "id": "gpt-4",
                  "model_picker_enabled": false,
                  "name": "GPT 4",
                  "object": "model",
                  "preview": false,
                  "vendor": "Azure OpenAI",
                  "version": "gpt-4-0613"
                },
                {
                    "some-unknown-field": 123
                },
                {
                  "capabilities": {
                    "family": "claude-3.7-sonnet",
                    "limits": {
                      "max_context_window_tokens": 200000,
                      "max_output_tokens": 16384,
                      "max_prompt_tokens": 90000,
                      "vision": {
                        "max_prompt_image_size": 3145728,
                        "max_prompt_images": 1,
                        "supported_media_types": ["image/jpeg", "image/png", "image/webp"]
                      }
                    },
                    "object": "model_capabilities",
                    "supports": {
                      "parallel_tool_calls": true,
                      "streaming": true,
                      "tool_calls": true,
                      "vision": true
                    },
                    "tokenizer": "o200k_base",
                    "type": "chat"
                  },
                  "id": "claude-3.7-sonnet",
                  "model_picker_enabled": true,
                  "name": "Claude 3.7 Sonnet",
                  "object": "model",
                  "policy": {
                    "state": "enabled",
                    "terms": "Enable access to the latest Claude 3.7 Sonnet model from Anthropic. [Learn more about how GitHub Copilot serves Claude 3.7 Sonnet](https://docs.github.com/copilot/using-github-copilot/using-claude-sonnet-in-github-copilot)."
                  },
                  "preview": false,
                  "vendor": "Anthropic",
                  "version": "claude-3.7-sonnet"
                }
              ],
              "object": "list"
            }"#;

        let schema: ModelSchema = serde_json::from_str(&json).unwrap();

        assert_eq!(schema.data.len(), 2);
        assert_eq!(schema.data[0].id, "gpt-4");
        assert_eq!(schema.data[1].id, "claude-3.7-sonnet");
    }
}