use std::path::PathBuf;
use std::sync::Arc;
use std::sync::OnceLock;

use anyhow::Context as _;
use anyhow::{Result, anyhow};
use chrono::DateTime;
use collections::HashSet;
use fs::Fs;
use futures::{AsyncBufReadExt, AsyncReadExt, StreamExt, io::BufReader, stream::BoxStream};
use gpui::WeakEntity;
use gpui::{App, AsyncApp, Global, prelude::*};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use itertools::Itertools;
use paths::home_dir;
use serde::{Deserialize, Serialize};
use settings::watch_config_dir;

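/// Environment variable that can supply a GitHub Copilot OAuth token directly;
/// when set, it is used as the initial OAuth token instead of waiting for one
/// to be read from the Copilot configuration files.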
pub const COPILOT_OAUTH_ENV_VAR: &str = "GH_COPILOT_TOKEN";

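/// Configuration for Copilot Chat, including optional support for a GitHub
/// Enterprise deployment via `enterprise_uri`.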
#[derive(Default, Clone, Debug, PartialEq)]
pub struct CopilotChatConfiguration {
    pub enterprise_uri: Option<String>,
}

impl CopilotChatConfiguration {
    pub fn token_url(&self) -> String {
        if let Some(enterprise_uri) = &self.enterprise_uri {
            let domain = Self::parse_domain(enterprise_uri);
            format!("https://api.{}/copilot_internal/v2/token", domain)
        } else {
            "https://api.github.com/copilot_internal/v2/token".to_string()
        }
    }

    pub fn oauth_domain(&self) -> String {
        if let Some(enterprise_uri) = &self.enterprise_uri {
            Self::parse_domain(enterprise_uri)
        } else {
            "github.com".to_string()
        }
    }

    pub fn api_url_from_endpoint(&self, endpoint: &str) -> String {
        format!("{}/chat/completions", endpoint)
    }

    pub fn models_url_from_endpoint(&self, endpoint: &str) -> String {
        format!("{}/models", endpoint)
    }

    fn parse_domain(enterprise_uri: &str) -> String {
        let uri = enterprise_uri.trim_end_matches('/');

        if let Some(domain) = uri.strip_prefix("https://") {
            domain.split('/').next().unwrap_or(domain).to_string()
        } else if let Some(domain) = uri.strip_prefix("http://") {
            domain.split('/').next().unwrap_or(domain).to_string()
        } else {
            uri.split('/').next().unwrap_or(uri).to_string()
        }
    }
}

#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
    User,
    Assistant,
    System,
}

#[derive(Deserialize)]
struct ModelSchema {
    #[serde(deserialize_with = "deserialize_models_skip_errors")]
    data: Vec<Model>,
}

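/// Deserializes the `data` array from the models endpoint, logging and skipping
/// any entry that fails to deserialize so that one unrecognized model does not
/// invalidate the entire response.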
fn deserialize_models_skip_errors<'de, D>(deserializer: D) -> Result<Vec<Model>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let raw_values = Vec::<serde_json::Value>::deserialize(deserializer)?;
    let models = raw_values
        .into_iter()
        .filter_map(|value| match serde_json::from_value::<Model>(value) {
            Ok(model) => Some(model),
            Err(err) => {
                log::warn!("GitHub Copilot Chat model failed to deserialize: {:?}", err);
                None
            }
        })
        .collect();

    Ok(models)
}

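/// A single model entry as returned by the Copilot models endpoint.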
#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
pub struct Model {
    billing: ModelBilling,
    capabilities: ModelCapabilities,
    id: String,
    name: String,
    policy: Option<ModelPolicy>,
    vendor: ModelVendor,
    is_chat_default: bool,
    // The model with this value set to true is the one VS Code Copilot selects when the
    // premium request limit is reached. Zed does not currently implement this behaviour.
    is_chat_fallback: bool,
    model_picker_enabled: bool,
}

#[derive(Clone, Serialize, Deserialize, Debug, PartialEq)]
struct ModelBilling {
    is_premium: bool,
    multiplier: f64,
    // The list of plans this model is restricted to.
    // The field is absent when the model is available on all plans.
    #[serde(default)]
    restricted_to: Option<Vec<String>>,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelCapabilities {
    family: String,
    #[serde(default)]
    limits: ModelLimits,
    supports: ModelSupportedFeatures,
    #[serde(rename = "type")]
    model_type: String,
}

#[derive(Default, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelLimits {
    #[serde(default)]
    max_context_window_tokens: usize,
    #[serde(default)]
    max_output_tokens: usize,
    #[serde(default)]
    max_prompt_tokens: u64,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelPolicy {
    state: String,
}

#[derive(Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
struct ModelSupportedFeatures {
    #[serde(default)]
    streaming: bool,
    #[serde(default)]
    tool_calls: bool,
    #[serde(default)]
    parallel_tool_calls: bool,
    #[serde(default)]
    vision: bool,
}

#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
pub enum ModelVendor {
    // Azure OpenAI should have no functional difference from OpenAI in Copilot Chat
    #[serde(alias = "Azure OpenAI")]
    OpenAI,
    Google,
    Anthropic,
    #[serde(rename = "xAI")]
    XAI,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
#[serde(tag = "type")]
pub enum ChatMessagePart {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "image_url")]
    Image { image_url: ImageUrl },
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq, Clone)]
pub struct ImageUrl {
    pub url: String,
}

impl Model {
    pub fn uses_streaming(&self) -> bool {
        self.capabilities.supports.streaming
    }

    pub fn id(&self) -> &str {
        self.id.as_str()
    }

    pub fn display_name(&self) -> &str {
        self.name.as_str()
    }

    pub fn max_token_count(&self) -> u64 {
        self.capabilities.limits.max_prompt_tokens
    }

    pub fn supports_tools(&self) -> bool {
        self.capabilities.supports.tool_calls
    }

    pub fn vendor(&self) -> ModelVendor {
        self.vendor
    }

    pub fn supports_vision(&self) -> bool {
        self.capabilities.supports.vision
    }

    pub fn supports_parallel_tool_calls(&self) -> bool {
        self.capabilities.supports.parallel_tool_calls
    }
}

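/// The request body sent to the Copilot Chat completions endpoint.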
#[derive(Serialize, Deserialize)]
pub struct Request {
    pub intent: bool,
    pub n: usize,
    pub stream: bool,
    pub temperature: f32,
    pub model: String,
    pub messages: Vec<ChatMessage>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    pub tools: Vec<Tool>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<ToolChoice>,
}

#[derive(Serialize, Deserialize)]
pub struct Function {
    pub name: String,
    pub description: String,
    pub parameters: serde_json::Value,
}

#[derive(Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Tool {
    Function { function: Function },
}

#[derive(Serialize, Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum ToolChoice {
    Auto,
    Any,
    None,
}

#[derive(Serialize, Deserialize, Debug)]
#[serde(tag = "role", rename_all = "lowercase")]
pub enum ChatMessage {
    Assistant {
        content: ChatMessageContent,
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        tool_calls: Vec<ToolCall>,
    },
    User {
        content: ChatMessageContent,
    },
    System {
        content: String,
    },
    Tool {
        content: ChatMessageContent,
        tool_call_id: String,
    },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ChatMessageContent {
    Plain(String),
    Multipart(Vec<ChatMessagePart>),
}

impl ChatMessageContent {
    pub fn empty() -> Self {
        ChatMessageContent::Multipart(vec![])
    }
}

impl From<Vec<ChatMessagePart>> for ChatMessageContent {
    fn from(mut parts: Vec<ChatMessagePart>) -> Self {
        if let [ChatMessagePart::Text { text }] = parts.as_mut_slice() {
            ChatMessageContent::Plain(std::mem::take(text))
        } else {
            ChatMessageContent::Multipart(parts)
        }
    }
}

impl From<String> for ChatMessageContent {
    fn from(text: String) -> Self {
        ChatMessageContent::Plain(text)
    }
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCall {
    pub id: String,
    #[serde(flatten)]
    pub content: ToolCallContent,
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolCallContent {
    Function { function: FunctionContent },
}

#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionContent {
    pub name: String,
    pub arguments: String,
}

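/// A completion response chunk: for streaming requests each server-sent event
/// deserializes into one of these, and for non-streaming requests the entire
/// response body does.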
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub struct ResponseEvent {
    pub choices: Vec<ResponseChoice>,
    pub id: String,
    pub usage: Option<Usage>,
}

#[derive(Deserialize, Debug)]
pub struct Usage {
    pub completion_tokens: u64,
    pub prompt_tokens: u64,
    pub total_tokens: u64,
}

#[derive(Debug, Deserialize)]
pub struct ResponseChoice {
    pub index: usize,
    pub finish_reason: Option<String>,
    pub delta: Option<ResponseDelta>,
    pub message: Option<ResponseDelta>,
}

#[derive(Debug, Deserialize)]
pub struct ResponseDelta {
    pub content: Option<String>,
    pub role: Option<Role>,
    #[serde(default)]
    pub tool_calls: Vec<ToolCallChunk>,
}

#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCallChunk {
    pub index: usize,
    pub id: Option<String>,
    pub function: Option<FunctionChunk>,
}

#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct FunctionChunk {
    pub name: Option<String>,
    pub arguments: Option<String>,
}

#[derive(Deserialize)]
struct ApiTokenResponse {
    token: String,
    expires_at: i64,
    endpoints: ApiTokenResponseEndpoints,
}

#[derive(Deserialize)]
struct ApiTokenResponseEndpoints {
    api: String,
}

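/// A short-lived Copilot API token obtained by exchanging the OAuth token,
/// together with the API endpoint it is valid for and its expiry time.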
#[derive(Clone)]
struct ApiToken {
    api_key: String,
    expires_at: DateTime<chrono::Utc>,
    api_endpoint: String,
}

impl ApiToken {
    pub fn remaining_seconds(&self) -> i64 {
        self.expires_at
            .timestamp()
            .saturating_sub(chrono::Utc::now().timestamp())
    }
}

impl TryFrom<ApiTokenResponse> for ApiToken {
    type Error = anyhow::Error;

    fn try_from(response: ApiTokenResponse) -> Result<Self, Self::Error> {
        let expires_at =
            DateTime::from_timestamp(response.expires_at, 0).context("invalid expires_at")?;

        Ok(Self {
            api_key: response.token,
            expires_at,
            api_endpoint: response.endpoints.api,
        })
    }
}

struct GlobalCopilotChat(gpui::Entity<CopilotChat>);

impl Global for GlobalCopilotChat {}

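/// Global state for Copilot Chat: the user's OAuth token, the current API
/// token, the fetched model list, and the HTTP client used for requests.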
pub struct CopilotChat {
    oauth_token: Option<String>,
    api_token: Option<ApiToken>,
    configuration: CopilotChatConfiguration,
    models: Option<Vec<Model>>,
    client: Arc<dyn HttpClient>,
}

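/// Creates the [`CopilotChat`] entity and registers it as a GPUI global.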
pub fn init(
    fs: Arc<dyn Fs>,
    client: Arc<dyn HttpClient>,
    configuration: CopilotChatConfiguration,
    cx: &mut App,
) {
    let copilot_chat = cx.new(|cx| CopilotChat::new(fs, client, configuration, cx));
    cx.set_global(GlobalCopilotChat(copilot_chat));
}

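/// Returns the directory containing the GitHub Copilot configuration files:
/// the local app data directory on Windows, otherwise `$XDG_CONFIG_HOME`
/// (falling back to `~/.config`), joined with `github-copilot`.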
pub fn copilot_chat_config_dir() -> &'static PathBuf {
    static COPILOT_CHAT_CONFIG_DIR: OnceLock<PathBuf> = OnceLock::new();

    COPILOT_CHAT_CONFIG_DIR.get_or_init(|| {
        let config_dir = if cfg!(target_os = "windows") {
            dirs::data_local_dir().expect("failed to determine LocalAppData directory")
        } else {
            std::env::var("XDG_CONFIG_HOME")
                .map(PathBuf::from)
                .unwrap_or_else(|_| home_dir().join(".config"))
        };

        config_dir.join("github-copilot")
    })
}

fn copilot_chat_config_paths() -> [PathBuf; 2] {
    let base_dir = copilot_chat_config_dir();
    [base_dir.join("hosts.json"), base_dir.join("apps.json")]
}

impl CopilotChat {
    pub fn global(cx: &App) -> Option<gpui::Entity<Self>> {
        cx.try_global::<GlobalCopilotChat>()
            .map(|model| model.0.clone())
    }

    fn new(
        fs: Arc<dyn Fs>,
        client: Arc<dyn HttpClient>,
        configuration: CopilotChatConfiguration,
        cx: &mut Context<Self>,
    ) -> Self {
        let config_paths: HashSet<PathBuf> = copilot_chat_config_paths().into_iter().collect();
        let dir_path = copilot_chat_config_dir();

        cx.spawn(async move |this, cx| {
            let mut parent_watch_rx = watch_config_dir(
                cx.background_executor(),
                fs.clone(),
                dir_path.clone(),
                config_paths,
            );
            while let Some(contents) = parent_watch_rx.next().await {
                let oauth_domain =
                    this.read_with(cx, |this, _| this.configuration.oauth_domain())?;
                let oauth_token = extract_oauth_token(contents, &oauth_domain);

                this.update(cx, |this, cx| {
                    this.oauth_token = oauth_token.clone();
                    cx.notify();
                })?;

                if oauth_token.is_some() {
                    Self::update_models(&this, cx).await?;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        let this = Self {
            oauth_token: std::env::var(COPILOT_OAUTH_ENV_VAR).ok(),
            api_token: None,
            models: None,
            configuration,
            client,
        };

        if this.oauth_token.is_some() {
            cx.spawn(async move |this, cx| Self::update_models(&this, cx).await)
                .detach_and_log_err(cx);
        }

        this
    }

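    /// Exchanges the stored OAuth token for a fresh API token, fetches the
    /// model list from the returned endpoint, and stores both on this entity.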
    async fn update_models(this: &WeakEntity<Self>, cx: &mut AsyncApp) -> Result<()> {
        let (oauth_token, client, configuration) = this.read_with(cx, |this, _| {
            (
                this.oauth_token.clone(),
                this.client.clone(),
                this.configuration.clone(),
            )
        })?;

        let oauth_token = oauth_token
            .ok_or_else(|| anyhow!("OAuth token is missing while updating Copilot Chat models"))?;

        let token_url = configuration.token_url();
        let api_token = request_api_token(&oauth_token, token_url.into(), client.clone()).await?;

        let models_url = configuration.models_url_from_endpoint(&api_token.api_endpoint);
        let models =
            get_models(models_url.into(), api_token.api_key.clone(), client.clone()).await?;

        this.update(cx, |this, cx| {
            this.api_token = Some(api_token);
            this.models = Some(models);
            cx.notify();
        })?;
        anyhow::Ok(())
    }

    pub fn is_authenticated(&self) -> bool {
        self.oauth_token.is_some()
    }

    pub fn models(&self) -> Option<&[Model]> {
        self.models.as_deref()
    }

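    /// Streams a chat completion, reusing the cached API token when it remains
    /// valid for more than five minutes and requesting a new one otherwise.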
    pub async fn stream_completion(
        request: Request,
        is_user_initiated: bool,
        mut cx: AsyncApp,
    ) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
        let this = cx
            .update(|cx| Self::global(cx))
            .ok()
            .flatten()
            .context("Copilot chat is not enabled")?;

        let (oauth_token, api_token, client, configuration) = this.read_with(&cx, |this, _| {
            (
                this.oauth_token.clone(),
                this.api_token.clone(),
                this.client.clone(),
                this.configuration.clone(),
            )
        })?;

        let oauth_token = oauth_token.context("No OAuth token available")?;

        let token = match api_token {
            Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
            _ => {
                let token_url = configuration.token_url();
                let token =
                    request_api_token(&oauth_token, token_url.into(), client.clone()).await?;
                this.update(&mut cx, |this, cx| {
                    this.api_token = Some(token.clone());
                    cx.notify();
                })?;
                token
            }
        };

        let api_url = configuration.api_url_from_endpoint(&token.api_endpoint);
        stream_completion(
            client.clone(),
            token.api_key,
            api_url.into(),
            request,
            is_user_initiated,
        )
        .await
    }

    pub fn set_configuration(
        &mut self,
        configuration: CopilotChatConfiguration,
        cx: &mut Context<Self>,
    ) {
        let same_configuration = self.configuration == configuration;
        self.configuration = configuration;
        if !same_configuration {
            self.api_token = None;
            cx.spawn(async move |this, cx| {
                Self::update_models(&this, cx).await?;
                Ok::<_, anyhow::Error>(())
            })
            .detach();
        }
    }
}

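/// Fetches the available models and keeps only chat models that are enabled
/// for the model picker and not disabled by policy, dropping consecutive
/// entries from the same model family and moving the default chat model to
/// the front of the list.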
async fn get_models(
    models_url: Arc<str>,
    api_token: String,
    client: Arc<dyn HttpClient>,
) -> Result<Vec<Model>> {
    let all_models = request_models(models_url, api_token, client).await?;

    let mut models: Vec<Model> = all_models
        .into_iter()
        .filter(|model| {
            model.model_picker_enabled
                && model.capabilities.model_type.as_str() == "chat"
                && model
                    .policy
                    .as_ref()
                    .is_none_or(|policy| policy.state == "enabled")
        })
        .dedup_by(|a, b| a.capabilities.family == b.capabilities.family)
        .collect();

    if let Some(default_model_position) = models.iter().position(|model| model.is_chat_default) {
        let default_model = models.remove(default_model_position);
        models.insert(0, default_model);
    }

    Ok(models)
}

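/// Performs the GET request against the Copilot models endpoint and
/// deserializes the response into the raw model list.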
async fn request_models(
    models_url: Arc<str>,
    api_token: String,
    client: Arc<dyn HttpClient>,
) -> Result<Vec<Model>> {
    let request_builder = HttpRequest::builder()
        .method(Method::GET)
        .uri(models_url.as_ref())
        .header("Authorization", format!("Bearer {}", api_token))
        .header("Content-Type", "application/json")
        .header("Copilot-Integration-Id", "vscode-chat")
        .header("Editor-Version", "vscode/1.103.2")
        .header("x-github-api-version", "2025-05-01");

    let request = request_builder.body(AsyncBody::empty())?;

    let mut response = client.send(request).await?;

    anyhow::ensure!(
        response.status().is_success(),
        "Failed to request models: {}",
        response.status()
    );
    let mut body = Vec::new();
    response.body_mut().read_to_end(&mut body).await?;

    let body_str = std::str::from_utf8(&body)?;

    let models = serde_json::from_str::<ModelSchema>(body_str)?.data;

    Ok(models)
}

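/// Exchanges the OAuth token for a Copilot API token by calling the configured
/// token endpoint.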
async fn request_api_token(
    oauth_token: &str,
    auth_url: Arc<str>,
    client: Arc<dyn HttpClient>,
) -> Result<ApiToken> {
    let request_builder = HttpRequest::builder()
        .method(Method::GET)
        .uri(auth_url.as_ref())
        .header("Authorization", format!("token {}", oauth_token))
        .header("Accept", "application/json");

    let request = request_builder.body(AsyncBody::empty())?;

    let mut response = client.send(request).await?;

    if response.status().is_success() {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;

        let body_str = std::str::from_utf8(&body)?;

        let parsed: ApiTokenResponse = serde_json::from_str(body_str)?;
        ApiToken::try_from(parsed)
    } else {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;

        let body_str = std::str::from_utf8(&body)?;
        anyhow::bail!("Failed to request API token: {body_str}");
    }
}

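/// Extracts the OAuth token for the given domain from the contents of a
/// Copilot `hosts.json` or `apps.json` file, whose top-level keys are prefixed
/// with the GitHub host they belong to.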
fn extract_oauth_token(contents: String, domain: &str) -> Option<String> {
    serde_json::from_str::<serde_json::Value>(&contents)
        .map(|v| {
            v.as_object().and_then(|obj| {
                obj.iter().find_map(|(key, value)| {
                    if key.starts_with(domain) {
                        value["oauth_token"].as_str().map(|v| v.to_string())
                    } else {
                        None
                    }
                })
            })
        })
        .ok()
        .flatten()
}

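/// Sends the completion request and returns a stream of response events: the
/// parsed server-sent events when `request.stream` is true, otherwise a
/// single-element stream containing the full response.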
async fn stream_completion(
    client: Arc<dyn HttpClient>,
    api_key: String,
    completion_url: Arc<str>,
    request: Request,
    is_user_initiated: bool,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
    let is_vision_request = request.messages.iter().any(|message| match message {
        ChatMessage::User { content }
        | ChatMessage::Assistant { content, .. }
        | ChatMessage::Tool { content, .. } => {
            matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
        }
        _ => false,
    });

    let request_initiator = if is_user_initiated { "user" } else { "agent" };

    let mut request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(completion_url.as_ref())
        .header(
            "Editor-Version",
            format!(
                "Zed/{}",
                option_env!("CARGO_PKG_VERSION").unwrap_or("unknown")
            ),
        )
        .header("Authorization", format!("Bearer {}", api_key))
        .header("Content-Type", "application/json")
        .header("Copilot-Integration-Id", "vscode-chat")
        .header("X-Initiator", request_initiator);

    if is_vision_request {
        request_builder =
            request_builder.header("Copilot-Vision-Request", is_vision_request.to_string());
    }

    let is_streaming = request.stream;

    let json = serde_json::to_string(&request)?;
    let request = request_builder.body(AsyncBody::from(json))?;
    let mut response = client.send(request).await?;

    if !response.status().is_success() {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;
        let body_str = std::str::from_utf8(&body)?;
        anyhow::bail!(
            "Failed to connect to API: {} {}",
            response.status(),
            body_str
        );
    }

    if is_streaming {
        let reader = BufReader::new(response.into_body());
        Ok(reader
            .lines()
            .filter_map(|line| async move {
                match line {
                    Ok(line) => {
                        let line = line.strip_prefix("data: ")?;
                        if line.starts_with("[DONE]") {
                            return None;
                        }

                        match serde_json::from_str::<ResponseEvent>(line) {
                            Ok(response) => {
                                if response.choices.is_empty() {
                                    None
                                } else {
                                    Some(Ok(response))
                                }
                            }
                            Err(error) => Some(Err(anyhow!(error))),
                        }
                    }
                    Err(error) => Some(Err(anyhow!(error))),
                }
            })
            .boxed())
    } else {
        let mut body = Vec::new();
        response.body_mut().read_to_end(&mut body).await?;
        let body_str = std::str::from_utf8(&body)?;
        let response: ResponseEvent = serde_json::from_str(body_str)?;

        Ok(futures::stream::once(async move { Ok(response) }).boxed())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_resilient_model_schema_deserialize() {
        let json = r#"{
            "data": [
                {
                    "billing": {
                        "is_premium": false,
                        "multiplier": 0
                    },
                    "capabilities": {
                        "family": "gpt-4",
                        "limits": {
                            "max_context_window_tokens": 32768,
                            "max_output_tokens": 4096,
                            "max_prompt_tokens": 32768
                        },
                        "object": "model_capabilities",
                        "supports": { "streaming": true, "tool_calls": true },
                        "tokenizer": "cl100k_base",
                        "type": "chat"
                    },
                    "id": "gpt-4",
                    "is_chat_default": false,
                    "is_chat_fallback": false,
                    "model_picker_enabled": false,
                    "name": "GPT 4",
                    "object": "model",
                    "preview": false,
                    "vendor": "Azure OpenAI",
                    "version": "gpt-4-0613"
                },
                {
                    "some-unknown-field": 123
                },
                {
                    "billing": {
                        "is_premium": true,
                        "multiplier": 1,
                        "restricted_to": [
                            "pro",
                            "pro_plus",
                            "business",
                            "enterprise"
                        ]
                    },
                    "capabilities": {
                        "family": "claude-3.7-sonnet",
                        "limits": {
                            "max_context_window_tokens": 200000,
                            "max_output_tokens": 16384,
                            "max_prompt_tokens": 90000,
                            "vision": {
                                "max_prompt_image_size": 3145728,
                                "max_prompt_images": 1,
                                "supported_media_types": ["image/jpeg", "image/png", "image/webp"]
                            }
                        },
                        "object": "model_capabilities",
                        "supports": {
                            "parallel_tool_calls": true,
                            "streaming": true,
                            "tool_calls": true,
                            "vision": true
                        },
                        "tokenizer": "o200k_base",
                        "type": "chat"
                    },
                    "id": "claude-3.7-sonnet",
                    "is_chat_default": false,
                    "is_chat_fallback": false,
                    "model_picker_enabled": true,
                    "name": "Claude 3.7 Sonnet",
                    "object": "model",
                    "policy": {
                        "state": "enabled",
                        "terms": "Enable access to the latest Claude 3.7 Sonnet model from Anthropic. [Learn more about how GitHub Copilot serves Claude 3.7 Sonnet](https://docs.github.com/copilot/using-github-copilot/using-claude-sonnet-in-github-copilot)."
                    },
                    "preview": false,
                    "vendor": "Anthropic",
                    "version": "claude-3.7-sonnet"
                }
            ],
            "object": "list"
        }"#;

        let schema: ModelSchema = serde_json::from_str(json).unwrap();

        assert_eq!(schema.data.len(), 2);
        assert_eq!(schema.data[0].id, "gpt-4");
        assert_eq!(schema.data[1].id, "claude-3.7-sonnet");
    }
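
    // Sanity-checks how `CopilotChatConfiguration` derives the token URL and
    // OAuth domain for an enterprise deployment (using a hypothetical
    // `example.ghe.com` domain) versus the default github.com configuration.
    #[test]
    fn test_enterprise_configuration_urls() {
        let enterprise = CopilotChatConfiguration {
            enterprise_uri: Some("https://example.ghe.com/".to_string()),
        };
        assert_eq!(
            enterprise.token_url(),
            "https://api.example.ghe.com/copilot_internal/v2/token"
        );
        assert_eq!(enterprise.oauth_domain(), "example.ghe.com");

        let default_configuration = CopilotChatConfiguration::default();
        assert_eq!(
            default_configuration.token_url(),
            "https://api.github.com/copilot_internal/v2/token"
        );
        assert_eq!(default_configuration.oauth_domain(), "github.com");
    }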
}