use std::pin::Pin;
use std::str::FromStr as _;
use std::sync::Arc;

use anyhow::{Result, anyhow};
use cloud_llm_client::CompletionIntent;
use collections::HashMap;
use copilot::copilot_chat::{
    ChatMessage, ChatMessageContent, ChatMessagePart, CopilotChat, ImageUrl,
    Model as CopilotChatModel, ModelVendor, Request as CopilotChatRequest, ResponseEvent, Tool,
    ToolCall,
};
use copilot::{Copilot, Status};
use futures::future::BoxFuture;
use futures::stream::BoxStream;
use futures::{FutureExt, Stream, StreamExt};
use gpui::{Action, AnyView, App, AsyncApp, Entity, Render, Subscription, Task, svg};
use language::language_settings::all_language_settings;
use language_model::{
    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
    LanguageModelRequestMessage, LanguageModelToolChoice, LanguageModelToolResultContent,
    LanguageModelToolSchemaFormat, LanguageModelToolUse, MessageContent, RateLimiter, Role,
    StopReason, TokenUsage,
};
use settings::SettingsStore;
use ui::{CommonAnimationExt, prelude::*};
use util::debug_panic;

use crate::provider::x_ai::count_xai_tokens;

use super::anthropic::count_anthropic_tokens;
use super::google::count_google_tokens;
use super::open_ai::count_open_ai_tokens;

const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("copilot_chat");
const PROVIDER_NAME: LanguageModelProviderName =
    LanguageModelProviderName::new("GitHub Copilot Chat");

pub struct CopilotChatLanguageModelProvider {
    state: Entity<State>,
}

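/// Observable state for the Copilot Chat provider. Its subscriptions re-notify observers when the
/// global [`CopilotChat`] entity or the settings store changes.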
pub struct State {
    _copilot_chat_subscription: Option<Subscription>,
    _settings_subscription: Subscription,
}

impl State {
    fn is_authenticated(&self, cx: &App) -> bool {
        CopilotChat::global(cx)
            .map(|m| m.read(cx).is_authenticated())
            .unwrap_or(false)
    }
}

impl CopilotChatLanguageModelProvider {
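    /// Creates the provider, observing the global [`CopilotChat`] entity (when present) and
    /// re-applying the Copilot enterprise URI from language settings whenever settings change.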
    pub fn new(cx: &mut App) -> Self {
        let state = cx.new(|cx| {
            let copilot_chat_subscription = CopilotChat::global(cx)
                .map(|copilot_chat| cx.observe(&copilot_chat, |_, _, cx| cx.notify()));
            State {
                _copilot_chat_subscription: copilot_chat_subscription,
                _settings_subscription: cx.observe_global::<SettingsStore>(|_, cx| {
                    if let Some(copilot_chat) = CopilotChat::global(cx) {
                        let language_settings = all_language_settings(None, cx);
                        let configuration = copilot::copilot_chat::CopilotChatConfiguration {
                            enterprise_uri: language_settings
                                .edit_predictions
                                .copilot
                                .enterprise_uri
                                .clone(),
                        };
                        copilot_chat.update(cx, |chat, cx| {
                            chat.set_configuration(configuration, cx);
                        });
                    }
                    cx.notify();
                }),
            }
        });

        Self { state }
    }

    fn create_language_model(&self, model: CopilotChatModel) -> Arc<dyn LanguageModel> {
        Arc::new(CopilotChatLanguageModel {
            model,
            request_limiter: RateLimiter::new(4),
        })
    }
}

impl LanguageModelProviderState for CopilotChatLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<gpui::Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for CopilotChatLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn icon(&self) -> IconName {
        IconName::Copilot
    }

    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
        let models = CopilotChat::global(cx).and_then(|m| m.read(cx).models())?;
        models
            .first()
            .map(|model| self.create_language_model(model.clone()))
    }

    fn default_fast_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
        // The default model should be Copilot Chat's 'base model', which is likely a relatively fast
        // model (e.g. 4o) and a sensible choice when considering premium requests
        self.default_model(cx)
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let Some(models) = CopilotChat::global(cx).and_then(|m| m.read(cx).models()) else {
            return Vec::new();
        };
        models
            .iter()
            .map(|model| self.create_language_model(model.clone()))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated(cx)
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        if self.is_authenticated(cx) {
            return Task::ready(Ok(()));
        }

        let Some(copilot) = Copilot::global(cx) else {
            return Task::ready(Err(anyhow!(
                "Copilot must be enabled for Copilot Chat to work. Please enable Copilot and try again."
            ).into()));
        };

        let err = match copilot.read(cx).status() {
            Status::Authorized => return Task::ready(Ok(())),
            Status::Disabled => anyhow!(
                "Copilot must be enabled for Copilot Chat to work. Please enable Copilot and try again."
            ),
            Status::Error(err) => anyhow!(
                "Received the following error while signing into Copilot: {err}"
            ),
            Status::Starting { task: _ } => anyhow!(
                "Copilot is still starting. Please wait for it to finish starting, then try again."
            ),
            Status::Unauthorized => anyhow!(
                "Unable to authorize with Copilot. Please make sure that you have an active Copilot and Copilot Chat subscription."
            ),
            Status::SignedOut { .. } => {
                anyhow!("You have signed out of Copilot. Please sign in to Copilot and try again.")
            }
            Status::SigningIn { prompt: _ } => anyhow!("Still signing into Copilot..."),
        };

        Task::ready(Err(err.into()))
    }

    fn configuration_view(
        &self,
        _target_agent: language_model::ConfigurationViewTargetAgent,
        _: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        let state = self.state.clone();
        cx.new(|cx| ConfigurationView::new(state, cx)).into()
    }

    fn reset_credentials(&self, _cx: &mut App) -> Task<Result<()>> {
        Task::ready(Err(anyhow!(
            "Signing out of GitHub Copilot Chat is currently not supported."
        )))
    }
}

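/// A single Copilot Chat model exposed through the [`LanguageModel`] trait, with concurrent
/// requests limited by a [`RateLimiter`].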
pub struct CopilotChatLanguageModel {
    model: CopilotChatModel,
    request_limiter: RateLimiter,
}

impl LanguageModel for CopilotChatLanguageModel {
    fn id(&self) -> LanguageModelId {
        LanguageModelId::from(self.model.id().to_string())
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        self.model.supports_tools()
    }

    fn supports_images(&self) -> bool {
        self.model.supports_vision()
    }

    fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
        match self.model.vendor() {
            ModelVendor::OpenAI | ModelVendor::Anthropic => {
                LanguageModelToolSchemaFormat::JsonSchema
            }
            ModelVendor::Google | ModelVendor::XAI => {
                LanguageModelToolSchemaFormat::JsonSchemaSubset
            }
        }
    }

    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto
            | LanguageModelToolChoice::Any
            | LanguageModelToolChoice::None => self.supports_tools(),
        }
    }

    fn telemetry_id(&self) -> String {
        format!("copilot_chat/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
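        // Token counting is delegated to the counter matching the upstream vendor behind this
        // Copilot model.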
        match self.model.vendor() {
            ModelVendor::Anthropic => count_anthropic_tokens(request, cx),
            ModelVendor::Google => count_google_tokens(request, cx),
            ModelVendor::XAI => {
                let model = x_ai::Model::from_id(self.model.id()).unwrap_or_default();
                count_xai_tokens(request, model, cx)
            }
            ModelVendor::OpenAI => {
                let model = open_ai::Model::from_id(self.model.id()).unwrap_or_default();
                count_open_ai_tokens(request, model, cx)
            }
        }
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
            LanguageModelCompletionError,
        >,
    > {
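        // Classify the request as user-initiated or not based on its completion intent (requests
        // with no intent default to user-initiated); the flag is forwarded to
        // `CopilotChat::stream_completion` below.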
        let is_user_initiated = request.intent.is_none_or(|intent| match intent {
            CompletionIntent::UserPrompt
            | CompletionIntent::ThreadContextSummarization
            | CompletionIntent::InlineAssist
            | CompletionIntent::TerminalInlineAssist
            | CompletionIntent::GenerateGitCommitMessage => true,

            CompletionIntent::ToolResults
            | CompletionIntent::ThreadSummarization
            | CompletionIntent::CreateFile
            | CompletionIntent::EditFile => false,
        });

        let copilot_request = match into_copilot_chat(&self.model, request) {
            Ok(request) => request,
            Err(err) => return futures::future::ready(Err(err.into())).boxed(),
        };
        let is_streaming = copilot_request.stream;

        let request_limiter = self.request_limiter.clone();
        let future = cx.spawn(async move |cx| {
            let request =
                CopilotChat::stream_completion(copilot_request, is_user_initiated, cx.clone());
            request_limiter
                .stream(async move {
                    let response = request.await?;
                    Ok(map_to_language_model_completion_events(
                        response,
                        is_streaming,
                    ))
                })
                .await
        });
        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

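/// Maps Copilot Chat response events onto [`LanguageModelCompletionEvent`]s, handling both
/// streaming deltas and non-streaming messages, accumulating partial tool calls, and translating
/// finish reasons into stop events.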
pub fn map_to_language_model_completion_events(
    events: Pin<Box<dyn Send + Stream<Item = Result<ResponseEvent>>>>,
    is_streaming: bool,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
    #[derive(Default)]
    struct RawToolCall {
        id: String,
        name: String,
        arguments: String,
    }

    struct State {
        events: Pin<Box<dyn Send + Stream<Item = Result<ResponseEvent>>>>,
        tool_calls_by_index: HashMap<usize, RawToolCall>,
    }

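    // Streamed tool calls arrive as fragments keyed by index; accumulate them in
    // `tool_calls_by_index` until a "tool_calls" finish reason signals they are complete.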
    futures::stream::unfold(
        State {
            events,
            tool_calls_by_index: HashMap::default(),
        },
        move |mut state| async move {
            if let Some(event) = state.events.next().await {
                match event {
                    Ok(event) => {
                        let Some(choice) = event.choices.first() else {
                            return Some((
                                vec![Err(anyhow!("Response contained no choices").into())],
                                state,
                            ));
                        };

                        let delta = if is_streaming {
                            choice.delta.as_ref()
                        } else {
                            choice.message.as_ref()
                        };

                        let Some(delta) = delta else {
                            return Some((
                                vec![Err(anyhow!("Response contained no delta").into())],
                                state,
                            ));
                        };

                        let mut events = Vec::new();
                        if let Some(content) = delta.content.clone() {
                            events.push(Ok(LanguageModelCompletionEvent::Text(content)));
                        }

                        for tool_call in &delta.tool_calls {
                            let entry = state
                                .tool_calls_by_index
                                .entry(tool_call.index)
                                .or_default();

                            if let Some(tool_id) = tool_call.id.clone() {
                                entry.id = tool_id;
                            }

                            if let Some(function) = tool_call.function.as_ref() {
                                if let Some(name) = function.name.clone() {
                                    entry.name = name;
                                }

                                if let Some(arguments) = function.arguments.clone() {
                                    entry.arguments.push_str(&arguments);
                                }
                            }
                        }

                        if let Some(usage) = event.usage {
                            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(
                                TokenUsage {
                                    input_tokens: usage.prompt_tokens,
                                    output_tokens: usage.completion_tokens,
                                    cache_creation_input_tokens: 0,
                                    cache_read_input_tokens: 0,
                                },
                            )));
                        }

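                        // Translate the finish reason reported by this chunk into stop and
                        // tool-use events.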
                        match choice.finish_reason.as_deref() {
                            Some("stop") => {
                                events.push(Ok(LanguageModelCompletionEvent::Stop(
                                    StopReason::EndTurn,
                                )));
                            }
                            Some("tool_calls") => {
                                events.extend(state.tool_calls_by_index.drain().map(
                                    |(_, tool_call)| {
                                        // The model can output an empty string
                                        // to indicate the absence of arguments.
                                        // When that happens, create an empty
                                        // object instead.
                                        let arguments = if tool_call.arguments.is_empty() {
                                            Ok(serde_json::Value::Object(Default::default()))
                                        } else {
                                            serde_json::Value::from_str(&tool_call.arguments)
                                        };
                                        match arguments {
                                            Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
                                                LanguageModelToolUse {
                                                    id: tool_call.id.clone().into(),
                                                    name: tool_call.name.as_str().into(),
                                                    is_input_complete: true,
                                                    input,
                                                    raw_input: tool_call.arguments.clone(),
                                                },
                                            )),
                                            Err(error) => Ok(
                                                LanguageModelCompletionEvent::ToolUseJsonParseError {
                                                    id: tool_call.id.into(),
                                                    tool_name: tool_call.name.as_str().into(),
                                                    raw_input: tool_call.arguments.into(),
                                                    json_parse_error: error.to_string(),
                                                },
                                            ),
                                        }
                                    },
                                ));

                                events.push(Ok(LanguageModelCompletionEvent::Stop(
                                    StopReason::ToolUse,
                                )));
                            }
                            Some(stop_reason) => {
                                log::error!("Unexpected Copilot Chat stop_reason: {stop_reason:?}");
                                events.push(Ok(LanguageModelCompletionEvent::Stop(
                                    StopReason::EndTurn,
                                )));
                            }
                            None => {}
                        }

                        return Some((events, state));
                    }
                    Err(err) => return Some((vec![Err(anyhow!(err).into())], state)),
                }
            }

            None
        },
    )
    .flat_map(futures::stream::iter)
}

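/// Converts a [`LanguageModelRequest`] into a Copilot Chat request: consecutive messages with the
/// same role are merged, tool uses and tool results are mapped to Copilot's message types, and
/// image content is only forwarded when the model supports vision.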
fn into_copilot_chat(
    model: &copilot::copilot_chat::Model,
    request: LanguageModelRequest,
) -> Result<CopilotChatRequest> {
    let mut request_messages: Vec<LanguageModelRequestMessage> = Vec::new();
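    // Coalesce consecutive messages that share a role into a single message.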
    for message in request.messages {
        if let Some(last_message) = request_messages.last_mut() {
            if last_message.role == message.role {
                last_message.content.extend(message.content);
            } else {
                request_messages.push(message);
            }
        } else {
            request_messages.push(message);
        }
    }

    let mut messages: Vec<ChatMessage> = Vec::new();
    for message in request_messages {
        match message.role {
            Role::User => {
                for content in &message.content {
                    if let MessageContent::ToolResult(tool_result) = content {
                        let content = match &tool_result.content {
                            LanguageModelToolResultContent::Text(text) => text.to_string().into(),
                            LanguageModelToolResultContent::Image(image) => {
                                if model.supports_vision() {
                                    ChatMessageContent::Multipart(vec![ChatMessagePart::Image {
                                        image_url: ImageUrl {
                                            url: image.to_base64_url(),
                                        },
                                    }])
                                } else {
                                    debug_panic!(
                                        "This should be caught at {} level",
                                        tool_result.tool_name
                                    );
                                    "[Tool responded with an image, but this model does not support vision]".to_string().into()
                                }
                            }
                        };

                        messages.push(ChatMessage::Tool {
                            tool_call_id: tool_result.tool_use_id.to_string(),
                            content,
                        });
                    }
                }

                let mut content_parts = Vec::new();
                for content in &message.content {
                    match content {
                        MessageContent::Text(text) | MessageContent::Thinking { text, .. }
                            if !text.is_empty() =>
                        {
                            if let Some(ChatMessagePart::Text { text: text_content }) =
                                content_parts.last_mut()
                            {
                                text_content.push_str(text);
                            } else {
                                content_parts.push(ChatMessagePart::Text {
                                    text: text.to_string(),
                                });
                            }
                        }
                        MessageContent::Image(image) if model.supports_vision() => {
                            content_parts.push(ChatMessagePart::Image {
                                image_url: ImageUrl {
                                    url: image.to_base64_url(),
                                },
                            });
                        }
                        _ => {}
                    }
                }

                if !content_parts.is_empty() {
                    messages.push(ChatMessage::User {
                        content: content_parts.into(),
                    });
                }
            }
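            // Assistant messages carry any tool calls alongside their concatenated text content.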
            Role::Assistant => {
                let mut tool_calls = Vec::new();
                for content in &message.content {
                    if let MessageContent::ToolUse(tool_use) = content {
                        tool_calls.push(ToolCall {
                            id: tool_use.id.to_string(),
                            content: copilot::copilot_chat::ToolCallContent::Function {
                                function: copilot::copilot_chat::FunctionContent {
                                    name: tool_use.name.to_string(),
                                    arguments: serde_json::to_string(&tool_use.input)?,
                                },
                            },
                        });
                    }
                }

                let text_content = {
                    let mut buffer = String::new();
                    for string in message.content.iter().filter_map(|content| match content {
                        MessageContent::Text(text) | MessageContent::Thinking { text, .. } => {
                            Some(text.as_str())
                        }
                        MessageContent::ToolUse(_)
                        | MessageContent::RedactedThinking(_)
                        | MessageContent::ToolResult(_)
                        | MessageContent::Image(_) => None,
                    }) {
                        buffer.push_str(string);
                    }

                    buffer
                };

                messages.push(ChatMessage::Assistant {
                    content: if text_content.is_empty() {
                        ChatMessageContent::empty()
                    } else {
                        text_content.into()
                    },
                    tool_calls,
                });
            }
            Role::System => messages.push(ChatMessage::System {
                content: message.string_contents(),
            }),
        }
    }

    let tools = request
        .tools
        .iter()
        .map(|tool| Tool::Function {
            function: copilot::copilot_chat::Function {
                name: tool.name.clone(),
                description: tool.description.clone(),
                parameters: tool.input_schema.clone(),
            },
        })
        .collect::<Vec<_>>();

    Ok(CopilotChatRequest {
        intent: true,
        n: 1,
        stream: model.uses_streaming(),
        temperature: 0.1,
        model: model.id().to_string(),
        messages,
        tools,
        tool_choice: request.tool_choice.map(|choice| match choice {
            LanguageModelToolChoice::Auto => copilot::copilot_chat::ToolChoice::Auto,
            LanguageModelToolChoice::Any => copilot::copilot_chat::ToolChoice::Any,
            LanguageModelToolChoice::None => copilot::copilot_chat::ToolChoice::None,
        }),
    })
}

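/// Configuration UI for the Copilot Chat provider, reflecting the current Copilot sign-in status.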
struct ConfigurationView {
    copilot_status: Option<copilot::Status>,
    state: Entity<State>,
    _subscription: Option<Subscription>,
}

impl ConfigurationView {
    pub fn new(state: Entity<State>, cx: &mut Context<Self>) -> Self {
        let copilot = Copilot::global(cx);

        Self {
            copilot_status: copilot.as_ref().map(|copilot| copilot.read(cx).status()),
            state,
            _subscription: copilot.as_ref().map(|copilot| {
                cx.observe(copilot, |this, model, cx| {
                    this.copilot_status = Some(model.read(cx).status());
                    cx.notify();
                })
            }),
        }
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        if self.state.read(cx).is_authenticated(cx) {
            h_flex()
                .mt_1()
                .p_1()
                .justify_between()
                .rounded_md()
                .border_1()
                .border_color(cx.theme().colors().border)
                .bg(cx.theme().colors().background)
                .child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success))
                        .child(Label::new("Authorized")),
                )
                .child(
                    Button::new("sign_out", "Sign Out")
                        .label_size(LabelSize::Small)
                        .on_click(|_, window, cx| {
                            window.dispatch_action(copilot::SignOut.boxed_clone(), cx);
                        }),
                )
        } else {
            let loading_icon = Icon::new(IconName::ArrowCircle).with_rotate_animation(4);

            const ERROR_LABEL: &str = "Copilot Chat requires an active GitHub Copilot subscription. Please ensure Copilot is configured and try again, or use a different Assistant provider.";

            match &self.copilot_status {
                Some(status) => match status {
                    Status::Starting { task: _ } => h_flex()
                        .gap_2()
                        .child(loading_icon)
                        .child(Label::new("Starting Copilot…")),
                    Status::SigningIn { prompt: _ }
                    | Status::SignedOut {
                        awaiting_signing_in: true,
                    } => h_flex()
                        .gap_2()
                        .child(loading_icon)
                        .child(Label::new("Signing into Copilot…")),
                    Status::Error(_) => {
                        const LABEL: &str = "Copilot had issues starting. Please try restarting it. If the issue persists, try reinstalling Copilot.";
                        v_flex()
                            .gap_6()
                            .child(Label::new(LABEL))
                            .child(svg().size_8().path(IconName::CopilotError.path()))
                    }
                    _ => {
                        const LABEL: &str = "To use Zed's agent with GitHub Copilot, you need to be logged in to GitHub. Note that your GitHub account must have an active Copilot Chat subscription.";

                        v_flex().gap_2().child(Label::new(LABEL)).child(
                            Button::new("sign_in", "Sign in to use GitHub Copilot")
                                .icon_color(Color::Muted)
                                .icon(IconName::Github)
                                .icon_position(IconPosition::Start)
                                .icon_size(IconSize::Medium)
                                .full_width()
                                .on_click(|_, window, cx| copilot::initiate_sign_in(window, cx)),
                        )
                    }
                },
                None => v_flex().gap_6().child(Label::new(ERROR_LABEL)),
            }
        }
    }
}