copilot_chat.rs

   1use std::pin::Pin;
   2use std::str::FromStr as _;
   3use std::sync::Arc;
   4
   5use anyhow::{Result, anyhow};
   6use cloud_llm_client::CompletionIntent;
   7use collections::HashMap;
   8use copilot::copilot_chat::{
   9    ChatMessage, ChatMessageContent, ChatMessagePart, CopilotChat, ImageUrl,
  10    Model as CopilotChatModel, ModelVendor, Request as CopilotChatRequest, ResponseEvent, Tool,
  11    ToolCall,
  12};
  13use copilot::{Copilot, Status};
  14use futures::future::BoxFuture;
  15use futures::stream::BoxStream;
  16use futures::{FutureExt, Stream, StreamExt};
  17use gpui::{Action, AnyView, App, AsyncApp, Entity, Render, Subscription, Task, svg};
  18use http_client::StatusCode;
  19use language::language_settings::all_language_settings;
  20use language_model::{
  21    AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
  22    LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
  23    LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
  24    LanguageModelRequestMessage, LanguageModelToolChoice, LanguageModelToolResultContent,
  25    LanguageModelToolSchemaFormat, LanguageModelToolUse, MessageContent, RateLimiter, Role,
  26    StopReason, TokenUsage,
  27};
  28use settings::SettingsStore;
  29use ui::{CommonAnimationExt, prelude::*};
  30use util::debug_panic;
  31
  32use crate::ui::ConfiguredApiCard;
  33
/// Stable identifier used to reference this provider in settings and telemetry.
const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("copilot_chat");
/// Human-readable provider name shown in the UI.
const PROVIDER_NAME: LanguageModelProviderName =
    LanguageModelProviderName::new("GitHub Copilot Chat");
  37
/// Language-model provider backed by GitHub Copilot Chat.
pub struct CopilotChatLanguageModelProvider {
    // Observable state entity; the UI re-renders when it notifies.
    state: Entity<State>,
}
  41
/// Provider state. The fields only exist to keep the subscriptions alive;
/// dropping them would stop change notifications.
pub struct State {
    // Observes the global `CopilotChat` entity, when one exists.
    _copilot_chat_subscription: Option<Subscription>,
    // Observes `SettingsStore` to push configuration changes into Copilot Chat.
    _settings_subscription: Subscription,
}
  46
  47impl State {
  48    fn is_authenticated(&self, cx: &App) -> bool {
  49        CopilotChat::global(cx)
  50            .map(|m| m.read(cx).is_authenticated())
  51            .unwrap_or(false)
  52    }
  53}
  54
impl CopilotChatLanguageModelProvider {
    /// Creates the provider, wiring up observation of the global `CopilotChat`
    /// entity (when present) and of global settings changes.
    pub fn new(cx: &mut App) -> Self {
        let state = cx.new(|cx| {
            // Re-notify observers whenever the global CopilotChat entity
            // changes (e.g. sign-in status or model list updates). `None` if
            // no global instance has been registered yet.
            let copilot_chat_subscription = CopilotChat::global(cx)
                .map(|copilot_chat| cx.observe(&copilot_chat, |_, _, cx| cx.notify()));
            State {
                _copilot_chat_subscription: copilot_chat_subscription,
                _settings_subscription: cx.observe_global::<SettingsStore>(|_, cx| {
                    // On any settings change, propagate the (possibly updated)
                    // enterprise URI from the Copilot edit-prediction settings
                    // into the chat client's configuration.
                    if let Some(copilot_chat) = CopilotChat::global(cx) {
                        let language_settings = all_language_settings(None, cx);
                        let configuration = copilot::copilot_chat::CopilotChatConfiguration {
                            enterprise_uri: language_settings
                                .edit_predictions
                                .copilot
                                .enterprise_uri
                                .clone(),
                        };
                        copilot_chat.update(cx, |chat, cx| {
                            chat.set_configuration(configuration, cx);
                        });
                    }
                    cx.notify();
                }),
            }
        });

        Self { state }
    }

    /// Wraps a Copilot Chat model in this provider's `LanguageModel`
    /// implementation, attaching a request rate limiter (4 concurrent).
    fn create_language_model(&self, model: CopilotChatModel) -> Arc<dyn LanguageModel> {
        Arc::new(CopilotChatLanguageModel {
            model,
            request_limiter: RateLimiter::new(4),
        })
    }
}
  91
impl LanguageModelProviderState for CopilotChatLanguageModelProvider {
    type ObservableEntity = State;

    /// Exposes the provider's state entity so callers can observe changes.
    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}
  99
 100impl LanguageModelProvider for CopilotChatLanguageModelProvider {
 101    fn id(&self) -> LanguageModelProviderId {
 102        PROVIDER_ID
 103    }
 104
 105    fn name(&self) -> LanguageModelProviderName {
 106        PROVIDER_NAME
 107    }
 108
 109    fn icon(&self) -> IconName {
 110        IconName::Copilot
 111    }
 112
 113    fn default_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
 114        let models = CopilotChat::global(cx).and_then(|m| m.read(cx).models())?;
 115        models
 116            .first()
 117            .map(|model| self.create_language_model(model.clone()))
 118    }
 119
 120    fn default_fast_model(&self, cx: &App) -> Option<Arc<dyn LanguageModel>> {
 121        // The default model should be Copilot Chat's 'base model', which is likely a relatively fast
 122        // model (e.g. 4o) and a sensible choice when considering premium requests
 123        self.default_model(cx)
 124    }
 125
 126    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
 127        let Some(models) = CopilotChat::global(cx).and_then(|m| m.read(cx).models()) else {
 128            return Vec::new();
 129        };
 130        models
 131            .iter()
 132            .map(|model| self.create_language_model(model.clone()))
 133            .collect()
 134    }
 135
 136    fn is_authenticated(&self, cx: &App) -> bool {
 137        self.state.read(cx).is_authenticated(cx)
 138    }
 139
 140    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
 141        if self.is_authenticated(cx) {
 142            return Task::ready(Ok(()));
 143        };
 144
 145        let Some(copilot) = Copilot::global(cx) else {
 146            return Task::ready( Err(anyhow!(
 147                "Copilot must be enabled for Copilot Chat to work. Please enable Copilot and try again."
 148            ).into()));
 149        };
 150
 151        let err = match copilot.read(cx).status() {
 152            Status::Authorized => return Task::ready(Ok(())),
 153            Status::Disabled => anyhow!(
 154                "Copilot must be enabled for Copilot Chat to work. Please enable Copilot and try again."
 155            ),
 156            Status::Error(err) => anyhow!(format!(
 157                "Received the following error while signing into Copilot: {err}"
 158            )),
 159            Status::Starting { task: _ } => anyhow!(
 160                "Copilot is still starting, please wait for Copilot to start then try again"
 161            ),
 162            Status::Unauthorized => anyhow!(
 163                "Unable to authorize with Copilot. Please make sure that you have an active Copilot and Copilot Chat subscription."
 164            ),
 165            Status::SignedOut { .. } => {
 166                anyhow!("You have signed out of Copilot. Please sign in to Copilot and try again.")
 167            }
 168            Status::SigningIn { prompt: _ } => anyhow!("Still signing into Copilot..."),
 169        };
 170
 171        Task::ready(Err(err.into()))
 172    }
 173
 174    fn configuration_view(
 175        &self,
 176        _target_agent: language_model::ConfigurationViewTargetAgent,
 177        _: &mut Window,
 178        cx: &mut App,
 179    ) -> AnyView {
 180        let state = self.state.clone();
 181        cx.new(|cx| ConfigurationView::new(state, cx)).into()
 182    }
 183
 184    fn reset_credentials(&self, _cx: &mut App) -> Task<Result<()>> {
 185        Task::ready(Err(anyhow!(
 186            "Signing out of GitHub Copilot Chat is currently not supported."
 187        )))
 188    }
 189}
 190
 191fn collect_tiktoken_messages(
 192    request: LanguageModelRequest,
 193) -> Vec<tiktoken_rs::ChatCompletionRequestMessage> {
 194    request
 195        .messages
 196        .into_iter()
 197        .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
 198            role: match message.role {
 199                Role::User => "user".into(),
 200                Role::Assistant => "assistant".into(),
 201                Role::System => "system".into(),
 202            },
 203            content: Some(message.string_contents()),
 204            name: None,
 205            function_call: None,
 206        })
 207        .collect::<Vec<_>>()
 208}
 209
/// A single Copilot Chat model exposed through the `LanguageModel` trait.
pub struct CopilotChatLanguageModel {
    model: CopilotChatModel,
    // Limits concurrent in-flight requests for this model.
    request_limiter: RateLimiter,
}
 214
impl LanguageModel for CopilotChatLanguageModel {
    fn id(&self) -> LanguageModelId {
        LanguageModelId::from(self.model.id().to_string())
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        self.model.supports_tools()
    }

    fn supports_images(&self) -> bool {
        self.model.supports_vision()
    }

    /// Which JSON-schema dialect to use for tool definitions, based on the
    /// model's underlying vendor. Non-OpenAI/Anthropic vendors get the
    /// restricted subset.
    fn tool_input_format(&self) -> LanguageModelToolSchemaFormat {
        match self.model.vendor() {
            ModelVendor::OpenAI | ModelVendor::Anthropic => {
                LanguageModelToolSchemaFormat::JsonSchema
            }
            ModelVendor::Google | ModelVendor::XAI | ModelVendor::Unknown => {
                LanguageModelToolSchemaFormat::JsonSchemaSubset
            }
        }
    }

    // All tool-choice modes are available iff the model supports tools at all.
    fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
        match choice {
            LanguageModelToolChoice::Auto
            | LanguageModelToolChoice::Any
            | LanguageModelToolChoice::None => self.supports_tools(),
        }
    }

    fn telemetry_id(&self) -> String {
        format!("copilot_chat/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    /// Estimates the request's token count on a background thread.
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        let model = self.model.clone();
        cx.background_spawn(async move {
            let messages = collect_tiktoken_messages(request);
            // Copilot uses OpenAI tiktoken tokenizer for all its models irrespective of the underlying provider (vendor).
            // Map the advertised tokenizer name to a model name tiktoken_rs
            // understands, defaulting to the o200k encoding (gpt-4o).
            let tokenizer_model = match model.tokenizer() {
                Some("o200k_base") => "gpt-4o",
                Some("cl100k_base") => "gpt-4",
                _ => "gpt-4o",
            };

            tiktoken_rs::num_tokens_from_messages(tokenizer_model, &messages)
                .map(|tokens| tokens as u64)
        })
        .boxed()
    }

    /// Starts a streaming completion, routing to the Responses API when the
    /// model supports it and falling back to the Chat Completions API
    /// otherwise. Requests pass through the per-model rate limiter.
    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
            LanguageModelCompletionError,
        >,
    > {
        // Classify the request for Copilot's usage accounting: intents that
        // come directly from a user action count as user-initiated; follow-up
        // machine-generated requests (tool results, summarization, edits) do
        // not. A missing intent is treated as user-initiated.
        let is_user_initiated = request.intent.is_none_or(|intent| match intent {
            CompletionIntent::UserPrompt
            | CompletionIntent::ThreadContextSummarization
            | CompletionIntent::InlineAssist
            | CompletionIntent::TerminalInlineAssist
            | CompletionIntent::GenerateGitCommitMessage => true,

            CompletionIntent::ToolResults
            | CompletionIntent::ThreadSummarization
            | CompletionIntent::CreateFile
            | CompletionIntent::EditFile => false,
        });

        // Preferred path: the Responses API, when the model supports it.
        if self.model.supports_response() {
            let responses_request = into_copilot_responses(&self.model, request);
            let request_limiter = self.request_limiter.clone();
            let future = cx.spawn(async move |cx| {
                let request =
                    CopilotChat::stream_response(responses_request, is_user_initiated, cx.clone());
                request_limiter
                    .stream(async move {
                        let stream = request.await?;
                        let mapper = CopilotResponsesEventMapper::new();
                        Ok(mapper.map_stream(stream).boxed())
                    })
                    .await
            });
            return async move { Ok(future.await?.boxed()) }.boxed();
        }

        // Fallback path: the Chat Completions API.
        let copilot_request = match into_copilot_chat(&self.model, request) {
            Ok(request) => request,
            Err(err) => return futures::future::ready(Err(err.into())).boxed(),
        };
        let is_streaming = copilot_request.stream;

        let request_limiter = self.request_limiter.clone();
        let future = cx.spawn(async move |cx| {
            let request =
                CopilotChat::stream_completion(copilot_request, is_user_initiated, cx.clone());
            request_limiter
                .stream(async move {
                    let response = request.await?;
                    Ok(map_to_language_model_completion_events(
                        response,
                        is_streaming,
                    ))
                })
                .await
        });
        async move { Ok(future.await?.boxed()) }.boxed()
    }
}
 352
/// Maps a raw Copilot Chat Completions event stream into
/// `LanguageModelCompletionEvent`s.
///
/// Tool-call fragments are accumulated across events (keyed by their index)
/// and only emitted as complete `ToolUse` events when a `tool_calls` finish
/// reason arrives. `is_streaming` selects whether each choice's payload is
/// read from `delta` (streaming) or `message` (non-streaming).
pub fn map_to_language_model_completion_events(
    events: Pin<Box<dyn Send + Stream<Item = Result<ResponseEvent>>>>,
    is_streaming: bool,
) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
    // A partially-accumulated tool call; fields fill in as fragments arrive.
    #[derive(Default)]
    struct RawToolCall {
        id: String,
        name: String,
        arguments: String,
    }

    // State threaded through the unfold: the source stream plus the
    // in-progress tool calls keyed by their position in the response.
    struct State {
        events: Pin<Box<dyn Send + Stream<Item = Result<ResponseEvent>>>>,
        tool_calls_by_index: HashMap<usize, RawToolCall>,
    }

    futures::stream::unfold(
        State {
            events,
            tool_calls_by_index: HashMap::default(),
        },
        move |mut state| async move {
            if let Some(event) = state.events.next().await {
                match event {
                    Ok(event) => {
                        let Some(choice) = event.choices.first() else {
                            return Some((
                                vec![Err(anyhow!("Response contained no choices").into())],
                                state,
                            ));
                        };

                        // Streaming responses deliver incremental `delta`
                        // payloads; non-streaming ones a complete `message`.
                        let delta = if is_streaming {
                            choice.delta.as_ref()
                        } else {
                            choice.message.as_ref()
                        };

                        let Some(delta) = delta else {
                            return Some((
                                vec![Err(anyhow!("Response contained no delta").into())],
                                state,
                            ));
                        };

                        let mut events = Vec::new();
                        if let Some(content) = delta.content.clone() {
                            events.push(Ok(LanguageModelCompletionEvent::Text(content)));
                        }

                        // Merge tool-call fragments into the accumulator. A
                        // fragment may carry any subset of id/name/arguments;
                        // arguments are concatenated across fragments.
                        for (index, tool_call) in delta.tool_calls.iter().enumerate() {
                            let tool_index = tool_call.index.unwrap_or(index);
                            let entry = state.tool_calls_by_index.entry(tool_index).or_default();

                            if let Some(tool_id) = tool_call.id.clone() {
                                entry.id = tool_id;
                            }

                            if let Some(function) = tool_call.function.as_ref() {
                                if let Some(name) = function.name.clone() {
                                    entry.name = name;
                                }

                                if let Some(arguments) = function.arguments.clone() {
                                    entry.arguments.push_str(&arguments);
                                }
                            }
                        }

                        if let Some(usage) = event.usage {
                            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(
                                TokenUsage {
                                    input_tokens: usage.prompt_tokens,
                                    output_tokens: usage.completion_tokens,
                                    cache_creation_input_tokens: 0,
                                    cache_read_input_tokens: 0,
                                },
                            )));
                        }

                        match choice.finish_reason.as_deref() {
                            Some("stop") => {
                                events.push(Ok(LanguageModelCompletionEvent::Stop(
                                    StopReason::EndTurn,
                                )));
                            }
                            Some("tool_calls") => {
                                // Flush all accumulated tool calls, parsing
                                // each one's argument JSON.
                                events.extend(state.tool_calls_by_index.drain().map(
                                    |(_, tool_call)| {
                                        // The model can output an empty string
                                        // to indicate the absence of arguments.
                                        // When that happens, create an empty
                                        // object instead.
                                        let arguments = if tool_call.arguments.is_empty() {
                                            Ok(serde_json::Value::Object(Default::default()))
                                        } else {
                                            serde_json::Value::from_str(&tool_call.arguments)
                                        };
                                        match arguments {
                                            Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
                                                LanguageModelToolUse {
                                                    id: tool_call.id.into(),
                                                    name: tool_call.name.as_str().into(),
                                                    is_input_complete: true,
                                                    input,
                                                    raw_input: tool_call.arguments,
                                                    thought_signature: None,
                                                },
                                            )),
                                            // Malformed JSON becomes a parse-error
                                            // event rather than failing the stream.
                                            Err(error) => Ok(
                                                LanguageModelCompletionEvent::ToolUseJsonParseError {
                                                    id: tool_call.id.into(),
                                                    tool_name: tool_call.name.as_str().into(),
                                                    raw_input: tool_call.arguments.into(),
                                                    json_parse_error: error.to_string(),
                                                },
                                            ),
                                        }
                                    },
                                ));

                                events.push(Ok(LanguageModelCompletionEvent::Stop(
                                    StopReason::ToolUse,
                                )));
                            }
                            Some(stop_reason) => {
                                // Unknown finish reasons are logged and treated
                                // as a normal end of turn.
                                log::error!("Unexpected Copilot Chat stop_reason: {stop_reason:?}");
                                events.push(Ok(LanguageModelCompletionEvent::Stop(
                                    StopReason::EndTurn,
                                )));
                            }
                            None => {}
                        }

                        return Some((events, state));
                    }
                    Err(err) => return Some((vec![Err(anyhow!(err).into())], state)),
                }
            }

            None
        },
    )
    // Each source event may map to several output events; flatten them.
    .flat_map(futures::stream::iter)
}
 498
/// Maps Copilot Responses-API stream events into `LanguageModelCompletionEvent`s.
pub struct CopilotResponsesEventMapper {
    // Stop reason already emitted mid-stream (e.g. ToolUse), used to avoid
    // emitting a duplicate Stop event when the response completes.
    pending_stop_reason: Option<StopReason>,
}
 502
impl CopilotResponsesEventMapper {
    /// Creates a mapper with no stop reason pending.
    pub fn new() -> Self {
        Self {
            pending_stop_reason: None,
        }
    }

    /// Adapts a Responses-API event stream into completion events; each source
    /// event may expand to zero or more output events.
    pub fn map_stream(
        mut self,
        events: Pin<Box<dyn Send + Stream<Item = Result<copilot::copilot_responses::StreamEvent>>>>,
    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
    {
        events.flat_map(move |event| {
            futures::stream::iter(match event {
                Ok(event) => self.map_event(event),
                Err(error) => vec![Err(LanguageModelCompletionError::from(anyhow!(error)))],
            })
        })
    }

    /// Translates a single Responses-API stream event into zero or more
    /// completion events, tracking tool-use stops in `pending_stop_reason`.
    fn map_event(
        &mut self,
        event: copilot::copilot_responses::StreamEvent,
    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
        match event {
            // A new message output item opens a message with its id.
            copilot::copilot_responses::StreamEvent::OutputItemAdded { item, .. } => match item {
                copilot::copilot_responses::ResponseOutputItem::Message { id, .. } => {
                    vec![Ok(LanguageModelCompletionEvent::StartMessage {
                        message_id: id,
                    })]
                }
                _ => Vec::new(),
            },

            // Incremental text; empty deltas are dropped.
            copilot::copilot_responses::StreamEvent::OutputTextDelta { delta, .. } => {
                if delta.is_empty() {
                    Vec::new()
                } else {
                    vec![Ok(LanguageModelCompletionEvent::Text(delta))]
                }
            }

            copilot::copilot_responses::StreamEvent::OutputItemDone { item, .. } => match item {
                copilot::copilot_responses::ResponseOutputItem::Message { .. } => Vec::new(),
                // A finished function call: parse its arguments and emit a
                // ToolUse (or a parse-error event), then a ToolUse stop.
                copilot::copilot_responses::ResponseOutputItem::FunctionCall {
                    call_id,
                    name,
                    arguments,
                    ..
                } => {
                    let mut events = Vec::new();
                    match serde_json::from_str::<serde_json::Value>(&arguments) {
                        Ok(input) => events.push(Ok(LanguageModelCompletionEvent::ToolUse(
                            LanguageModelToolUse {
                                id: call_id.into(),
                                name: name.as_str().into(),
                                is_input_complete: true,
                                input,
                                raw_input: arguments.clone(),
                                thought_signature: None,
                            },
                        ))),
                        Err(error) => {
                            events.push(Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
                                id: call_id.into(),
                                tool_name: name.as_str().into(),
                                raw_input: arguments.clone().into(),
                                json_parse_error: error.to_string(),
                            }))
                        }
                    }
                    // Record that we already emitted a tool-use stop so we can avoid duplicating
                    // a Stop event on Completed.
                    self.pending_stop_reason = Some(StopReason::ToolUse);
                    events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
                    events
                }
                // Reasoning output: concatenated summary text becomes a
                // Thinking event; encrypted content becomes RedactedThinking.
                copilot::copilot_responses::ResponseOutputItem::Reasoning {
                    summary,
                    encrypted_content,
                    ..
                } => {
                    let mut events = Vec::new();

                    if let Some(blocks) = summary {
                        let mut text = String::new();
                        for block in blocks {
                            text.push_str(&block.text);
                        }
                        if !text.is_empty() {
                            events.push(Ok(LanguageModelCompletionEvent::Thinking {
                                text,
                                signature: None,
                            }));
                        }
                    }

                    if let Some(data) = encrypted_content {
                        events.push(Ok(LanguageModelCompletionEvent::RedactedThinking { data }));
                    }

                    events
                }
            },

            // Normal completion: emit final usage, then an EndTurn stop unless
            // a ToolUse stop was already emitted for this response.
            copilot::copilot_responses::StreamEvent::Completed { response } => {
                let mut events = Vec::new();
                if let Some(usage) = response.usage {
                    events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
                        input_tokens: usage.input_tokens.unwrap_or(0),
                        output_tokens: usage.output_tokens.unwrap_or(0),
                        cache_creation_input_tokens: 0,
                        cache_read_input_tokens: 0,
                    })));
                }
                if self.pending_stop_reason.take() != Some(StopReason::ToolUse) {
                    events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
                }
                events
            }

            // Truncated response: derive the stop reason from the incomplete
            // details, falling back to any pending stop reason.
            copilot::copilot_responses::StreamEvent::Incomplete { response } => {
                let reason = response
                    .incomplete_details
                    .as_ref()
                    .and_then(|details| details.reason.as_ref());
                let stop_reason = match reason {
                    Some(copilot::copilot_responses::IncompleteReason::MaxOutputTokens) => {
                        StopReason::MaxTokens
                    }
                    Some(copilot::copilot_responses::IncompleteReason::ContentFilter) => {
                        StopReason::Refusal
                    }
                    _ => self
                        .pending_stop_reason
                        .take()
                        .unwrap_or(StopReason::EndTurn),
                };

                let mut events = Vec::new();
                if let Some(usage) = response.usage {
                    events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
                        input_tokens: usage.input_tokens.unwrap_or(0),
                        output_tokens: usage.output_tokens.unwrap_or(0),
                        cache_creation_input_tokens: 0,
                        cache_read_input_tokens: 0,
                    })));
                }
                events.push(Ok(LanguageModelCompletionEvent::Stop(stop_reason)));
                events
            }

            // Failure: surface as an HTTP response error, interpreting the
            // error code as a status code when it parses as one.
            copilot::copilot_responses::StreamEvent::Failed { response } => {
                let provider = PROVIDER_NAME;
                let (status_code, message) = match response.error {
                    Some(error) => {
                        let status_code = StatusCode::from_str(&error.code)
                            .unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);
                        (status_code, error.message)
                    }
                    None => (
                        StatusCode::INTERNAL_SERVER_ERROR,
                        "response.failed".to_string(),
                    ),
                };
                vec![Err(LanguageModelCompletionError::HttpResponseError {
                    provider,
                    status_code,
                    message,
                })]
            }

            copilot::copilot_responses::StreamEvent::GenericError { error } => vec![Err(
                LanguageModelCompletionError::Other(anyhow!(format!("{error:?}"))),
            )],

            // Lifecycle/no-op events carry no information for callers.
            copilot::copilot_responses::StreamEvent::Created { .. }
            | copilot::copilot_responses::StreamEvent::Unknown => Vec::new(),
        }
    }
}
 684
 685fn into_copilot_chat(
 686    model: &copilot::copilot_chat::Model,
 687    request: LanguageModelRequest,
 688) -> Result<CopilotChatRequest> {
 689    let mut request_messages: Vec<LanguageModelRequestMessage> = Vec::new();
 690    for message in request.messages {
 691        if let Some(last_message) = request_messages.last_mut() {
 692            if last_message.role == message.role {
 693                last_message.content.extend(message.content);
 694            } else {
 695                request_messages.push(message);
 696            }
 697        } else {
 698            request_messages.push(message);
 699        }
 700    }
 701
 702    let mut messages: Vec<ChatMessage> = Vec::new();
 703    for message in request_messages {
 704        match message.role {
 705            Role::User => {
 706                for content in &message.content {
 707                    if let MessageContent::ToolResult(tool_result) = content {
 708                        let content = match &tool_result.content {
 709                            LanguageModelToolResultContent::Text(text) => text.to_string().into(),
 710                            LanguageModelToolResultContent::Image(image) => {
 711                                if model.supports_vision() {
 712                                    ChatMessageContent::Multipart(vec![ChatMessagePart::Image {
 713                                        image_url: ImageUrl {
 714                                            url: image.to_base64_url(),
 715                                        },
 716                                    }])
 717                                } else {
 718                                    debug_panic!(
 719                                        "This should be caught at {} level",
 720                                        tool_result.tool_name
 721                                    );
 722                                    "[Tool responded with an image, but this model does not support vision]".to_string().into()
 723                                }
 724                            }
 725                        };
 726
 727                        messages.push(ChatMessage::Tool {
 728                            tool_call_id: tool_result.tool_use_id.to_string(),
 729                            content,
 730                        });
 731                    }
 732                }
 733
 734                let mut content_parts = Vec::new();
 735                for content in &message.content {
 736                    match content {
 737                        MessageContent::Text(text) | MessageContent::Thinking { text, .. }
 738                            if !text.is_empty() =>
 739                        {
 740                            if let Some(ChatMessagePart::Text { text: text_content }) =
 741                                content_parts.last_mut()
 742                            {
 743                                text_content.push_str(text);
 744                            } else {
 745                                content_parts.push(ChatMessagePart::Text {
 746                                    text: text.to_string(),
 747                                });
 748                            }
 749                        }
 750                        MessageContent::Image(image) if model.supports_vision() => {
 751                            content_parts.push(ChatMessagePart::Image {
 752                                image_url: ImageUrl {
 753                                    url: image.to_base64_url(),
 754                                },
 755                            });
 756                        }
 757                        _ => {}
 758                    }
 759                }
 760
 761                if !content_parts.is_empty() {
 762                    messages.push(ChatMessage::User {
 763                        content: content_parts.into(),
 764                    });
 765                }
 766            }
 767            Role::Assistant => {
 768                let mut tool_calls = Vec::new();
 769                for content in &message.content {
 770                    if let MessageContent::ToolUse(tool_use) = content {
 771                        tool_calls.push(ToolCall {
 772                            id: tool_use.id.to_string(),
 773                            content: copilot::copilot_chat::ToolCallContent::Function {
 774                                function: copilot::copilot_chat::FunctionContent {
 775                                    name: tool_use.name.to_string(),
 776                                    arguments: serde_json::to_string(&tool_use.input)?,
 777                                },
 778                            },
 779                        });
 780                    }
 781                }
 782
 783                let text_content = {
 784                    let mut buffer = String::new();
 785                    for string in message.content.iter().filter_map(|content| match content {
 786                        MessageContent::Text(text) | MessageContent::Thinking { text, .. } => {
 787                            Some(text.as_str())
 788                        }
 789                        MessageContent::ToolUse(_)
 790                        | MessageContent::RedactedThinking(_)
 791                        | MessageContent::ToolResult(_)
 792                        | MessageContent::Image(_) => None,
 793                    }) {
 794                        buffer.push_str(string);
 795                    }
 796
 797                    buffer
 798                };
 799
 800                messages.push(ChatMessage::Assistant {
 801                    content: if text_content.is_empty() {
 802                        ChatMessageContent::empty()
 803                    } else {
 804                        text_content.into()
 805                    },
 806                    tool_calls,
 807                });
 808            }
 809            Role::System => messages.push(ChatMessage::System {
 810                content: message.string_contents(),
 811            }),
 812        }
 813    }
 814
 815    let tools = request
 816        .tools
 817        .iter()
 818        .map(|tool| Tool::Function {
 819            function: copilot::copilot_chat::Function {
 820                name: tool.name.clone(),
 821                description: tool.description.clone(),
 822                parameters: tool.input_schema.clone(),
 823            },
 824        })
 825        .collect::<Vec<_>>();
 826
 827    Ok(CopilotChatRequest {
 828        intent: true,
 829        n: 1,
 830        stream: model.uses_streaming(),
 831        temperature: 0.1,
 832        model: model.id().to_string(),
 833        messages,
 834        tools,
 835        tool_choice: request.tool_choice.map(|choice| match choice {
 836            LanguageModelToolChoice::Auto => copilot::copilot_chat::ToolChoice::Auto,
 837            LanguageModelToolChoice::Any => copilot::copilot_chat::ToolChoice::Any,
 838            LanguageModelToolChoice::None => copilot::copilot_chat::ToolChoice::None,
 839        }),
 840    })
 841}
 842
 843fn into_copilot_responses(
 844    model: &copilot::copilot_chat::Model,
 845    request: LanguageModelRequest,
 846) -> copilot::copilot_responses::Request {
 847    use copilot::copilot_responses as responses;
 848
 849    let LanguageModelRequest {
 850        thread_id: _,
 851        prompt_id: _,
 852        intent: _,
 853        mode: _,
 854        messages,
 855        tools,
 856        tool_choice,
 857        stop: _,
 858        temperature,
 859        thinking_allowed: _,
 860    } = request;
 861
 862    let mut input_items: Vec<responses::ResponseInputItem> = Vec::new();
 863
 864    for message in messages {
 865        match message.role {
 866            Role::User => {
 867                for content in &message.content {
 868                    if let MessageContent::ToolResult(tool_result) = content {
 869                        let output = if let Some(out) = &tool_result.output {
 870                            match out {
 871                                serde_json::Value::String(s) => {
 872                                    responses::ResponseFunctionOutput::Text(s.clone())
 873                                }
 874                                serde_json::Value::Null => {
 875                                    responses::ResponseFunctionOutput::Text(String::new())
 876                                }
 877                                other => responses::ResponseFunctionOutput::Text(other.to_string()),
 878                            }
 879                        } else {
 880                            match &tool_result.content {
 881                                LanguageModelToolResultContent::Text(text) => {
 882                                    responses::ResponseFunctionOutput::Text(text.to_string())
 883                                }
 884                                LanguageModelToolResultContent::Image(image) => {
 885                                    if model.supports_vision() {
 886                                        responses::ResponseFunctionOutput::Content(vec![
 887                                            responses::ResponseInputContent::InputImage {
 888                                                image_url: Some(image.to_base64_url()),
 889                                                detail: Default::default(),
 890                                            },
 891                                        ])
 892                                    } else {
 893                                        debug_panic!(
 894                                            "This should be caught at {} level",
 895                                            tool_result.tool_name
 896                                        );
 897                                        responses::ResponseFunctionOutput::Text(
 898                                            "[Tool responded with an image, but this model does not support vision]".into(),
 899                                        )
 900                                    }
 901                                }
 902                            }
 903                        };
 904
 905                        input_items.push(responses::ResponseInputItem::FunctionCallOutput {
 906                            call_id: tool_result.tool_use_id.to_string(),
 907                            output,
 908                            status: None,
 909                        });
 910                    }
 911                }
 912
 913                let mut parts: Vec<responses::ResponseInputContent> = Vec::new();
 914                for content in &message.content {
 915                    match content {
 916                        MessageContent::Text(text) => {
 917                            parts.push(responses::ResponseInputContent::InputText {
 918                                text: text.clone(),
 919                            });
 920                        }
 921
 922                        MessageContent::Image(image) => {
 923                            if model.supports_vision() {
 924                                parts.push(responses::ResponseInputContent::InputImage {
 925                                    image_url: Some(image.to_base64_url()),
 926                                    detail: Default::default(),
 927                                });
 928                            }
 929                        }
 930                        _ => {}
 931                    }
 932                }
 933
 934                if !parts.is_empty() {
 935                    input_items.push(responses::ResponseInputItem::Message {
 936                        role: "user".into(),
 937                        content: Some(parts),
 938                        status: None,
 939                    });
 940                }
 941            }
 942
 943            Role::Assistant => {
 944                for content in &message.content {
 945                    if let MessageContent::ToolUse(tool_use) = content {
 946                        input_items.push(responses::ResponseInputItem::FunctionCall {
 947                            call_id: tool_use.id.to_string(),
 948                            name: tool_use.name.to_string(),
 949                            arguments: tool_use.raw_input.clone(),
 950                            status: None,
 951                        });
 952                    }
 953                }
 954
 955                for content in &message.content {
 956                    if let MessageContent::RedactedThinking(data) = content {
 957                        input_items.push(responses::ResponseInputItem::Reasoning {
 958                            id: None,
 959                            summary: Vec::new(),
 960                            encrypted_content: data.clone(),
 961                        });
 962                    }
 963                }
 964
 965                let mut parts: Vec<responses::ResponseInputContent> = Vec::new();
 966                for content in &message.content {
 967                    match content {
 968                        MessageContent::Text(text) => {
 969                            parts.push(responses::ResponseInputContent::OutputText {
 970                                text: text.clone(),
 971                            });
 972                        }
 973                        MessageContent::Image(_) => {
 974                            parts.push(responses::ResponseInputContent::OutputText {
 975                                text: "[image omitted]".to_string(),
 976                            });
 977                        }
 978                        _ => {}
 979                    }
 980                }
 981
 982                if !parts.is_empty() {
 983                    input_items.push(responses::ResponseInputItem::Message {
 984                        role: "assistant".into(),
 985                        content: Some(parts),
 986                        status: Some("completed".into()),
 987                    });
 988                }
 989            }
 990
 991            Role::System => {
 992                let mut parts: Vec<responses::ResponseInputContent> = Vec::new();
 993                for content in &message.content {
 994                    if let MessageContent::Text(text) = content {
 995                        parts.push(responses::ResponseInputContent::InputText {
 996                            text: text.clone(),
 997                        });
 998                    }
 999                }
1000
1001                if !parts.is_empty() {
1002                    input_items.push(responses::ResponseInputItem::Message {
1003                        role: "system".into(),
1004                        content: Some(parts),
1005                        status: None,
1006                    });
1007                }
1008            }
1009        }
1010    }
1011
1012    let converted_tools: Vec<responses::ToolDefinition> = tools
1013        .into_iter()
1014        .map(|tool| responses::ToolDefinition::Function {
1015            name: tool.name,
1016            description: Some(tool.description),
1017            parameters: Some(tool.input_schema),
1018            strict: None,
1019        })
1020        .collect();
1021
1022    let mapped_tool_choice = tool_choice.map(|choice| match choice {
1023        LanguageModelToolChoice::Auto => responses::ToolChoice::Auto,
1024        LanguageModelToolChoice::Any => responses::ToolChoice::Any,
1025        LanguageModelToolChoice::None => responses::ToolChoice::None,
1026    });
1027
1028    responses::Request {
1029        model: model.id().to_string(),
1030        input: input_items,
1031        stream: model.uses_streaming(),
1032        temperature,
1033        tools: converted_tools,
1034        tool_choice: mapped_tool_choice,
1035        reasoning: None, // We would need to add support for setting from user settings.
1036        include: Some(vec![
1037            copilot::copilot_responses::ResponseIncludable::ReasoningEncryptedContent,
1038        ]),
1039    }
1040}
1041
#[cfg(test)]
mod tests {
    //! Unit tests for `CopilotResponsesEventMapper`: each test feeds a small,
    //! hand-built sequence of Responses-API stream events through the mapper
    //! and asserts on the resulting `LanguageModelCompletionEvent`s.
    use super::*;
    use copilot::copilot_responses as responses;
    use futures::StreamExt;

    // Drives `events` through a fresh mapper and collects the mapped
    // completion events, panicking if the mapper reports an error for any
    // event (tests that expect errors call the mapper directly instead).
    fn map_events(events: Vec<responses::StreamEvent>) -> Vec<LanguageModelCompletionEvent> {
        futures::executor::block_on(async {
            CopilotResponsesEventMapper::new()
                .map_stream(Box::pin(futures::stream::iter(events.into_iter().map(Ok))))
                .collect::<Vec<_>>()
                .await
                .into_iter()
                .map(Result::unwrap)
                .collect()
        })
    }

    // A message item plus a text delta should surface as StartMessage + Text,
    // and a Completed event carrying usage should yield a UsageUpdate
    // followed by a Stop(EndTurn).
    #[test]
    fn responses_stream_maps_text_and_usage() {
        let events = vec![
            responses::StreamEvent::OutputItemAdded {
                output_index: 0,
                sequence_number: None,
                item: responses::ResponseOutputItem::Message {
                    id: "msg_1".into(),
                    role: "assistant".into(),
                    content: Some(Vec::new()),
                },
            },
            responses::StreamEvent::OutputTextDelta {
                item_id: "msg_1".into(),
                output_index: 0,
                delta: "Hello".into(),
            },
            responses::StreamEvent::Completed {
                response: responses::Response {
                    usage: Some(responses::ResponseUsage {
                        input_tokens: Some(5),
                        output_tokens: Some(3),
                        total_tokens: Some(8),
                    }),
                    ..Default::default()
                },
            },
        ];

        let mapped = map_events(events);
        assert!(matches!(
            mapped[0],
            LanguageModelCompletionEvent::StartMessage { ref message_id } if message_id == "msg_1"
        ));
        assert!(matches!(
            mapped[1],
            LanguageModelCompletionEvent::Text(ref text) if text == "Hello"
        ));
        assert!(matches!(
            mapped[2],
            LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
                input_tokens: 5,
                output_tokens: 3,
                ..
            })
        ));
        assert!(matches!(
            mapped[3],
            LanguageModelCompletionEvent::Stop(StopReason::EndTurn)
        ));
    }

    // A completed function call with valid JSON arguments maps to a ToolUse
    // event and a Stop(ToolUse).
    #[test]
    fn responses_stream_maps_tool_calls() {
        let events = vec![responses::StreamEvent::OutputItemDone {
            output_index: 0,
            sequence_number: None,
            item: responses::ResponseOutputItem::FunctionCall {
                id: Some("fn_1".into()),
                call_id: "call_1".into(),
                name: "do_it".into(),
                arguments: "{\"x\":1}".into(),
                status: None,
            },
        }];

        let mapped = map_events(events);
        assert!(matches!(
            mapped[0],
            LanguageModelCompletionEvent::ToolUse(ref use_) if use_.id.to_string() == "call_1" && use_.name.as_ref() == "do_it"
        ));
        assert!(matches!(
            mapped[1],
            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
        ));
    }

    // Malformed tool-call arguments should produce a ToolUseJsonParseError
    // event (not a stream error), still followed by a Stop(ToolUse).
    #[test]
    fn responses_stream_handles_json_parse_error() {
        let events = vec![responses::StreamEvent::OutputItemDone {
            output_index: 0,
            sequence_number: None,
            item: responses::ResponseOutputItem::FunctionCall {
                id: Some("fn_1".into()),
                call_id: "call_1".into(),
                name: "do_it".into(),
                arguments: "{not json}".into(),
                status: None,
            },
        }];

        let mapped = map_events(events);
        assert!(matches!(
            mapped[0],
            LanguageModelCompletionEvent::ToolUseJsonParseError { ref id, ref tool_name, .. }
                if id.to_string() == "call_1" && tool_name.as_ref() == "do_it"
        ));
        assert!(matches!(
            mapped[1],
            LanguageModelCompletionEvent::Stop(StopReason::ToolUse)
        ));
    }

    // A reasoning item maps its summary text to a Thinking event and its
    // encrypted payload to a RedactedThinking event.
    #[test]
    fn responses_stream_maps_reasoning_summary_and_encrypted_content() {
        let events = vec![responses::StreamEvent::OutputItemDone {
            output_index: 0,
            sequence_number: None,
            item: responses::ResponseOutputItem::Reasoning {
                id: "r1".into(),
                summary: Some(vec![responses::ResponseReasoningItem {
                    kind: "summary_text".into(),
                    text: "Chain".into(),
                }]),
                encrypted_content: Some("ENC".into()),
            },
        }];

        let mapped = map_events(events);
        assert!(matches!(
            mapped[0],
            LanguageModelCompletionEvent::Thinking { ref text, signature: None } if text == "Chain"
        ));
        assert!(matches!(
            mapped[1],
            LanguageModelCompletionEvent::RedactedThinking { ref data } if data == "ENC"
        ));
    }

    // An Incomplete response whose reason is MaxOutputTokens should still
    // report usage, then stop with StopReason::MaxTokens.
    #[test]
    fn responses_stream_handles_incomplete_max_tokens() {
        let events = vec![responses::StreamEvent::Incomplete {
            response: responses::Response {
                usage: Some(responses::ResponseUsage {
                    input_tokens: Some(10),
                    output_tokens: Some(0),
                    total_tokens: Some(10),
                }),
                incomplete_details: Some(responses::IncompleteDetails {
                    reason: Some(responses::IncompleteReason::MaxOutputTokens),
                }),
                ..Default::default()
            },
        }];

        let mapped = map_events(events);
        assert!(matches!(
            mapped[0],
            LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
                input_tokens: 10,
                output_tokens: 0,
                ..
            })
        ));
        assert!(matches!(
            mapped[1],
            LanguageModelCompletionEvent::Stop(StopReason::MaxTokens)
        ));
    }

    // An Incomplete response due to content filtering ends the stream with
    // StopReason::Refusal.
    #[test]
    fn responses_stream_handles_incomplete_content_filter() {
        let events = vec![responses::StreamEvent::Incomplete {
            response: responses::Response {
                usage: None,
                incomplete_details: Some(responses::IncompleteDetails {
                    reason: Some(responses::IncompleteReason::ContentFilter),
                }),
                ..Default::default()
            },
        }];

        let mapped = map_events(events);
        assert!(matches!(
            mapped.last().unwrap(),
            LanguageModelCompletionEvent::Stop(StopReason::Refusal)
        ));
    }

    // When a tool call already emitted Stop(ToolUse), a trailing Completed
    // event must not emit a second Stop.
    #[test]
    fn responses_stream_completed_no_duplicate_after_tool_use() {
        let events = vec![
            responses::StreamEvent::OutputItemDone {
                output_index: 0,
                sequence_number: None,
                item: responses::ResponseOutputItem::FunctionCall {
                    id: Some("fn_1".into()),
                    call_id: "call_1".into(),
                    name: "do_it".into(),
                    arguments: "{}".into(),
                    status: None,
                },
            },
            responses::StreamEvent::Completed {
                response: responses::Response::default(),
            },
        ];

        let mapped = map_events(events);

        let mut stop_count = 0usize;
        let mut saw_tool_use_stop = false;
        for event in mapped {
            if let LanguageModelCompletionEvent::Stop(reason) = event {
                stop_count += 1;
                if matches!(reason, StopReason::ToolUse) {
                    saw_tool_use_stop = true;
                }
            }
        }
        assert_eq!(stop_count, 1, "should emit exactly one Stop event");
        assert!(saw_tool_use_stop, "Stop reason should be ToolUse");
    }

    // A Failed response whose error code is an HTTP status string should map
    // to LanguageModelCompletionError::HttpResponseError with that status.
    // Calls the mapper directly (not `map_events`) because the result is Err.
    #[test]
    fn responses_stream_failed_maps_http_response_error() {
        let events = vec![responses::StreamEvent::Failed {
            response: responses::Response {
                error: Some(responses::ResponseError {
                    code: "429".into(),
                    message: "too many requests".into(),
                }),
                ..Default::default()
            },
        }];

        let mapped_results = futures::executor::block_on(async {
            CopilotResponsesEventMapper::new()
                .map_stream(Box::pin(futures::stream::iter(events.into_iter().map(Ok))))
                .collect::<Vec<_>>()
                .await
        });

        assert_eq!(mapped_results.len(), 1);
        match &mapped_results[0] {
            Err(LanguageModelCompletionError::HttpResponseError {
                status_code,
                message,
                ..
            }) => {
                assert_eq!(*status_code, http_client::StatusCode::TOO_MANY_REQUESTS);
                assert_eq!(message, "too many requests");
            }
            other => panic!("expected HttpResponseError, got {:?}", other),
        }
    }
}
/// Settings-panel view for the Copilot Chat provider: shows authentication
/// status and the sign-in / sign-out controls.
struct ConfigurationView {
    // Latest observed Copilot status; `None` when the Copilot global is
    // unavailable (e.g. Copilot is not running).
    copilot_status: Option<copilot::Status>,
    // Provider state, consulted in `render` to check authentication.
    state: Entity<State>,
    // Keeps the Copilot status observation alive for the view's lifetime.
    _subscription: Option<Subscription>,
}
1312
1313impl ConfigurationView {
1314    pub fn new(state: Entity<State>, cx: &mut Context<Self>) -> Self {
1315        let copilot = Copilot::global(cx);
1316
1317        Self {
1318            copilot_status: copilot.as_ref().map(|copilot| copilot.read(cx).status()),
1319            state,
1320            _subscription: copilot.as_ref().map(|copilot| {
1321                cx.observe(copilot, |this, model, cx| {
1322                    this.copilot_status = Some(model.read(cx).status());
1323                    cx.notify();
1324                })
1325            }),
1326        }
1327    }
1328}
1329
1330impl Render for ConfigurationView {
1331    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
1332        if self.state.read(cx).is_authenticated(cx) {
1333            ConfiguredApiCard::new("Authorized")
1334                .button_label("Sign Out")
1335                .on_click(|_, window, cx| {
1336                    window.dispatch_action(copilot::SignOut.boxed_clone(), cx);
1337                })
1338                .into_any_element()
1339        } else {
1340            let loading_icon = Icon::new(IconName::ArrowCircle).with_rotate_animation(4);
1341
1342            const ERROR_LABEL: &str = "Copilot Chat requires an active GitHub Copilot subscription. Please ensure Copilot is configured and try again, or use a different Assistant provider.";
1343
1344            match &self.copilot_status {
1345                Some(status) => match status {
1346                    Status::Starting { task: _ } => h_flex()
1347                        .gap_2()
1348                        .child(loading_icon)
1349                        .child(Label::new("Starting Copilot…"))
1350                        .into_any_element(),
1351                    Status::SigningIn { prompt: _ }
1352                    | Status::SignedOut {
1353                        awaiting_signing_in: true,
1354                    } => h_flex()
1355                        .gap_2()
1356                        .child(loading_icon)
1357                        .child(Label::new("Signing into Copilot…"))
1358                        .into_any_element(),
1359                    Status::Error(_) => {
1360                        const LABEL: &str = "Copilot had issues starting. Please try restarting it. If the issue persists, try reinstalling Copilot.";
1361                        v_flex()
1362                            .gap_6()
1363                            .child(Label::new(LABEL))
1364                            .child(svg().size_8().path(IconName::CopilotError.path()))
1365                            .into_any_element()
1366                    }
1367                    _ => {
1368                        const LABEL: &str = "To use Zed's agent with GitHub Copilot, you need to be logged in to GitHub. Note that your GitHub account must have an active Copilot Chat subscription.";
1369
1370                        v_flex()
1371                            .gap_2()
1372                            .child(Label::new(LABEL))
1373                            .child(
1374                                Button::new("sign_in", "Sign in to use GitHub Copilot")
1375                                    .full_width()
1376                                    .style(ButtonStyle::Outlined)
1377                                    .icon_color(Color::Muted)
1378                                    .icon(IconName::Github)
1379                                    .icon_position(IconPosition::Start)
1380                                    .icon_size(IconSize::Small)
1381                                    .on_click(|_, window, cx| {
1382                                        copilot::initiate_sign_in(window, cx)
1383                                    }),
1384                            )
1385                            .into_any_element()
1386                    }
1387                },
1388                None => v_flex()
1389                    .gap_6()
1390                    .child(Label::new(ERROR_LABEL))
1391                    .into_any_element(),
1392            }
1393        }
1394    }
1395}