deepseek.rs

  1use anyhow::{Result, anyhow};
  2use collections::{BTreeMap, HashMap};
  3use deepseek::DEEPSEEK_API_URL;
  4
  5use futures::Stream;
  6use futures::{FutureExt, StreamExt, future::BoxFuture, stream::BoxStream};
  7use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
  8use http_client::HttpClient;
  9use language_model::{
 10    ApiKeyState, AuthenticateError, EnvVar, IconOrSvg, LanguageModel, LanguageModelCompletionError,
 11    LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider,
 12    LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
 13    LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolResultContent,
 14    LanguageModelToolUse, MessageContent, RateLimiter, Role, StopReason, TokenUsage, env_var,
 15};
 16pub use settings::DeepseekAvailableModel as AvailableModel;
 17use settings::{Settings, SettingsStore};
 18use std::pin::Pin;
 19use std::str::FromStr;
 20use std::sync::{Arc, LazyLock};
 21
 22use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
 23use ui_input::InputField;
 24use util::ResultExt;
 25
 26const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("deepseek");
 27const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("DeepSeek");
 28
 29const API_KEY_ENV_VAR_NAME: &str = "DEEPSEEK_API_KEY";
 30static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);
 31
 32#[derive(Default)]
 33struct RawToolCall {
 34    id: String,
 35    name: String,
 36    arguments: String,
 37}
 38
 39#[derive(Default, Clone, Debug, PartialEq)]
 40pub struct DeepSeekSettings {
 41    pub api_url: String,
 42    pub available_models: Vec<AvailableModel>,
 43}
 44pub struct DeepSeekLanguageModelProvider {
 45    http_client: Arc<dyn HttpClient>,
 46    state: Entity<State>,
 47}
 48
 49pub struct State {
 50    api_key_state: ApiKeyState,
 51}
 52
 53impl State {
 54    fn is_authenticated(&self) -> bool {
 55        self.api_key_state.has_key()
 56    }
 57
 58    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
 59        let api_url = DeepSeekLanguageModelProvider::api_url(cx);
 60        self.api_key_state
 61            .store(api_url, api_key, |this| &mut this.api_key_state, cx)
 62    }
 63
 64    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
 65        let api_url = DeepSeekLanguageModelProvider::api_url(cx);
 66        self.api_key_state
 67            .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
 68    }
 69}
 70
 71impl DeepSeekLanguageModelProvider {
 72    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
 73        let state = cx.new(|cx| {
 74            cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
 75                let api_url = Self::api_url(cx);
 76                this.api_key_state
 77                    .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
 78                cx.notify();
 79            })
 80            .detach();
 81            State {
 82                api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
 83            }
 84        });
 85
 86        Self { http_client, state }
 87    }
 88
 89    fn create_language_model(&self, model: deepseek::Model) -> Arc<dyn LanguageModel> {
 90        Arc::new(DeepSeekLanguageModel {
 91            id: LanguageModelId::from(model.id().to_string()),
 92            model,
 93            state: self.state.clone(),
 94            http_client: self.http_client.clone(),
 95            request_limiter: RateLimiter::new(4),
 96        })
 97    }
 98
 99    fn settings(cx: &App) -> &DeepSeekSettings {
100        &crate::AllLanguageModelSettings::get_global(cx).deepseek
101    }
102
103    fn api_url(cx: &App) -> SharedString {
104        let api_url = &Self::settings(cx).api_url;
105        if api_url.is_empty() {
106            DEEPSEEK_API_URL.into()
107        } else {
108            SharedString::new(api_url.as_str())
109        }
110    }
111}
112
113impl LanguageModelProviderState for DeepSeekLanguageModelProvider {
114    type ObservableEntity = State;
115
116    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
117        Some(self.state.clone())
118    }
119}
120
121impl LanguageModelProvider for DeepSeekLanguageModelProvider {
122    fn id(&self) -> LanguageModelProviderId {
123        PROVIDER_ID
124    }
125
126    fn name(&self) -> LanguageModelProviderName {
127        PROVIDER_NAME
128    }
129
130    fn icon(&self) -> IconOrSvg {
131        IconOrSvg::Icon(IconName::AiDeepSeek)
132    }
133
134    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
135        Some(self.create_language_model(deepseek::Model::default()))
136    }
137
138    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
139        Some(self.create_language_model(deepseek::Model::default_fast()))
140    }
141
142    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
143        let mut models = BTreeMap::default();
144
145        models.insert("deepseek-chat", deepseek::Model::Chat);
146        models.insert("deepseek-reasoner", deepseek::Model::Reasoner);
147
148        for available_model in &Self::settings(cx).available_models {
149            models.insert(
150                &available_model.name,
151                deepseek::Model::Custom {
152                    name: available_model.name.clone(),
153                    display_name: available_model.display_name.clone(),
154                    max_tokens: available_model.max_tokens,
155                    max_output_tokens: available_model.max_output_tokens,
156                },
157            );
158        }
159
160        models
161            .into_values()
162            .map(|model| self.create_language_model(model))
163            .collect()
164    }
165
166    fn is_authenticated(&self, cx: &App) -> bool {
167        self.state.read(cx).is_authenticated()
168    }
169
170    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
171        self.state.update(cx, |state, cx| state.authenticate(cx))
172    }
173
174    fn configuration_view(
175        &self,
176        _target_agent: language_model::ConfigurationViewTargetAgent,
177        window: &mut Window,
178        cx: &mut App,
179    ) -> AnyView {
180        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
181            .into()
182    }
183
184    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
185        self.state
186            .update(cx, |state, cx| state.set_api_key(None, cx))
187    }
188}
189
190pub struct DeepSeekLanguageModel {
191    id: LanguageModelId,
192    model: deepseek::Model,
193    state: Entity<State>,
194    http_client: Arc<dyn HttpClient>,
195    request_limiter: RateLimiter,
196}
197
198impl DeepSeekLanguageModel {
199    fn stream_completion(
200        &self,
201        request: deepseek::Request,
202        cx: &AsyncApp,
203    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<deepseek::StreamResponse>>>> {
204        let http_client = self.http_client.clone();
205
206        let (api_key, api_url) = self.state.read_with(cx, |state, cx| {
207            let api_url = DeepSeekLanguageModelProvider::api_url(cx);
208            (state.api_key_state.key(&api_url), api_url)
209        });
210
211        let future = self.request_limiter.stream(async move {
212            let Some(api_key) = api_key else {
213                return Err(LanguageModelCompletionError::NoApiKey {
214                    provider: PROVIDER_NAME,
215                });
216            };
217            let request =
218                deepseek::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
219            let response = request.await?;
220            Ok(response)
221        });
222
223        async move { Ok(future.await?.boxed()) }.boxed()
224    }
225}
226
227impl LanguageModel for DeepSeekLanguageModel {
228    fn id(&self) -> LanguageModelId {
229        self.id.clone()
230    }
231
232    fn name(&self) -> LanguageModelName {
233        LanguageModelName::from(self.model.display_name().to_string())
234    }
235
236    fn provider_id(&self) -> LanguageModelProviderId {
237        PROVIDER_ID
238    }
239
240    fn provider_name(&self) -> LanguageModelProviderName {
241        PROVIDER_NAME
242    }
243
244    fn supports_tools(&self) -> bool {
245        true
246    }
247
248    fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool {
249        true
250    }
251
252    fn supports_images(&self) -> bool {
253        false
254    }
255
256    fn telemetry_id(&self) -> String {
257        format!("deepseek/{}", self.model.id())
258    }
259
260    fn max_token_count(&self) -> u64 {
261        self.model.max_token_count()
262    }
263
264    fn max_output_tokens(&self) -> Option<u64> {
265        self.model.max_output_tokens()
266    }
267
268    fn count_tokens(
269        &self,
270        request: LanguageModelRequest,
271        cx: &App,
272    ) -> BoxFuture<'static, Result<u64>> {
273        cx.background_spawn(async move {
274            let messages = request
275                .messages
276                .into_iter()
277                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
278                    role: match message.role {
279                        Role::User => "user".into(),
280                        Role::Assistant => "assistant".into(),
281                        Role::System => "system".into(),
282                    },
283                    content: Some(message.string_contents()),
284                    name: None,
285                    function_call: None,
286                })
287                .collect::<Vec<_>>();
288
289            tiktoken_rs::num_tokens_from_messages("gpt-4", &messages).map(|tokens| tokens as u64)
290        })
291        .boxed()
292    }
293
294    fn stream_completion(
295        &self,
296        request: LanguageModelRequest,
297        cx: &AsyncApp,
298    ) -> BoxFuture<
299        'static,
300        Result<
301            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
302            LanguageModelCompletionError,
303        >,
304    > {
305        let request = into_deepseek(request, &self.model, self.max_output_tokens());
306        let stream = self.stream_completion(request, cx);
307
308        async move {
309            let mapper = DeepSeekEventMapper::new();
310            Ok(mapper.map_stream(stream.await?).boxed())
311        }
312        .boxed()
313    }
314}
315
316pub fn into_deepseek(
317    request: LanguageModelRequest,
318    model: &deepseek::Model,
319    max_output_tokens: Option<u64>,
320) -> deepseek::Request {
321    let is_reasoner = model == &deepseek::Model::Reasoner;
322
323    let mut messages = Vec::new();
324    let mut current_reasoning: Option<String> = None;
325
326    for message in request.messages {
327        for content in message.content {
328            match content {
329                MessageContent::Text(text) => messages.push(match message.role {
330                    Role::User => deepseek::RequestMessage::User { content: text },
331                    Role::Assistant => deepseek::RequestMessage::Assistant {
332                        content: Some(text),
333                        tool_calls: Vec::new(),
334                        reasoning_content: current_reasoning.take(),
335                    },
336                    Role::System => deepseek::RequestMessage::System { content: text },
337                }),
338                MessageContent::Thinking { text, .. } => {
339                    // Accumulate reasoning content for next assistant message
340                    current_reasoning.get_or_insert_default().push_str(&text);
341                }
342                MessageContent::RedactedThinking(_) => {}
343                MessageContent::Image(_) => {}
344                MessageContent::ToolUse(tool_use) => {
345                    let tool_call = deepseek::ToolCall {
346                        id: tool_use.id.to_string(),
347                        content: deepseek::ToolCallContent::Function {
348                            function: deepseek::FunctionContent {
349                                name: tool_use.name.to_string(),
350                                arguments: serde_json::to_string(&tool_use.input)
351                                    .unwrap_or_default(),
352                            },
353                        },
354                    };
355
356                    if let Some(deepseek::RequestMessage::Assistant { tool_calls, .. }) =
357                        messages.last_mut()
358                    {
359                        tool_calls.push(tool_call);
360                    } else {
361                        messages.push(deepseek::RequestMessage::Assistant {
362                            content: None,
363                            tool_calls: vec![tool_call],
364                            reasoning_content: current_reasoning.take(),
365                        });
366                    }
367                }
368                MessageContent::ToolResult(tool_result) => {
369                    match &tool_result.content {
370                        LanguageModelToolResultContent::Text(text) => {
371                            messages.push(deepseek::RequestMessage::Tool {
372                                content: text.to_string(),
373                                tool_call_id: tool_result.tool_use_id.to_string(),
374                            });
375                        }
376                        LanguageModelToolResultContent::Image(_) => {}
377                    };
378                }
379            }
380        }
381    }
382
383    deepseek::Request {
384        model: model.id().to_string(),
385        messages,
386        stream: true,
387        max_tokens: max_output_tokens,
388        temperature: if is_reasoner {
389            None
390        } else {
391            request.temperature
392        },
393        response_format: None,
394        tools: request
395            .tools
396            .into_iter()
397            .map(|tool| deepseek::ToolDefinition::Function {
398                function: deepseek::FunctionDefinition {
399                    name: tool.name,
400                    description: Some(tool.description),
401                    parameters: Some(tool.input_schema),
402                },
403            })
404            .collect(),
405    }
406}
407
408pub struct DeepSeekEventMapper {
409    tool_calls_by_index: HashMap<usize, RawToolCall>,
410}
411
412impl DeepSeekEventMapper {
413    pub fn new() -> Self {
414        Self {
415            tool_calls_by_index: HashMap::default(),
416        }
417    }
418
419    pub fn map_stream(
420        mut self,
421        events: Pin<Box<dyn Send + Stream<Item = Result<deepseek::StreamResponse>>>>,
422    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
423    {
424        events.flat_map(move |event| {
425            futures::stream::iter(match event {
426                Ok(event) => self.map_event(event),
427                Err(error) => vec![Err(LanguageModelCompletionError::from(error))],
428            })
429        })
430    }
431
432    pub fn map_event(
433        &mut self,
434        event: deepseek::StreamResponse,
435    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
436        let Some(choice) = event.choices.first() else {
437            return vec![Err(LanguageModelCompletionError::from(anyhow!(
438                "Response contained no choices"
439            )))];
440        };
441
442        let mut events = Vec::new();
443        if let Some(content) = choice.delta.content.clone() {
444            events.push(Ok(LanguageModelCompletionEvent::Text(content)));
445        }
446
447        if let Some(reasoning_content) = choice.delta.reasoning_content.clone() {
448            events.push(Ok(LanguageModelCompletionEvent::Thinking {
449                text: reasoning_content,
450                signature: None,
451            }));
452        }
453
454        if let Some(tool_calls) = choice.delta.tool_calls.as_ref() {
455            for tool_call in tool_calls {
456                let entry = self.tool_calls_by_index.entry(tool_call.index).or_default();
457
458                if let Some(tool_id) = tool_call.id.clone() {
459                    entry.id = tool_id;
460                }
461
462                if let Some(function) = tool_call.function.as_ref() {
463                    if let Some(name) = function.name.clone() {
464                        entry.name = name;
465                    }
466
467                    if let Some(arguments) = function.arguments.clone() {
468                        entry.arguments.push_str(&arguments);
469                    }
470                }
471            }
472        }
473
474        if let Some(usage) = event.usage {
475            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
476                input_tokens: usage.prompt_tokens,
477                output_tokens: usage.completion_tokens,
478                cache_creation_input_tokens: 0,
479                cache_read_input_tokens: 0,
480            })));
481        }
482
483        match choice.finish_reason.as_deref() {
484            Some("stop") => {
485                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
486            }
487            Some("tool_calls") => {
488                events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
489                    match serde_json::Value::from_str(&tool_call.arguments) {
490                        Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
491                            LanguageModelToolUse {
492                                id: tool_call.id.clone().into(),
493                                name: tool_call.name.as_str().into(),
494                                is_input_complete: true,
495                                input,
496                                raw_input: tool_call.arguments.clone(),
497                                thought_signature: None,
498                            },
499                        )),
500                        Err(error) => Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
501                            id: tool_call.id.clone().into(),
502                            tool_name: tool_call.name.as_str().into(),
503                            raw_input: tool_call.arguments.into(),
504                            json_parse_error: error.to_string(),
505                        }),
506                    }
507                }));
508
509                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
510            }
511            Some(stop_reason) => {
512                log::error!("Unexpected DeepSeek stop_reason: {stop_reason:?}",);
513                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
514            }
515            None => {}
516        }
517
518        events
519    }
520}
521
522struct ConfigurationView {
523    api_key_editor: Entity<InputField>,
524    state: Entity<State>,
525    load_credentials_task: Option<Task<()>>,
526}
527
528impl ConfigurationView {
529    fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
530        let api_key_editor =
531            cx.new(|cx| InputField::new(window, cx, "sk-00000000000000000000000000000000"));
532
533        cx.observe(&state, |_, _, cx| {
534            cx.notify();
535        })
536        .detach();
537
538        let load_credentials_task = Some(cx.spawn({
539            let state = state.clone();
540            async move |this, cx| {
541                if let Some(task) = Some(state.update(cx, |state, cx| state.authenticate(cx))) {
542                    let _ = task.await;
543                }
544
545                this.update(cx, |this, cx| {
546                    this.load_credentials_task = None;
547                    cx.notify();
548                })
549                .log_err();
550            }
551        }));
552
553        Self {
554            api_key_editor,
555            state,
556            load_credentials_task,
557        }
558    }
559
560    fn save_api_key(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context<Self>) {
561        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
562        if api_key.is_empty() {
563            return;
564        }
565
566        let state = self.state.clone();
567        cx.spawn(async move |_, cx| {
568            state
569                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))
570                .await
571        })
572        .detach_and_log_err(cx);
573    }
574
575    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
576        self.api_key_editor
577            .update(cx, |editor, cx| editor.set_text("", window, cx));
578
579        let state = self.state.clone();
580        cx.spawn(async move |_, cx| {
581            state
582                .update(cx, |state, cx| state.set_api_key(None, cx))
583                .await
584        })
585        .detach_and_log_err(cx);
586    }
587
588    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
589        !self.state.read(cx).is_authenticated()
590    }
591}
592
593impl Render for ConfigurationView {
594    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
595        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();
596        let configured_card_label = if env_var_set {
597            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable")
598        } else {
599            let api_url = DeepSeekLanguageModelProvider::api_url(cx);
600            if api_url == DEEPSEEK_API_URL {
601                "API key configured".to_string()
602            } else {
603                format!("API key configured for {}", api_url)
604            }
605        };
606
607        if self.load_credentials_task.is_some() {
608            div()
609                .child(Label::new("Loading credentials..."))
610                .into_any_element()
611        } else if self.should_render_editor(cx) {
612            v_flex()
613                .size_full()
614                .on_action(cx.listener(Self::save_api_key))
615                .child(Label::new("To use DeepSeek in Zed, you need an API key:"))
616                .child(
617                    List::new()
618                        .child(
619                            ListBulletItem::new("")
620                                .child(Label::new("Get your API key from the"))
621                                .child(ButtonLink::new(
622                                    "DeepSeek console",
623                                    "https://platform.deepseek.com/api_keys",
624                                )),
625                        )
626                        .child(ListBulletItem::new(
627                            "Paste your API key below and hit enter to start using the assistant",
628                        )),
629                )
630                .child(self.api_key_editor.clone())
631                .child(
632                    Label::new(format!(
633                        "You can also set the {API_KEY_ENV_VAR_NAME} environment variable and restart Zed."
634                    ))
635                    .size(LabelSize::Small)
636                    .color(Color::Muted),
637                )
638                .into_any_element()
639        } else {
640            ConfiguredApiCard::new(configured_card_label)
641                .disabled(env_var_set)
642                .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx)))
643                .into_any_element()
644        }
645    }
646}