deepseek.rs

use anyhow::{Result, anyhow};
use collections::{BTreeMap, HashMap};
use deepseek::DEEPSEEK_API_URL;
use futures::{FutureExt, Stream, StreamExt, future, future::BoxFuture, stream::BoxStream};
use gpui::{AnyView, App, AsyncApp, Context, Entity, SharedString, Task, Window};
use http_client::HttpClient;
use language_model::{
    ApiKeyState, AuthenticateError, EnvVar, LanguageModel, LanguageModelCompletionError,
    LanguageModelCompletionEvent, LanguageModelId, LanguageModelName, LanguageModelProvider,
    LanguageModelProviderId, LanguageModelProviderName, LanguageModelProviderState,
    LanguageModelRequest, LanguageModelToolChoice, LanguageModelToolResultContent,
    LanguageModelToolUse, MessageContent, RateLimiter, Role, StopReason, TokenUsage, env_var,
};
pub use settings::DeepseekAvailableModel as AvailableModel;
use settings::{Settings, SettingsStore};
use std::pin::Pin;
use std::str::FromStr;
use std::sync::{Arc, LazyLock};

use ui::{ButtonLink, ConfiguredApiCard, List, ListBulletItem, prelude::*};
use ui_input::InputField;
use util::ResultExt;

const PROVIDER_ID: LanguageModelProviderId = LanguageModelProviderId::new("deepseek");
const PROVIDER_NAME: LanguageModelProviderName = LanguageModelProviderName::new("DeepSeek");

const API_KEY_ENV_VAR_NAME: &str = "DEEPSEEK_API_KEY";
static API_KEY_ENV_VAR: LazyLock<EnvVar> = env_var!(API_KEY_ENV_VAR_NAME);

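/// Accumulates the fragments of a streamed tool call (id, name, and argument JSON)
/// until the stream reports a `tool_calls` finish reason.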
#[derive(Default)]
struct RawToolCall {
    id: String,
    name: String,
    arguments: String,
}

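/// DeepSeek provider settings: a custom API URL (an empty string falls back to
/// `DEEPSEEK_API_URL`) and any user-configured models beyond the built-in ones.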
#[derive(Default, Clone, Debug, PartialEq)]
pub struct DeepSeekSettings {
    pub api_url: String,
    pub available_models: Vec<AvailableModel>,
}

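/// The DeepSeek language model provider, which owns the HTTP client and the shared
/// API key state used by all of its models.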
pub struct DeepSeekLanguageModelProvider {
    http_client: Arc<dyn HttpClient>,
    state: Entity<State>,
}

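/// Shared, observable provider state that tracks the DeepSeek API key via `ApiKeyState`;
/// the key may also be supplied through the `DEEPSEEK_API_KEY` environment variable.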
pub struct State {
    api_key_state: ApiKeyState,
}

impl State {
    fn is_authenticated(&self) -> bool {
        self.api_key_state.has_key()
    }

    fn set_api_key(&mut self, api_key: Option<String>, cx: &mut Context<Self>) -> Task<Result<()>> {
        let api_url = DeepSeekLanguageModelProvider::api_url(cx);
        self.api_key_state
            .store(api_url, api_key, |this| &mut this.api_key_state, cx)
    }

    fn authenticate(&mut self, cx: &mut Context<Self>) -> Task<Result<(), AuthenticateError>> {
        let api_url = DeepSeekLanguageModelProvider::api_url(cx);
        self.api_key_state
            .load_if_needed(api_url, |this| &mut this.api_key_state, cx)
    }
}

impl DeepSeekLanguageModelProvider {
    pub fn new(http_client: Arc<dyn HttpClient>, cx: &mut App) -> Self {
        let state = cx.new(|cx| {
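            // Re-resolve the stored API key whenever the configured API URL changes in settings.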
            cx.observe_global::<SettingsStore>(|this: &mut State, cx| {
                let api_url = Self::api_url(cx);
                this.api_key_state
                    .handle_url_change(api_url, |this| &mut this.api_key_state, cx);
                cx.notify();
            })
            .detach();
            State {
                api_key_state: ApiKeyState::new(Self::api_url(cx), (*API_KEY_ENV_VAR).clone()),
            }
        });

        Self { http_client, state }
    }

    fn create_language_model(&self, model: deepseek::Model) -> Arc<dyn LanguageModel> {
        Arc::new(DeepSeekLanguageModel {
            id: LanguageModelId::from(model.id().to_string()),
            model,
            state: self.state.clone(),
            http_client: self.http_client.clone(),
            request_limiter: RateLimiter::new(4),
        })
    }

    fn settings(cx: &App) -> &DeepSeekSettings {
        &crate::AllLanguageModelSettings::get_global(cx).deepseek
    }

    fn api_url(cx: &App) -> SharedString {
        let api_url = &Self::settings(cx).api_url;
        if api_url.is_empty() {
            DEEPSEEK_API_URL.into()
        } else {
            SharedString::new(api_url.as_str())
        }
    }
}

impl LanguageModelProviderState for DeepSeekLanguageModelProvider {
    type ObservableEntity = State;

    fn observable_entity(&self) -> Option<Entity<Self::ObservableEntity>> {
        Some(self.state.clone())
    }
}

impl LanguageModelProvider for DeepSeekLanguageModelProvider {
    fn id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn icon(&self) -> IconName {
        IconName::AiDeepSeek
    }

    fn default_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        Some(self.create_language_model(deepseek::Model::default()))
    }

    fn default_fast_model(&self, _cx: &App) -> Option<Arc<dyn LanguageModel>> {
        Some(self.create_language_model(deepseek::Model::default_fast()))
    }

    fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
        let mut models = BTreeMap::default();

        models.insert("deepseek-chat", deepseek::Model::Chat);
        models.insert("deepseek-reasoner", deepseek::Model::Reasoner);

        for available_model in &Self::settings(cx).available_models {
            models.insert(
                &available_model.name,
                deepseek::Model::Custom {
                    name: available_model.name.clone(),
                    display_name: available_model.display_name.clone(),
                    max_tokens: available_model.max_tokens,
                    max_output_tokens: available_model.max_output_tokens,
                },
            );
        }

        models
            .into_values()
            .map(|model| self.create_language_model(model))
            .collect()
    }

    fn is_authenticated(&self, cx: &App) -> bool {
        self.state.read(cx).is_authenticated()
    }

    fn authenticate(&self, cx: &mut App) -> Task<Result<(), AuthenticateError>> {
        self.state.update(cx, |state, cx| state.authenticate(cx))
    }

    fn configuration_view(
        &self,
        _target_agent: language_model::ConfigurationViewTargetAgent,
        window: &mut Window,
        cx: &mut App,
    ) -> AnyView {
        cx.new(|cx| ConfigurationView::new(self.state.clone(), window, cx))
            .into()
    }

    fn reset_credentials(&self, cx: &mut App) -> Task<Result<()>> {
        self.state
            .update(cx, |state, cx| state.set_api_key(None, cx))
    }
}

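/// A single DeepSeek model exposed to Zed, backed by the provider's shared key state
/// and HTTP client, with its own request rate limiter.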
pub struct DeepSeekLanguageModel {
    id: LanguageModelId,
    model: deepseek::Model,
    state: Entity<State>,
    http_client: Arc<dyn HttpClient>,
    request_limiter: RateLimiter,
}

impl DeepSeekLanguageModel {
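    /// Issues a rate-limited streaming completion request against the configured
    /// DeepSeek endpoint, reading the API key and URL from the shared provider state.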
    fn stream_completion(
        &self,
        request: deepseek::Request,
        cx: &AsyncApp,
    ) -> BoxFuture<'static, Result<BoxStream<'static, Result<deepseek::StreamResponse>>>> {
        let http_client = self.http_client.clone();

        let Ok((api_key, api_url)) = self.state.read_with(cx, |state, cx| {
            let api_url = DeepSeekLanguageModelProvider::api_url(cx);
            (state.api_key_state.key(&api_url), api_url)
        }) else {
            return future::ready(Err(anyhow!("App state dropped"))).boxed();
        };

        let future = self.request_limiter.stream(async move {
            let Some(api_key) = api_key else {
                return Err(LanguageModelCompletionError::NoApiKey {
                    provider: PROVIDER_NAME,
                });
            };
            let request =
                deepseek::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
            let response = request.await?;
            Ok(response)
        });

        async move { Ok(future.await?.boxed()) }.boxed()
    }
}

impl LanguageModel for DeepSeekLanguageModel {
    fn id(&self) -> LanguageModelId {
        self.id.clone()
    }

    fn name(&self) -> LanguageModelName {
        LanguageModelName::from(self.model.display_name().to_string())
    }

    fn provider_id(&self) -> LanguageModelProviderId {
        PROVIDER_ID
    }

    fn provider_name(&self) -> LanguageModelProviderName {
        PROVIDER_NAME
    }

    fn supports_tools(&self) -> bool {
        true
    }

    fn supports_tool_choice(&self, _choice: LanguageModelToolChoice) -> bool {
        true
    }

    fn supports_images(&self) -> bool {
        false
    }

    fn telemetry_id(&self) -> String {
        format!("deepseek/{}", self.model.id())
    }

    fn max_token_count(&self) -> u64 {
        self.model.max_token_count()
    }

    fn max_output_tokens(&self) -> Option<u64> {
        self.model.max_output_tokens()
    }

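    // Token counts are approximated with tiktoken's "gpt-4" tokenizer, so the result
    // is an estimate rather than an exact DeepSeek token count.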
    fn count_tokens(
        &self,
        request: LanguageModelRequest,
        cx: &App,
    ) -> BoxFuture<'static, Result<u64>> {
        cx.background_spawn(async move {
            let messages = request
                .messages
                .into_iter()
                .map(|message| tiktoken_rs::ChatCompletionRequestMessage {
                    role: match message.role {
                        Role::User => "user".into(),
                        Role::Assistant => "assistant".into(),
                        Role::System => "system".into(),
                    },
                    content: Some(message.string_contents()),
                    name: None,
                    function_call: None,
                })
                .collect::<Vec<_>>();

            tiktoken_rs::num_tokens_from_messages("gpt-4", &messages).map(|tokens| tokens as u64)
        })
        .boxed()
    }

    fn stream_completion(
        &self,
        request: LanguageModelRequest,
        cx: &AsyncApp,
    ) -> BoxFuture<
        'static,
        Result<
            BoxStream<'static, Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
            LanguageModelCompletionError,
        >,
    > {
        let request = into_deepseek(request, &self.model, self.max_output_tokens());
        let stream = self.stream_completion(request, cx);

        async move {
            let mapper = DeepSeekEventMapper::new();
            Ok(mapper.map_stream(stream.await?).boxed())
        }
        .boxed()
    }
}

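/// Converts Zed's provider-agnostic request into a DeepSeek chat request.
///
/// Text, thinking, tool-use, and tool-result content are flattened into DeepSeek's
/// message kinds, accumulated reasoning is attached to the next assistant message,
/// and `temperature` is dropped for the reasoner model.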
pub fn into_deepseek(
    request: LanguageModelRequest,
    model: &deepseek::Model,
    max_output_tokens: Option<u64>,
) -> deepseek::Request {
    let is_reasoner = model == &deepseek::Model::Reasoner;

    let mut messages = Vec::new();
    let mut current_reasoning: Option<String> = None;

    for message in request.messages {
        for content in message.content {
            match content {
                MessageContent::Text(text) => messages.push(match message.role {
                    Role::User => deepseek::RequestMessage::User { content: text },
                    Role::Assistant => deepseek::RequestMessage::Assistant {
                        content: Some(text),
                        tool_calls: Vec::new(),
                        reasoning_content: current_reasoning.take(),
                    },
                    Role::System => deepseek::RequestMessage::System { content: text },
                }),
                MessageContent::Thinking { text, .. } => {
                    // Accumulate reasoning content for the next assistant message.
                    current_reasoning.get_or_insert_default().push_str(&text);
                }
                MessageContent::RedactedThinking(_) => {}
                MessageContent::Image(_) => {}
                MessageContent::ToolUse(tool_use) => {
                    let tool_call = deepseek::ToolCall {
                        id: tool_use.id.to_string(),
                        content: deepseek::ToolCallContent::Function {
                            function: deepseek::FunctionContent {
                                name: tool_use.name.to_string(),
                                arguments: serde_json::to_string(&tool_use.input)
                                    .unwrap_or_default(),
                            },
                        },
                    };

                    if let Some(deepseek::RequestMessage::Assistant { tool_calls, .. }) =
                        messages.last_mut()
                    {
                        tool_calls.push(tool_call);
                    } else {
                        messages.push(deepseek::RequestMessage::Assistant {
                            content: None,
                            tool_calls: vec![tool_call],
                            reasoning_content: current_reasoning.take(),
                        });
                    }
                }
                MessageContent::ToolResult(tool_result) => {
                    match &tool_result.content {
                        LanguageModelToolResultContent::Text(text) => {
                            messages.push(deepseek::RequestMessage::Tool {
                                content: text.to_string(),
                                tool_call_id: tool_result.tool_use_id.to_string(),
                            });
                        }
                        LanguageModelToolResultContent::Image(_) => {}
                    };
                }
            }
        }
    }

    deepseek::Request {
        model: model.id().to_string(),
        messages,
        stream: true,
        max_tokens: max_output_tokens,
        temperature: if is_reasoner {
            None
        } else {
            request.temperature
        },
        response_format: None,
        tools: request
            .tools
            .into_iter()
            .map(|tool| deepseek::ToolDefinition::Function {
                function: deepseek::FunctionDefinition {
                    name: tool.name,
                    description: Some(tool.description),
                    parameters: Some(tool.input_schema),
                },
            })
            .collect(),
    }
}

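/// Maps DeepSeek stream responses onto `LanguageModelCompletionEvent`s, buffering
/// partial tool calls by index until the stream finishes with `tool_calls`.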
pub struct DeepSeekEventMapper {
    tool_calls_by_index: HashMap<usize, RawToolCall>,
}

impl DeepSeekEventMapper {
    pub fn new() -> Self {
        Self {
            tool_calls_by_index: HashMap::default(),
        }
    }

    pub fn map_stream(
        mut self,
        events: Pin<Box<dyn Send + Stream<Item = Result<deepseek::StreamResponse>>>>,
    ) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
    {
        events.flat_map(move |event| {
            futures::stream::iter(match event {
                Ok(event) => self.map_event(event),
                Err(error) => vec![Err(LanguageModelCompletionError::from(error))],
            })
        })
    }

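    /// Translates a single stream chunk into zero or more completion events: text,
    /// thinking, usage updates, accumulated tool-call deltas, and stop reasons.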
    pub fn map_event(
        &mut self,
        event: deepseek::StreamResponse,
    ) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
        let Some(choice) = event.choices.first() else {
            return vec![Err(LanguageModelCompletionError::from(anyhow!(
                "Response contained no choices"
            )))];
        };

        let mut events = Vec::new();
        if let Some(content) = choice.delta.content.clone() {
            events.push(Ok(LanguageModelCompletionEvent::Text(content)));
        }

        if let Some(reasoning_content) = choice.delta.reasoning_content.clone() {
            events.push(Ok(LanguageModelCompletionEvent::Thinking {
                text: reasoning_content,
                signature: None,
            }));
        }

        if let Some(tool_calls) = choice.delta.tool_calls.as_ref() {
            for tool_call in tool_calls {
                let entry = self.tool_calls_by_index.entry(tool_call.index).or_default();

                if let Some(tool_id) = tool_call.id.clone() {
                    entry.id = tool_id;
                }

                if let Some(function) = tool_call.function.as_ref() {
                    if let Some(name) = function.name.clone() {
                        entry.name = name;
                    }

                    if let Some(arguments) = function.arguments.clone() {
                        entry.arguments.push_str(&arguments);
                    }
                }
            }
        }

        if let Some(usage) = event.usage {
            events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
                input_tokens: usage.prompt_tokens,
                output_tokens: usage.completion_tokens,
                cache_creation_input_tokens: 0,
                cache_read_input_tokens: 0,
            })));
        }

        match choice.finish_reason.as_deref() {
            Some("stop") => {
                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
            }
            Some("tool_calls") => {
                events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
                    match serde_json::Value::from_str(&tool_call.arguments) {
                        Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
                            LanguageModelToolUse {
                                id: tool_call.id.clone().into(),
                                name: tool_call.name.as_str().into(),
                                is_input_complete: true,
                                input,
                                raw_input: tool_call.arguments.clone(),
                                thought_signature: None,
                            },
                        )),
                        Err(error) => Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
                            id: tool_call.id.clone().into(),
                            tool_name: tool_call.name.as_str().into(),
                            raw_input: tool_call.arguments.into(),
                            json_parse_error: error.to_string(),
                        }),
                    }
                }));

                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
            }
            Some(stop_reason) => {
                log::error!("Unexpected DeepSeek stop_reason: {stop_reason:?}");
                events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
            }
            None => {}
        }

        events
    }
}

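/// Configuration UI for entering, saving, and resetting the DeepSeek API key.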
struct ConfigurationView {
    api_key_editor: Entity<InputField>,
    state: Entity<State>,
    load_credentials_task: Option<Task<()>>,
}

impl ConfigurationView {
    fn new(state: Entity<State>, window: &mut Window, cx: &mut Context<Self>) -> Self {
        let api_key_editor =
            cx.new(|cx| InputField::new(window, cx, "sk-00000000000000000000000000000000"));

        cx.observe(&state, |_, _, cx| {
            cx.notify();
        })
        .detach();

        let load_credentials_task = Some(cx.spawn({
            let state = state.clone();
            async move |this, cx| {
                if let Some(task) = state
                    .update(cx, |state, cx| state.authenticate(cx))
                    .log_err()
                {
                    let _ = task.await;
                }

                this.update(cx, |this, cx| {
                    this.load_credentials_task = None;
                    cx.notify();
                })
                .log_err();
            }
        }));

        Self {
            api_key_editor,
            state,
            load_credentials_task,
        }
    }

    fn save_api_key(&mut self, _: &menu::Confirm, _window: &mut Window, cx: &mut Context<Self>) {
        let api_key = self.api_key_editor.read(cx).text(cx).trim().to_string();
        if api_key.is_empty() {
            return;
        }

        let state = self.state.clone();
        cx.spawn(async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(Some(api_key), cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    fn reset_api_key(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.api_key_editor
            .update(cx, |editor, cx| editor.set_text("", window, cx));

        let state = self.state.clone();
        cx.spawn(async move |_, cx| {
            state
                .update(cx, |state, cx| state.set_api_key(None, cx))?
                .await
        })
        .detach_and_log_err(cx);
    }

    fn should_render_editor(&self, cx: &mut Context<Self>) -> bool {
        !self.state.read(cx).is_authenticated()
    }
}

impl Render for ConfigurationView {
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let env_var_set = self.state.read(cx).api_key_state.is_from_env_var();
        let configured_card_label = if env_var_set {
            format!("API key set in {API_KEY_ENV_VAR_NAME} environment variable")
        } else {
            let api_url = DeepSeekLanguageModelProvider::api_url(cx);
            if api_url == DEEPSEEK_API_URL {
                "API key configured".to_string()
            } else {
                format!("API key configured for {}", api_url)
            }
        };

        if self.load_credentials_task.is_some() {
            div()
                .child(Label::new("Loading credentials..."))
                .into_any_element()
        } else if self.should_render_editor(cx) {
            v_flex()
                .size_full()
                .on_action(cx.listener(Self::save_api_key))
                .child(Label::new("To use DeepSeek in Zed, you need an API key:"))
                .child(
                    List::new()
                        .child(
                            ListBulletItem::new("")
                                .child(Label::new("Get your API key from the"))
                                .child(ButtonLink::new(
                                    "DeepSeek console",
                                    "https://platform.deepseek.com/api_keys",
                                )),
                        )
                        .child(ListBulletItem::new(
                            "Paste your API key below and hit enter to start using the assistant",
                        )),
                )
                .child(self.api_key_editor.clone())
                .child(
                    Label::new(format!(
                        "Or set the {API_KEY_ENV_VAR_NAME} environment variable."
                    ))
                    .size(LabelSize::Small)
                    .color(Color::Muted),
                )
                .into_any_element()
        } else {
            ConfiguredApiCard::new(configured_card_label)
                .disabled(env_var_set)
                .on_click(cx.listener(|this, _, window, cx| this.reset_api_key(window, cx)))
                .into_any_element()
        }
    }
}