use super::{
    breakpoint_store::BreakpointStore,
    locators::DapLocator,
    session::{self, Session, SessionStateEvent},
};
use crate::{
    InlayHint, InlayHintLabel, ProjectEnvironment, ResolveState,
    project_settings::ProjectSettings,
    terminals::{SshCommand, wrap_for_ssh},
    worktree_store::WorktreeStore,
};
use anyhow::{Result, anyhow};
use async_trait::async_trait;
use collections::HashMap;
use dap::{
    Capabilities, CompletionItem, CompletionsArguments, DapRegistry, EvaluateArguments,
    EvaluateArgumentsContext, EvaluateResponse, RunInTerminalRequestArguments, Source,
    StackFrameId, StartDebuggingRequestArguments,
    adapters::{DapStatus, DebugAdapterBinary, DebugAdapterName, TcpArguments},
    client::SessionId,
    messages::Message,
    requests::{Completions, Evaluate, Request as _, RunInTerminal, StartDebugging},
};
use fs::Fs;
use futures::{
    channel::mpsc,
    future::{Shared, join_all},
};
use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task};
use http_client::HttpClient;
use language::{
    BinaryStatus, Buffer, LanguageRegistry, LanguageToolchainStore,
    language_settings::InlayHintKind, range_from_lsp,
};
use lsp::LanguageServerName;
use node_runtime::NodeRuntime;

use remote::SshRemoteClient;
use rpc::{
    AnyProtoClient, TypedEnvelope,
    proto::{self},
};
use serde_json::Value;
use settings::{Settings, WorktreeId};
use smol::{lock::Mutex, stream::StreamExt};
use std::{
    borrow::Borrow,
    collections::{BTreeMap, HashSet},
    ffi::OsStr,
    net::Ipv4Addr,
    path::{Path, PathBuf},
    sync::Arc,
};
use task::{DebugTaskDefinition, DebugTaskTemplate};
use util::ResultExt as _;
use worktree::Worktree;

pub enum DapStoreEvent {
    DebugClientStarted(SessionId),
    DebugSessionInitialized(SessionId),
    DebugClientShutdown(SessionId),
    DebugClientEvent {
        session_id: SessionId,
        message: Message,
    },
    RunInTerminal {
        session_id: SessionId,
        title: Option<String>,
        cwd: Option<Arc<Path>>,
        command: Option<String>,
        args: Vec<String>,
        envs: HashMap<String, String>,
        sender: mpsc::Sender<Result<u32>>,
    },
    SpawnChildSession {
        request: StartDebuggingRequestArguments,
        parent_session: Entity<Session>,
    },
    Notification(String),
    RemoteHasInitialized,
}

#[allow(clippy::large_enum_variant)]
enum DapStoreMode {
    Local(LocalDapStore),
    Ssh(SshDapStore),
    Collab,
}

pub struct LocalDapStore {
    fs: Arc<dyn Fs>,
    node_runtime: NodeRuntime,
    http_client: Arc<dyn HttpClient>,
    environment: Entity<ProjectEnvironment>,
    language_registry: Arc<LanguageRegistry>,
    toolchain_store: Arc<dyn LanguageToolchainStore>,
    locators: HashMap<String, Arc<dyn DapLocator>>,
}

pub struct SshDapStore {
    ssh_client: Entity<SshRemoteClient>,
    upstream_client: AnyProtoClient,
    upstream_project_id: u64,
}

pub struct DapStore {
    mode: DapStoreMode,
    downstream_client: Option<(AnyProtoClient, u64)>,
    breakpoint_store: Entity<BreakpointStore>,
    worktree_store: Entity<WorktreeStore>,
    sessions: BTreeMap<SessionId, Entity<Session>>,
    next_session_id: u32,
    start_debugging_tx: futures::channel::mpsc::UnboundedSender<(SessionId, Message)>,
    _start_debugging_task: Task<()>,
}

impl EventEmitter<DapStoreEvent> for DapStore {}

impl DapStore {
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_run_debug_locator);
        client.add_entity_request_handler(Self::handle_get_debug_adapter_binary);
    }

    #[expect(clippy::too_many_arguments)]
    pub fn new_local(
        http_client: Arc<dyn HttpClient>,
        node_runtime: NodeRuntime,
        fs: Arc<dyn Fs>,
        language_registry: Arc<LanguageRegistry>,
        environment: Entity<ProjectEnvironment>,
        toolchain_store: Arc<dyn LanguageToolchainStore>,
        worktree_store: Entity<WorktreeStore>,
        breakpoint_store: Entity<BreakpointStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let locators = HashMap::from_iter([(
            "cargo".to_string(),
            Arc::new(super::locators::cargo::CargoLocator {}) as _,
        )]);

        let mode = DapStoreMode::Local(LocalDapStore {
            fs,
            environment,
            http_client,
            node_runtime,
            toolchain_store,
            language_registry,
            locators,
        });

        Self::new(mode, breakpoint_store, worktree_store, cx)
    }

    pub fn new_ssh(
        project_id: u64,
        ssh_client: Entity<SshRemoteClient>,
        breakpoint_store: Entity<BreakpointStore>,
        worktree_store: Entity<WorktreeStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let mode = DapStoreMode::Ssh(SshDapStore {
            upstream_client: ssh_client.read(cx).proto_client(),
            ssh_client,
            upstream_project_id: project_id,
        });

        Self::new(mode, breakpoint_store, worktree_store, cx)
    }

    pub fn new_collab(
        _project_id: u64,
        _upstream_client: AnyProtoClient,
        breakpoint_store: Entity<BreakpointStore>,
        worktree_store: Entity<WorktreeStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(DapStoreMode::Collab, breakpoint_store, worktree_store, cx)
    }

    fn new(
        mode: DapStoreMode,
        breakpoint_store: Entity<BreakpointStore>,
        worktree_store: Entity<WorktreeStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let (start_debugging_tx, mut message_rx) =
            futures::channel::mpsc::unbounded::<(SessionId, Message)>();
        let task = cx.spawn(async move |this, cx| {
            while let Some((session_id, message)) = message_rx.next().await {
                match message {
                    Message::Request(request) => {
                        let _ = this
                            .update(cx, |this, cx| {
                                if request.command == StartDebugging::COMMAND {
                                    this.handle_start_debugging_request(session_id, request, cx)
                                        .detach_and_log_err(cx);
                                } else if request.command == RunInTerminal::COMMAND {
                                    this.handle_run_in_terminal_request(session_id, request, cx)
                                        .detach_and_log_err(cx);
                                }
                            })
                            .log_err();
                    }
                    _ => {}
                }
            }
        });

        Self {
            mode,
            _start_debugging_task: task,
            start_debugging_tx,
            next_session_id: 0,
            downstream_client: None,
            breakpoint_store,
            worktree_store,
            sessions: Default::default(),
        }
    }

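    /// Resolves the debug adapter binary for `definition`.
    ///
    /// Locally, this looks the adapter up in the `DapRegistry`, honors a
    /// user-configured binary path from the project settings, and merges the
    /// directory environment into the binary's env. Over SSH, the binary is
    /// resolved upstream and then wrapped in an SSH command, forwarding a local
    /// port when the adapter expects a TCP connection. Collab projects are not
    /// supported yet.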
    pub fn get_debug_adapter_binary(
        &mut self,
        definition: DebugTaskDefinition,
        cx: &mut Context<Self>,
    ) -> Task<Result<DebugAdapterBinary>> {
        match &self.mode {
            DapStoreMode::Local(_) => {
                let Some(worktree) = self.worktree_store.read(cx).visible_worktrees(cx).next()
                else {
                    return Task::ready(Err(anyhow!("Failed to find a worktree")));
                };
                let Some(adapter) = DapRegistry::global(cx).adapter(&definition.adapter) else {
                    return Task::ready(Err(anyhow!("Failed to find a debug adapter")));
                };

                let user_installed_path = ProjectSettings::get_global(cx)
                    .dap
                    .get(&adapter.name())
                    .and_then(|s| s.binary.as_ref().map(PathBuf::from));

                let delegate = self.delegate(&worktree, cx);
                let cwd: Arc<Path> = definition
                    .cwd()
                    .unwrap_or(worktree.read(cx).abs_path().as_ref())
                    .into();

                cx.spawn(async move |this, cx| {
                    let mut binary = adapter
                        .get_binary(&delegate, &definition, user_installed_path, cx)
                        .await?;

                    let env = this
                        .update(cx, |this, cx| {
                            this.as_local()
                                .unwrap()
                                .environment
                                .update(cx, |environment, cx| {
                                    environment.get_directory_environment(cwd, cx)
                                })
                        })?
                        .await;

                    if let Some(mut env) = env {
                        env.extend(std::mem::take(&mut binary.envs));
                        binary.envs = env;
                    }

                    Ok(binary)
                })
            }
            DapStoreMode::Ssh(ssh) => {
                let request = ssh.upstream_client.request(proto::GetDebugAdapterBinary {
                    project_id: ssh.upstream_project_id,
                    task: Some(definition.to_proto()),
                });
                let ssh_client = ssh.ssh_client.clone();

                cx.spawn(async move |_, cx| {
                    let response = request.await?;
                    let binary = DebugAdapterBinary::from_proto(response)?;
                    let mut ssh_command = ssh_client.update(cx, |ssh, _| {
                        anyhow::Ok(SshCommand {
                            arguments: ssh
                                .ssh_args()
                                .ok_or_else(|| anyhow!("SSH arguments not found"))?,
                        })
                    })??;

                    let mut connection = None;
                    if let Some(c) = binary.connection {
                        let local_bind_addr = Ipv4Addr::new(127, 0, 0, 1);
                        let port =
                            dap::transport::TcpTransport::unused_port(local_bind_addr).await?;

                        ssh_command.add_port_forwarding(port, c.host.to_string(), c.port);
                        connection = Some(TcpArguments {
                            // Connect through the locally forwarded port rather than the
                            // remote adapter's port, since the host below is the local
                            // bind address.
                            port,
                            host: local_bind_addr,
                            timeout: c.timeout,
                        })
                    }

                    let (program, args) = wrap_for_ssh(
                        &ssh_command,
                        Some((&binary.command, &binary.arguments)),
                        binary.cwd.as_deref(),
                        binary.envs,
                        None,
                    );

                    Ok(DebugAdapterBinary {
                        command: program,
                        arguments: args,
                        envs: HashMap::default(),
                        cwd: None,
                        connection,
                        request_args: binary.request_args,
                    })
                })
            }
            DapStoreMode::Collab => {
                Task::ready(Err(anyhow!("Debugging is not yet supported via collab")))
            }
        }
    }

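    /// Resolves a debug task template into a concrete `DebugTaskDefinition`.
    ///
    /// If the template names a locator, the locator runs locally (or upstream
    /// over SSH) to fill in the definition; otherwise the definition is
    /// returned unchanged.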
    pub fn run_debug_locator(
        &mut self,
        template: DebugTaskTemplate,
        cx: &mut Context<Self>,
    ) -> Task<Result<DebugTaskDefinition>> {
        let Some(locator_name) = template.locator else {
            return Task::ready(Ok(template.definition));
        };

        match &self.mode {
            DapStoreMode::Local(local) => {
                if let Some(locator) = local.locators.get(&locator_name).cloned() {
                    cx.background_spawn(
                        async move { locator.run_locator(template.definition).await },
                    )
                } else {
                    Task::ready(Err(anyhow!("Couldn't find locator {}", locator_name)))
                }
            }
            DapStoreMode::Ssh(ssh) => {
                let request = ssh.upstream_client.request(proto::RunDebugLocator {
                    project_id: ssh.upstream_project_id,
                    locator: locator_name,
                    task: Some(template.definition.to_proto()),
                });
                cx.background_spawn(async move {
                    let response = request.await?;
                    DebugTaskDefinition::from_proto(response)
                })
            }
            DapStoreMode::Collab => {
                Task::ready(Err(anyhow!("Debugging is not yet supported via collab")))
            }
        }
    }

    fn as_local(&self) -> Option<&LocalDapStore> {
        match &self.mode {
            DapStoreMode::Local(local_dap_store) => Some(local_dap_store),
            _ => None,
        }
    }

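    /// Creates a new debug session entity and registers it in the store.
    ///
    /// If `parent_session` is given, the new session is recorded as its child.
    /// The store subscribes to the session's state events so it can shut the
    /// session down on `Shutdown` and emit `DebugClientStarted` once it is
    /// running.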
    pub fn new_session(
        &mut self,
        template: DebugTaskDefinition,
        parent_session: Option<Entity<Session>>,
        cx: &mut Context<Self>,
    ) -> Entity<Session> {
        let session_id = SessionId(util::post_inc(&mut self.next_session_id));

        if let Some(session) = &parent_session {
            session.update(cx, |session, _| {
                session.add_child_session_id(session_id);
            });
        }

        let start_debugging_tx = self.start_debugging_tx.clone();

        let session = Session::new(
            self.breakpoint_store.clone(),
            session_id,
            parent_session,
            template.clone(),
            start_debugging_tx,
            cx,
        );

        self.sessions.insert(session_id, session.clone());
        cx.notify();

        cx.subscribe(&session, {
            move |this: &mut DapStore, _, event: &SessionStateEvent, cx| match event {
                SessionStateEvent::Shutdown => {
                    this.shutdown_session(session_id, cx).detach_and_log_err(cx);
                }
                SessionStateEvent::Restart => {}
                SessionStateEvent::Running => {
                    cx.emit(DapStoreEvent::DebugClientStarted(session_id));
                }
            }
        })
        .detach();

        session
    }

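    /// Boots a previously created session: resolves its debug adapter binary
    /// and hands it to the session together with the first visible worktree.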
    pub fn boot_session(
        &self,
        session: Entity<Session>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(worktree) = self.worktree_store.read(cx).visible_worktrees(cx).next() else {
            return Task::ready(Err(anyhow!("Failed to find a worktree")));
        };

        let dap_store = cx.weak_entity();
        let breakpoint_store = self.breakpoint_store.clone();
        let definition = session.read(cx).definition();

        cx.spawn({
            let session = session.clone();
            async move |this, cx| {
                let binary = this
                    .update(cx, |this, cx| {
                        this.get_debug_adapter_binary(definition.clone(), cx)
                    })?
                    .await?;

                session
                    .update(cx, |session, cx| {
                        session.boot(binary, worktree, breakpoint_store, dap_store, cx)
                    })?
                    .await
            }
        })
    }

    pub fn session_by_id(
        &self,
        session_id: impl Borrow<SessionId>,
    ) -> Option<Entity<session::Session>> {
        self.sessions.get(session_id.borrow()).cloned()
    }

    pub fn sessions(&self) -> impl Iterator<Item = &Entity<Session>> {
        self.sessions.values()
    }

    pub fn capabilities_by_id(
        &self,
        session_id: impl Borrow<SessionId>,
        cx: &App,
    ) -> Option<Capabilities> {
        let session_id = session_id.borrow();
        self.sessions
            .get(session_id)
            .map(|client| client.read(cx).capabilities.clone())
    }

    pub fn breakpoint_store(&self) -> &Entity<BreakpointStore> {
        &self.breakpoint_store
    }

    pub fn worktree_store(&self) -> &Entity<WorktreeStore> {
        &self.worktree_store
    }

    #[allow(dead_code)]
    async fn handle_ignore_breakpoint_state(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::IgnoreBreakpointState>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let session_id = SessionId::from_proto(envelope.payload.session_id);

        this.update(&mut cx, |this, cx| {
            if let Some(session) = this.session_by_id(&session_id) {
                session.update(cx, |session, cx| {
                    session.set_ignore_breakpoints(envelope.payload.ignore, cx)
                })
            } else {
                Task::ready(HashMap::default())
            }
        })?
        .await;

        Ok(())
    }

    fn delegate(&self, worktree: &Entity<Worktree>, cx: &mut App) -> DapAdapterDelegate {
        let Some(local_store) = self.as_local() else {
            unimplemented!("Starting session on remote side");
        };

        DapAdapterDelegate::new(
            local_store.fs.clone(),
            worktree.read(cx).id(),
            local_store.node_runtime.clone(),
            local_store.http_client.clone(),
            local_store.language_registry.clone(),
            local_store.toolchain_store.clone(),
            local_store.environment.update(cx, |env, cx| {
                env.get_worktree_environment(worktree.clone(), cx)
            }),
        )
    }

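    /// Handles a reverse `startDebugging` request from the adapter by emitting
    /// `SpawnChildSession` and acknowledging the request; responds with
    /// `success = false` if the request arguments cannot be parsed.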
    fn handle_start_debugging_request(
        &mut self,
        session_id: SessionId,
        request: dap::messages::Request,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(parent_session) = self.session_by_id(session_id) else {
            return Task::ready(Err(anyhow!("Session not found")));
        };
        let request_seq = request.seq;

        let launch_request: Option<Result<StartDebuggingRequestArguments, _>> = request
            .arguments
            .as_ref()
            .map(|value| serde_json::from_value(value.clone()));

        let mut success = true;
        if let Some(Ok(request)) = launch_request {
            cx.emit(DapStoreEvent::SpawnChildSession {
                request,
                parent_session: parent_session.clone(),
            });
        } else {
            log::error!(
                "Failed to parse launch request arguments: {:?}",
                request.arguments
            );
            success = false;
        }

        cx.spawn(async move |_, cx| {
            parent_session
                .update(cx, |session, cx| {
                    session.respond_to_client(
                        request_seq,
                        success,
                        StartDebugging::COMMAND.to_string(),
                        None,
                        cx,
                    )
                })?
                .await
        })
    }

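    /// Handles a reverse `runInTerminal` request: validates the requested cwd,
    /// emits `RunInTerminal` so the UI can spawn a terminal, and responds to
    /// the client with the spawned shell's process id (or an error).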
    fn handle_run_in_terminal_request(
        &mut self,
        session_id: SessionId,
        request: dap::messages::Request,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(session) = self.session_by_id(session_id) else {
            return Task::ready(Err(anyhow!("Session not found")));
        };

        let request_args = serde_json::from_value::<RunInTerminalRequestArguments>(
            request.arguments.unwrap_or_default(),
        )
        .expect("To parse RunInTerminalRequestArguments");

        let seq = request.seq;

        let cwd = Path::new(&request_args.cwd);

        match cwd.try_exists() {
            Ok(false) | Err(_) if !request_args.cwd.is_empty() => {
                return session.update(cx, |session, cx| {
                    session.respond_to_client(
                        seq,
                        false,
                        RunInTerminal::COMMAND.to_string(),
                        serde_json::to_value(dap::ErrorResponse {
                            error: Some(dap::Message {
                                id: seq,
                                format: format!("Received invalid/unknown cwd: {cwd:?}"),
                                variables: None,
                                send_telemetry: None,
                                show_user: None,
                                url: None,
                                url_label: None,
                            }),
                        })
                        .ok(),
                        cx,
                    )
                });
            }
            _ => (),
        }
        let mut args = request_args.args.clone();

        // Handle a special case for the NodeJS debug adapter:
        // if only the Node binary path is provided, set the command to None.
        // This prevents the NodeJS REPL from appearing, which is not the desired behavior.
        // The expected usage is for users to provide their own Node command, e.g. `node test.js`,
        // which allows the NodeJS debug client to attach correctly.
        let command = if args.len() > 1 {
            Some(args.remove(0))
        } else {
            None
        };

        let mut envs: HashMap<String, String> = Default::default();
        if let Some(Value::Object(env)) = request_args.env {
            for (key, value) in env {
                let value_str = match (key.as_str(), value) {
                    (_, Value::String(value)) => value,
                    _ => continue,
                };

                envs.insert(key, value_str);
            }
        }

        let (tx, mut rx) = mpsc::channel::<Result<u32>>(1);
        let cwd = Some(cwd)
            .filter(|cwd| !cwd.as_os_str().is_empty())
            .map(Arc::from)
            .or_else(|| {
                self.session_by_id(session_id)
                    .and_then(|session| session.read(cx).binary().cwd.as_deref().map(Arc::from))
            });
        cx.emit(DapStoreEvent::RunInTerminal {
            session_id,
            title: request_args.title,
            cwd,
            command,
            args,
            envs,
            sender: tx,
        });
        cx.notify();

        let session = session.downgrade();
        cx.spawn(async move |_, cx| {
            let (success, body) = match rx.next().await {
                Some(Ok(pid)) => (
                    true,
                    serde_json::to_value(dap::RunInTerminalResponse {
                        process_id: None,
                        shell_process_id: Some(pid as u64),
                    })
                    .ok(),
                ),
                Some(Err(error)) => (
                    false,
                    serde_json::to_value(dap::ErrorResponse {
                        error: Some(dap::Message {
                            id: seq,
                            format: error.to_string(),
                            variables: None,
                            send_telemetry: None,
                            show_user: None,
                            url: None,
                            url_label: None,
                        }),
                    })
                    .ok(),
                ),
                None => (
                    false,
                    serde_json::to_value(dap::ErrorResponse {
                        error: Some(dap::Message {
                            id: seq,
                            format: "failed to receive response from spawn terminal".to_string(),
                            variables: None,
                            send_telemetry: None,
                            show_user: None,
                            url: None,
                            url_label: None,
                        }),
                    })
                    .ok(),
                ),
            };

            session
                .update(cx, |session, cx| {
                    session.respond_to_client(
                        seq,
                        success,
                        RunInTerminal::COMMAND.to_string(),
                        body,
                        cx,
                    )
                })?
                .await
        })
    }

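    /// Sends an `evaluate` request to the session's adapter for the given
    /// expression in the context of `stack_frame_id`.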
    pub fn evaluate(
        &self,
        session_id: &SessionId,
        stack_frame_id: u64,
        expression: String,
        context: EvaluateArgumentsContext,
        source: Option<Source>,
        cx: &mut Context<Self>,
    ) -> Task<Result<EvaluateResponse>> {
        let Some(client) = self
            .session_by_id(session_id)
            .and_then(|client| client.read(cx).adapter_client())
        else {
            return Task::ready(Err(anyhow!("Could not find client: {:?}", session_id)));
        };

        cx.background_executor().spawn(async move {
            client
                .request::<Evaluate>(EvaluateArguments {
                    expression: expression.clone(),
                    frame_id: Some(stack_frame_id),
                    context: Some(context),
                    format: None,
                    line: None,
                    column: None,
                    source,
                })
                .await
        })
    }

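    /// Requests completion targets from the adapter for `text` at
    /// `completion_column` within the given stack frame.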
    pub fn completions(
        &self,
        session_id: &SessionId,
        stack_frame_id: u64,
        text: String,
        completion_column: u64,
        cx: &mut Context<Self>,
    ) -> Task<Result<Vec<CompletionItem>>> {
        let Some(client) = self
            .session_by_id(session_id)
            .and_then(|client| client.read(cx).adapter_client())
        else {
            return Task::ready(Err(anyhow!("Could not find client: {:?}", session_id)));
        };

        cx.background_executor().spawn(async move {
            Ok(client
                .request::<Completions>(CompletionsArguments {
                    frame_id: Some(stack_frame_id),
                    line: None,
                    text,
                    column: completion_column,
                })
                .await?
                .targets)
        })
    }

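    /// Converts LSP inline values into inlay hints, resolving variable lookups
    /// against the session's variables and evaluating expressions through the
    /// adapter where needed.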
    pub fn resolve_inline_values(
        &self,
        session: Entity<Session>,
        stack_frame_id: StackFrameId,
        buffer_handle: Entity<Buffer>,
        inline_values: Vec<lsp::InlineValue>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Vec<InlayHint>>> {
        let snapshot = buffer_handle.read(cx).snapshot();
        let all_variables = session.read(cx).variables_by_stack_frame_id(stack_frame_id);

        cx.spawn(async move |_, cx| {
            let mut inlay_hints = Vec::with_capacity(inline_values.len());
            for inline_value in inline_values.iter() {
                match inline_value {
                    lsp::InlineValue::Text(text) => {
                        inlay_hints.push(InlayHint {
                            position: snapshot.anchor_after(range_from_lsp(text.range).end),
                            label: InlayHintLabel::String(format!(": {}", text.text)),
                            kind: Some(InlayHintKind::Type),
                            padding_left: false,
                            padding_right: false,
                            tooltip: None,
                            resolve_state: ResolveState::Resolved,
                        });
                    }
                    lsp::InlineValue::VariableLookup(variable_lookup) => {
                        let range = range_from_lsp(variable_lookup.range);

                        let mut variable_name = variable_lookup
                            .variable_name
                            .clone()
                            .unwrap_or_else(|| snapshot.text_for_range(range.clone()).collect());

                        if !variable_lookup.case_sensitive_lookup {
                            variable_name = variable_name.to_ascii_lowercase();
                        }

                        let Some(variable) = all_variables.iter().find(|variable| {
                            if variable_lookup.case_sensitive_lookup {
                                variable.name == variable_name
                            } else {
                                variable.name.to_ascii_lowercase() == variable_name
                            }
                        }) else {
                            continue;
                        };

                        inlay_hints.push(InlayHint {
                            position: snapshot.anchor_after(range.end),
                            label: InlayHintLabel::String(format!(": {}", variable.value)),
                            kind: Some(InlayHintKind::Type),
                            padding_left: false,
                            padding_right: false,
                            tooltip: None,
                            resolve_state: ResolveState::Resolved,
                        });
                    }
                    lsp::InlineValue::EvaluatableExpression(expression) => {
                        let range = range_from_lsp(expression.range);

                        let expression = expression
                            .expression
                            .clone()
                            .unwrap_or_else(|| snapshot.text_for_range(range.clone()).collect());

                        let Ok(eval_task) = session.update(cx, |session, cx| {
                            session.evaluate(
                                expression,
                                Some(EvaluateArgumentsContext::Variables),
                                Some(stack_frame_id),
                                None,
                                cx,
                            )
                        }) else {
                            continue;
                        };

                        if let Some(response) = eval_task.await {
                            inlay_hints.push(InlayHint {
                                position: snapshot.anchor_after(range.end),
                                label: InlayHintLabel::String(format!(": {}", response.result)),
                                kind: Some(InlayHintKind::Type),
                                padding_left: false,
                                padding_right: false,
                                tooltip: None,
                                resolve_state: ResolveState::Resolved,
                            });
                        };
                    }
                };
            }

            Ok(inlay_hints)
        })
    }

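    /// Shuts down all sessions currently tracked by the store.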
    pub fn shutdown_sessions(&mut self, cx: &mut Context<Self>) -> Task<()> {
        let mut tasks = vec![];
        for session_id in self.sessions.keys().cloned().collect::<Vec<_>>() {
            tasks.push(self.shutdown_session(session_id, cx));
        }

        cx.background_executor().spawn(async move {
            futures::future::join_all(tasks).await;
        })
    }

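    /// Shuts down a single session, first shutting down its children and then,
    /// if its parent is left without children, the parent as well.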
    pub fn shutdown_session(
        &mut self,
        session_id: SessionId,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(session) = self.sessions.remove(&session_id) else {
            return Task::ready(Err(anyhow!("Could not find session: {:?}", session_id)));
        };

        let shutdown_children = session
            .read(cx)
            .child_session_ids()
            .iter()
            .map(|session_id| self.shutdown_session(*session_id, cx))
            .collect::<Vec<_>>();

        let shutdown_parent_task = if let Some(parent_session) = session
            .read(cx)
            .parent_id(cx)
            .and_then(|session_id| self.session_by_id(session_id))
        {
            let shutdown_id = parent_session.update(cx, |parent_session, _| {
                parent_session.remove_child_session_id(session_id);

                if parent_session.child_session_ids().is_empty() {
                    Some(parent_session.session_id())
                } else {
                    None
                }
            });

            shutdown_id.map(|session_id| self.shutdown_session(session_id, cx))
        } else {
            None
        };

        let shutdown_task = session.update(cx, |this, cx| this.shutdown(cx));

        cx.background_spawn(async move {
            if !shutdown_children.is_empty() {
                let _ = join_all(shutdown_children).await;
            }

            shutdown_task.await;

            if let Some(parent_task) = shutdown_parent_task {
                parent_task.await?;
            }

            Ok(())
        })
    }

    pub fn shared(
        &mut self,
        project_id: u64,
        downstream_client: AnyProtoClient,
        _: &mut Context<Self>,
    ) {
        self.downstream_client = Some((downstream_client.clone(), project_id));
    }

    pub fn unshared(&mut self, cx: &mut Context<Self>) {
        self.downstream_client.take();

        cx.notify();
    }

    async fn handle_run_debug_locator(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RunDebugLocator>,
        mut cx: AsyncApp,
    ) -> Result<proto::DebugTaskDefinition> {
        let template = DebugTaskTemplate {
            locator: Some(envelope.payload.locator),
            definition: DebugTaskDefinition::from_proto(
                envelope
                    .payload
                    .task
                    .ok_or_else(|| anyhow!("missing definition"))?,
            )?,
        };
        let definition = this
            .update(&mut cx, |this, cx| this.run_debug_locator(template, cx))?
            .await?;
        Ok(definition.to_proto())
    }

    async fn handle_get_debug_adapter_binary(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDebugAdapterBinary>,
        mut cx: AsyncApp,
    ) -> Result<proto::DebugAdapterBinary> {
        let definition = DebugTaskDefinition::from_proto(
            envelope
                .payload
                .task
                .ok_or_else(|| anyhow!("missing definition"))?,
        )?;
        let binary = this
            .update(&mut cx, |this, cx| {
                this.get_debug_adapter_binary(definition, cx)
            })?
            .await?;
        Ok(binary.to_proto())
    }
}

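/// Delegate handed to debug adapters so they can access project services
/// (filesystem, node runtime, HTTP client, language registry, toolchains, and
/// the worktree's shell environment) while their binaries are resolved.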
#[derive(Clone)]
pub struct DapAdapterDelegate {
    fs: Arc<dyn Fs>,
    worktree_id: WorktreeId,
    node_runtime: NodeRuntime,
    http_client: Arc<dyn HttpClient>,
    language_registry: Arc<LanguageRegistry>,
    toolchain_store: Arc<dyn LanguageToolchainStore>,
    updated_adapters: Arc<Mutex<HashSet<DebugAdapterName>>>,
    load_shell_env_task: Shared<Task<Option<HashMap<String, String>>>>,
}

impl DapAdapterDelegate {
    pub fn new(
        fs: Arc<dyn Fs>,
        worktree_id: WorktreeId,
        node_runtime: NodeRuntime,
        http_client: Arc<dyn HttpClient>,
        language_registry: Arc<LanguageRegistry>,
        toolchain_store: Arc<dyn LanguageToolchainStore>,
        load_shell_env_task: Shared<Task<Option<HashMap<String, String>>>>,
    ) -> Self {
        Self {
            fs,
            worktree_id,
            http_client,
            node_runtime,
            toolchain_store,
            language_registry,
            load_shell_env_task,
            updated_adapters: Default::default(),
        }
    }
}

#[async_trait(?Send)]
impl dap::adapters::DapDelegate for DapAdapterDelegate {
    fn worktree_id(&self) -> WorktreeId {
        self.worktree_id
    }

    fn http_client(&self) -> Arc<dyn HttpClient> {
        self.http_client.clone()
    }

    fn node_runtime(&self) -> NodeRuntime {
        self.node_runtime.clone()
    }

    fn fs(&self) -> Arc<dyn Fs> {
        self.fs.clone()
    }

    fn updated_adapters(&self) -> Arc<Mutex<HashSet<DebugAdapterName>>> {
        self.updated_adapters.clone()
    }

    fn update_status(&self, dap_name: DebugAdapterName, status: dap::adapters::DapStatus) {
        let name = SharedString::from(dap_name.to_string());
        let status = match status {
            DapStatus::None => BinaryStatus::None,
            DapStatus::Downloading => BinaryStatus::Downloading,
            DapStatus::Failed { error } => BinaryStatus::Failed { error },
            DapStatus::CheckingForUpdate => BinaryStatus::CheckingForUpdate,
        };

        self.language_registry
            .update_dap_status(LanguageServerName(name), status);
    }

    fn which(&self, command: &OsStr) -> Option<PathBuf> {
        which::which(command).ok()
    }

    async fn shell_env(&self) -> HashMap<String, String> {
        let task = self.load_shell_env_task.clone();
        task.await.unwrap_or_default()
    }

    fn toolchain_store(&self) -> Arc<dyn LanguageToolchainStore> {
        self.toolchain_store.clone()
    }
}