use super::{
    breakpoint_store::BreakpointStore,
    dap_command::EvaluateCommand,
    locators,
    session::{self, Session, SessionStateEvent},
};
use crate::{
    InlayHint, InlayHintLabel, ProjectEnvironment, ResolveState,
    project_settings::ProjectSettings,
    terminals::{SshCommand, wrap_for_ssh},
    worktree_store::WorktreeStore,
};
use anyhow::{Result, anyhow};
use async_trait::async_trait;
use collections::HashMap;
use dap::{
    Capabilities, CompletionItem, CompletionsArguments, DapRegistry, DebugRequest,
    EvaluateArguments, EvaluateArgumentsContext, EvaluateResponse, RunInTerminalRequestArguments,
    Source, StackFrameId, StartDebuggingRequestArguments,
    adapters::{
        DapStatus, DebugAdapterBinary, DebugAdapterName, DebugTaskDefinition, TcpArguments,
    },
    client::SessionId,
    messages::Message,
    requests::{Completions, Evaluate, Request as _, RunInTerminal, StartDebugging},
};
use fs::Fs;
use futures::{
    channel::mpsc,
    future::{Shared, join_all},
};
use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task};
use http_client::HttpClient;
use language::{
    BinaryStatus, Buffer, LanguageRegistry, LanguageToolchainStore,
    language_settings::InlayHintKind, range_from_lsp,
};
use lsp::LanguageServerName;
use node_runtime::NodeRuntime;
use remote::SshRemoteClient;
use rpc::{
    AnyProtoClient, TypedEnvelope,
    proto::{self},
};
use serde_json::Value;
use settings::{Settings, WorktreeId};
use smol::{lock::Mutex, stream::StreamExt};
use std::{
    borrow::Borrow,
    collections::{BTreeMap, HashSet},
    ffi::OsStr,
    net::Ipv4Addr,
    path::{Path, PathBuf},
    sync::{Arc, Once},
};
use task::{DebugScenario, SpawnInTerminal};
use util::ResultExt as _;
use worktree::Worktree;

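/// Events emitted by [`DapStore`] as debug sessions start, shut down, or request work
/// from the rest of the app (e.g. spawning a terminal or a child debug session).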
#[derive(Debug)]
pub enum DapStoreEvent {
    DebugClientStarted(SessionId),
    DebugSessionInitialized(SessionId),
    DebugClientShutdown(SessionId),
    DebugClientEvent {
        session_id: SessionId,
        message: Message,
    },
    RunInTerminal {
        session_id: SessionId,
        title: Option<String>,
        cwd: Option<Arc<Path>>,
        command: Option<String>,
        args: Vec<String>,
        envs: HashMap<String, String>,
        sender: mpsc::Sender<Result<u32>>,
    },
    SpawnChildSession {
        request: StartDebuggingRequestArguments,
        parent_session: Entity<Session>,
    },
    Notification(String),
    RemoteHasInitialized,
}

#[allow(clippy::large_enum_variant)]
enum DapStoreMode {
    Local(LocalDapStore),
    Ssh(SshDapStore),
    Collab,
}

pub struct LocalDapStore {
    fs: Arc<dyn Fs>,
    node_runtime: NodeRuntime,
    http_client: Arc<dyn HttpClient>,
    environment: Entity<ProjectEnvironment>,
    language_registry: Arc<LanguageRegistry>,
    toolchain_store: Arc<dyn LanguageToolchainStore>,
}

pub struct SshDapStore {
    ssh_client: Entity<SshRemoteClient>,
    upstream_client: AnyProtoClient,
    upstream_project_id: u64,
}

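/// Central store for debug sessions within a project.
///
/// Depending on [`DapStoreMode`], it either launches debug adapters locally, proxies
/// requests to an SSH remote, or (for collab) reports debugging as unsupported.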
pub struct DapStore {
    mode: DapStoreMode,
    downstream_client: Option<(AnyProtoClient, u64)>,
    breakpoint_store: Entity<BreakpointStore>,
    worktree_store: Entity<WorktreeStore>,
    sessions: BTreeMap<SessionId, Entity<Session>>,
    next_session_id: u32,
    start_debugging_tx: futures::channel::mpsc::UnboundedSender<(SessionId, Message)>,
    _start_debugging_task: Task<()>,
}

impl EventEmitter<DapStoreEvent> for DapStore {}

impl DapStore {
    pub fn init(client: &AnyProtoClient, cx: &mut App) {
        static ADD_LOCATORS: Once = Once::new();
        client.add_entity_request_handler(Self::handle_run_debug_locator);
        client.add_entity_request_handler(Self::handle_get_debug_adapter_binary);
        ADD_LOCATORS.call_once(|| {
            DapRegistry::global(cx)
                .add_locator("cargo".into(), Arc::new(locators::cargo::CargoLocator {}))
        });
    }

    #[expect(clippy::too_many_arguments)]
    pub fn new_local(
        http_client: Arc<dyn HttpClient>,
        node_runtime: NodeRuntime,
        fs: Arc<dyn Fs>,
        language_registry: Arc<LanguageRegistry>,
        environment: Entity<ProjectEnvironment>,
        toolchain_store: Arc<dyn LanguageToolchainStore>,
        worktree_store: Entity<WorktreeStore>,
        breakpoint_store: Entity<BreakpointStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let mode = DapStoreMode::Local(LocalDapStore {
            fs,
            environment,
            http_client,
            node_runtime,
            toolchain_store,
            language_registry,
        });

        Self::new(mode, breakpoint_store, worktree_store, cx)
    }

    pub fn new_ssh(
        project_id: u64,
        ssh_client: Entity<SshRemoteClient>,
        breakpoint_store: Entity<BreakpointStore>,
        worktree_store: Entity<WorktreeStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let mode = DapStoreMode::Ssh(SshDapStore {
            upstream_client: ssh_client.read(cx).proto_client(),
            ssh_client,
            upstream_project_id: project_id,
        });

        Self::new(mode, breakpoint_store, worktree_store, cx)
    }

    pub fn new_collab(
        _project_id: u64,
        _upstream_client: AnyProtoClient,
        breakpoint_store: Entity<BreakpointStore>,
        worktree_store: Entity<WorktreeStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(DapStoreMode::Collab, breakpoint_store, worktree_store, cx)
    }

    fn new(
        mode: DapStoreMode,
        breakpoint_store: Entity<BreakpointStore>,
        worktree_store: Entity<WorktreeStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let (start_debugging_tx, mut message_rx) =
            futures::channel::mpsc::unbounded::<(SessionId, Message)>();
        let task = cx.spawn(async move |this, cx| {
            while let Some((session_id, message)) = message_rx.next().await {
                if let Message::Request(request) = message {
                    let _ = this
                        .update(cx, |this, cx| {
                            if request.command == StartDebugging::COMMAND {
                                this.handle_start_debugging_request(session_id, request, cx)
                                    .detach_and_log_err(cx);
                            } else if request.command == RunInTerminal::COMMAND {
                                this.handle_run_in_terminal_request(session_id, request, cx)
                                    .detach_and_log_err(cx);
                            }
                        })
                        .log_err();
                }
            }
        });

        Self {
            mode,
            _start_debugging_task: task,
            start_debugging_tx,
            next_session_id: 0,
            downstream_client: None,
            breakpoint_store,
            worktree_store,
            sessions: Default::default(),
        }
    }

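    /// Resolves the [`DebugAdapterBinary`] used to launch the adapter for `definition`.
    ///
    /// Locally this consults the adapter registry, the project's `dap` settings for a
    /// user-installed binary, and the directory environment for the working directory.
    /// Over SSH the binary is resolved remotely and the command is wrapped so it runs
    /// through the SSH connection, with a local port forwarded for TCP adapters.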
    pub fn get_debug_adapter_binary(
        &mut self,
        definition: DebugTaskDefinition,
        cx: &mut Context<Self>,
    ) -> Task<Result<DebugAdapterBinary>> {
        match &self.mode {
            DapStoreMode::Local(_) => {
                let Some(worktree) = self.worktree_store.read(cx).visible_worktrees(cx).next()
                else {
                    return Task::ready(Err(anyhow!("Failed to find a worktree")));
                };
                let Some(adapter) = DapRegistry::global(cx).adapter(&definition.adapter) else {
                    return Task::ready(Err(anyhow!("Failed to find a debug adapter")));
                };

                let user_installed_path = ProjectSettings::get_global(cx)
                    .dap
                    .get(&adapter.name())
                    .and_then(|s| s.binary.as_ref().map(PathBuf::from));

                let delegate = self.delegate(&worktree, cx);
                let cwd: Arc<Path> = definition
                    .cwd()
                    .unwrap_or(worktree.read(cx).abs_path().as_ref())
                    .into();

                cx.spawn(async move |this, cx| {
                    let mut binary = adapter
                        .get_binary(&delegate, &definition, user_installed_path, cx)
                        .await?;

                    let env = this
                        .update(cx, |this, cx| {
                            this.as_local()
                                .unwrap()
                                .environment
                                .update(cx, |environment, cx| {
                                    environment.get_directory_environment(cwd, cx)
                                })
                        })?
                        .await;

                    if let Some(mut env) = env {
                        env.extend(std::mem::take(&mut binary.envs));
                        binary.envs = env;
                    }

                    Ok(binary)
                })
            }
            DapStoreMode::Ssh(ssh) => {
                let request = ssh.upstream_client.request(proto::GetDebugAdapterBinary {
                    project_id: ssh.upstream_project_id,
                    definition: Some(definition.to_proto()),
                });
                let ssh_client = ssh.ssh_client.clone();

                cx.spawn(async move |_, cx| {
                    let response = request.await?;
                    let binary = DebugAdapterBinary::from_proto(response)?;
                    let mut ssh_command = ssh_client.update(cx, |ssh, _| {
                        anyhow::Ok(SshCommand {
                            arguments: ssh
                                .ssh_args()
                                .ok_or_else(|| anyhow!("SSH arguments not found"))?,
                        })
                    })??;

                    let mut connection = None;
                    if let Some(c) = binary.connection {
                        let local_bind_addr = Ipv4Addr::new(127, 0, 0, 1);
                        let port =
                            dap::transport::TcpTransport::unused_port(local_bind_addr).await?;

                        ssh_command.add_port_forwarding(port, c.host.to_string(), c.port);
                        connection = Some(TcpArguments {
                            port: c.port,
                            host: local_bind_addr,
                            timeout: c.timeout,
                        })
                    }

                    let (program, args) = wrap_for_ssh(
                        &ssh_command,
                        Some((&binary.command, &binary.arguments)),
                        binary.cwd.as_deref(),
                        binary.envs,
                        None,
                    );

                    Ok(DebugAdapterBinary {
                        command: program,
                        arguments: args,
                        envs: HashMap::default(),
                        cwd: None,
                        connection,
                        request_args: binary.request_args,
                    })
                })
            }
            DapStoreMode::Collab => {
                Task::ready(Err(anyhow!("Debugging is not yet supported via collab")))
            }
        }
    }

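    /// Builds a [`DebugScenario`] from a build task if any registered locator accepts it,
    /// expanding `$VAR`-style arguments from the task's environment first. Returns `None`
    /// when no locator can handle the task.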
    pub fn debug_scenario_for_build_task(
        &self,
        mut build: SpawnInTerminal,
        unresolved_label: SharedString,
        adapter: SharedString,
        cx: &mut App,
    ) -> Option<DebugScenario> {
        build.args = build
            .args
            .into_iter()
            .map(|arg| {
                if arg.starts_with("$") {
                    arg.strip_prefix("$")
                        .and_then(|arg| build.env.get(arg).map(ToOwned::to_owned))
                        .unwrap_or_else(|| arg)
                } else {
                    arg
                }
            })
            .collect();

        DapRegistry::global(cx)
            .locators()
            .values()
            .find(|locator| locator.accepts(&build))
            .map(|_| DebugScenario {
                adapter,
                label: format!("Debug `{}`", build.label).into(),
                build: Some(unresolved_label),
                request: None,
                initialize_args: None,
                tcp_connection: None,
                stop_on_entry: None,
            })
    }

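    /// Runs a matching debug locator against `build_command` to produce a
    /// [`DebugRequest`]. Locally this tries each accepting locator in turn; over SSH the
    /// request is forwarded to the remote host.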
    pub fn run_debug_locator(
        &mut self,
        mut build_command: SpawnInTerminal,
        cx: &mut Context<Self>,
    ) -> Task<Result<DebugRequest>> {
        match &self.mode {
            DapStoreMode::Local(_) => {
                // Pre-resolve args with existing environment.
                build_command.args = build_command
                    .args
                    .into_iter()
                    .map(|arg| {
                        if arg.starts_with("$") {
                            arg.strip_prefix("$")
                                .and_then(|arg| build_command.env.get(arg).map(ToOwned::to_owned))
                                .unwrap_or_else(|| arg)
                        } else {
                            arg
                        }
                    })
                    .collect();
                let locators = DapRegistry::global(cx)
                    .locators()
                    .values()
                    .filter(|locator| locator.accepts(&build_command))
                    .cloned()
                    .collect::<Vec<_>>();
                if !locators.is_empty() {
                    cx.background_spawn(async move {
                        for locator in locators {
                            let result = locator
                                .run(build_command.clone())
                                .await
                                .log_with_level(log::Level::Error);
                            if let Some(result) = result {
                                return Ok(result);
                            }
                        }
                        Err(anyhow!(
                            "None of the locators for task `{}` completed successfully",
                            build_command.label
                        ))
                    })
                } else {
                    Task::ready(Err(anyhow!(
                        "Couldn't find any locator for task `{}`. Specify the `attach` or `launch` arguments in your debug scenario definition",
                        build_command.label
                    )))
                }
            }
            DapStoreMode::Ssh(ssh) => {
                let request = ssh.upstream_client.request(proto::RunDebugLocators {
                    project_id: ssh.upstream_project_id,
                    build_command: Some(build_command.to_proto()),
                });
                cx.background_spawn(async move {
                    let response = request.await?;
                    DebugRequest::from_proto(response)
                })
            }
            DapStoreMode::Collab => {
                Task::ready(Err(anyhow!("Debugging is not yet supported via collab")))
            }
        }
    }

    fn as_local(&self) -> Option<&LocalDapStore> {
        match &self.mode {
            DapStoreMode::Local(local_dap_store) => Some(local_dap_store),
            _ => None,
        }
    }

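    /// Creates a new [`Session`] for `template`, registering it with this store and
    /// wiring its lifecycle events back into the store (shutdown, restart, running).
    ///
    /// A minimal usage sketch (illustrative only; it assumes you already hold a
    /// `dap_store: Entity<DapStore>`, a `definition: DebugTaskDefinition`, and a
    /// suitable gpui context `cx`):
    ///
    /// ```ignore
    /// let session = dap_store.update(cx, |store, cx| {
    ///     // No parent session: this is a root debug session.
    ///     store.new_session(definition.clone(), None, cx)
    /// });
    /// dap_store
    ///     .update(cx, |store, cx| store.boot_session(session, cx))
    ///     .detach_and_log_err(cx);
    /// ```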
    pub fn new_session(
        &mut self,
        template: DebugTaskDefinition,
        parent_session: Option<Entity<Session>>,
        cx: &mut Context<Self>,
    ) -> Entity<Session> {
        let session_id = SessionId(util::post_inc(&mut self.next_session_id));

        if let Some(session) = &parent_session {
            session.update(cx, |session, _| {
                session.add_child_session_id(session_id);
            });
        }

        let start_debugging_tx = self.start_debugging_tx.clone();

        let session = Session::new(
            self.breakpoint_store.clone(),
            session_id,
            parent_session,
            template.clone(),
            start_debugging_tx,
            cx,
        );

        self.sessions.insert(session_id, session.clone());
        cx.notify();

        cx.subscribe(&session, {
            move |this: &mut DapStore, _, event: &SessionStateEvent, cx| match event {
                SessionStateEvent::Shutdown => {
                    this.shutdown_session(session_id, cx).detach_and_log_err(cx);
                }
                SessionStateEvent::Restart => {}
                SessionStateEvent::Running => {
                    cx.emit(DapStoreEvent::DebugClientStarted(session_id));
                }
            }
        })
        .detach();

        session
    }

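    /// Resolves the adapter binary for an existing session and boots it against the
    /// first visible worktree. Typically called right after [`Self::new_session`]; see
    /// the sketch on that method.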
    pub fn boot_session(
        &self,
        session: Entity<Session>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(worktree) = self.worktree_store.read(cx).visible_worktrees(cx).next() else {
            return Task::ready(Err(anyhow!("Failed to find a worktree")));
        };

        let dap_store = cx.weak_entity();
        let breakpoint_store = self.breakpoint_store.clone();
        let definition = session.read(cx).definition();

        cx.spawn({
            let session = session.clone();
            async move |this, cx| {
                let binary = this
                    .update(cx, |this, cx| {
                        this.get_debug_adapter_binary(definition.clone(), cx)
                    })?
                    .await?;

                session
                    .update(cx, |session, cx| {
                        session.boot(binary, worktree, breakpoint_store, dap_store, cx)
                    })?
                    .await
            }
        })
    }

    pub fn session_by_id(
        &self,
        session_id: impl Borrow<SessionId>,
    ) -> Option<Entity<session::Session>> {
        self.sessions.get(session_id.borrow()).cloned()
    }

    pub fn sessions(&self) -> impl Iterator<Item = &Entity<Session>> {
        self.sessions.values()
    }

    pub fn capabilities_by_id(
        &self,
        session_id: impl Borrow<SessionId>,
        cx: &App,
    ) -> Option<Capabilities> {
        let session_id = session_id.borrow();
        self.sessions
            .get(session_id)
            .map(|client| client.read(cx).capabilities.clone())
    }

    pub fn breakpoint_store(&self) -> &Entity<BreakpointStore> {
        &self.breakpoint_store
    }

    pub fn worktree_store(&self) -> &Entity<WorktreeStore> {
        &self.worktree_store
    }

    #[allow(dead_code)]
    async fn handle_ignore_breakpoint_state(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::IgnoreBreakpointState>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let session_id = SessionId::from_proto(envelope.payload.session_id);

        this.update(&mut cx, |this, cx| {
            if let Some(session) = this.session_by_id(&session_id) {
                session.update(cx, |session, cx| {
                    session.set_ignore_breakpoints(envelope.payload.ignore, cx)
                })
            } else {
                Task::ready(HashMap::default())
            }
        })?
        .await;

        Ok(())
    }

    fn delegate(&self, worktree: &Entity<Worktree>, cx: &mut App) -> DapAdapterDelegate {
        let Some(local_store) = self.as_local() else {
            unimplemented!("Starting session on remote side");
        };

        DapAdapterDelegate::new(
            local_store.fs.clone(),
            worktree.read(cx).id(),
            local_store.node_runtime.clone(),
            local_store.http_client.clone(),
            local_store.language_registry.clone(),
            local_store.toolchain_store.clone(),
            local_store.environment.update(cx, |env, cx| {
                env.get_worktree_environment(worktree.clone(), cx)
            }),
        )
    }

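    /// Handles a reverse `startDebugging` request from the adapter by emitting
    /// [`DapStoreEvent::SpawnChildSession`] and acknowledging the request, reporting
    /// failure if the request arguments cannot be parsed.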
    fn handle_start_debugging_request(
        &mut self,
        session_id: SessionId,
        request: dap::messages::Request,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(parent_session) = self.session_by_id(session_id) else {
            return Task::ready(Err(anyhow!("Session not found")));
        };
        let request_seq = request.seq;

        let launch_request: Option<Result<StartDebuggingRequestArguments, _>> = request
            .arguments
            .as_ref()
            .map(|value| serde_json::from_value(value.clone()));

        let mut success = true;
        if let Some(Ok(request)) = launch_request {
            cx.emit(DapStoreEvent::SpawnChildSession {
                request,
                parent_session: parent_session.clone(),
            });
        } else {
            log::error!(
                "Failed to parse launch request arguments: {:?}",
                request.arguments
            );
            success = false;
        }

        cx.spawn(async move |_, cx| {
            parent_session
                .update(cx, |session, cx| {
                    session.respond_to_client(
                        request_seq,
                        success,
                        StartDebugging::COMMAND.to_string(),
                        None,
                        cx,
                    )
                })?
                .await
        })
    }

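    /// Handles a reverse `runInTerminal` request: validates the requested cwd, emits
    /// [`DapStoreEvent::RunInTerminal`] so the workspace can spawn a terminal, then
    /// relays the resulting process id (or error) back to the debug adapter.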
    fn handle_run_in_terminal_request(
        &mut self,
        session_id: SessionId,
        request: dap::messages::Request,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(session) = self.session_by_id(session_id) else {
            return Task::ready(Err(anyhow!("Session not found")));
        };

        let request_args = serde_json::from_value::<RunInTerminalRequestArguments>(
            request.arguments.unwrap_or_default(),
        )
        .expect("To parse RunInTerminalRequestArguments");

        let seq = request.seq;

        let cwd = Path::new(&request_args.cwd);

        match cwd.try_exists() {
            Ok(false) | Err(_) if !request_args.cwd.is_empty() => {
                return session.update(cx, |session, cx| {
                    session.respond_to_client(
                        seq,
                        false,
                        RunInTerminal::COMMAND.to_string(),
                        serde_json::to_value(dap::ErrorResponse {
                            error: Some(dap::Message {
                                id: seq,
                                format: format!("Received invalid/unknown cwd: {cwd:?}"),
                                variables: None,
                                send_telemetry: None,
                                show_user: None,
                                url: None,
                                url_label: None,
                            }),
                        })
                        .ok(),
                        cx,
                    )
                });
            }
            _ => (),
        }
        let mut args = request_args.args.clone();

        // Handle a special case for the Node.js debug adapter: if only the Node binary
        // path is provided, leave the command as `None` so that no bare Node REPL is
        // spawned. Users are expected to provide a full command (e.g. `node test.js`),
        // which lets the Node.js debug client attach correctly.
        let command = if args.len() > 1 {
            Some(args.remove(0))
        } else {
            None
        };

        let mut envs: HashMap<String, String> = Default::default();
        if let Some(Value::Object(env)) = request_args.env {
            for (key, value) in env {
                let value_str = match (key.as_str(), value) {
                    (_, Value::String(value)) => value,
                    _ => continue,
                };

                envs.insert(key, value_str);
            }
        }

        let (tx, mut rx) = mpsc::channel::<Result<u32>>(1);
        let cwd = Some(cwd)
            .filter(|cwd| !cwd.as_os_str().is_empty())
            .map(Arc::from)
            .or_else(|| {
                self.session_by_id(session_id)
                    .and_then(|session| session.read(cx).binary().cwd.as_deref().map(Arc::from))
            });
        cx.emit(DapStoreEvent::RunInTerminal {
            session_id,
            title: request_args.title,
            cwd,
            command,
            args,
            envs,
            sender: tx,
        });
        cx.notify();

        let session = session.downgrade();
        cx.spawn(async move |_, cx| {
            let (success, body) = match rx.next().await {
                Some(Ok(pid)) => (
                    true,
                    serde_json::to_value(dap::RunInTerminalResponse {
                        process_id: None,
                        shell_process_id: Some(pid as u64),
                    })
                    .ok(),
                ),
                Some(Err(error)) => (
                    false,
                    serde_json::to_value(dap::ErrorResponse {
                        error: Some(dap::Message {
                            id: seq,
                            format: error.to_string(),
                            variables: None,
                            send_telemetry: None,
                            show_user: None,
                            url: None,
                            url_label: None,
                        }),
                    })
                    .ok(),
                ),
                None => (
                    false,
                    serde_json::to_value(dap::ErrorResponse {
                        error: Some(dap::Message {
                            id: seq,
                            format: "failed to receive response from spawn terminal".to_string(),
                            variables: None,
                            send_telemetry: None,
                            show_user: None,
                            url: None,
                            url_label: None,
                        }),
                    })
                    .ok(),
                ),
            };

            session
                .update(cx, |session, cx| {
                    session.respond_to_client(
                        seq,
                        success,
                        RunInTerminal::COMMAND.to_string(),
                        body,
                        cx,
                    )
                })?
                .await
        })
    }

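    /// Sends an `evaluate` request for `expression` in the given stack frame to the
    /// session's debug adapter.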
    pub fn evaluate(
        &self,
        session_id: &SessionId,
        stack_frame_id: u64,
        expression: String,
        context: EvaluateArgumentsContext,
        source: Option<Source>,
        cx: &mut Context<Self>,
    ) -> Task<Result<EvaluateResponse>> {
        let Some(client) = self
            .session_by_id(session_id)
            .and_then(|client| client.read(cx).adapter_client())
        else {
            return Task::ready(Err(anyhow!("Could not find client: {:?}", session_id)));
        };

        cx.background_executor().spawn(async move {
            client
                .request::<Evaluate>(EvaluateArguments {
                    expression,
                    frame_id: Some(stack_frame_id),
                    context: Some(context),
                    format: None,
                    line: None,
                    column: None,
                    source,
                })
                .await
        })
    }

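    /// Requests completion targets from the debug adapter for `text` at
    /// `completion_column` within the given stack frame.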
    pub fn completions(
        &self,
        session_id: &SessionId,
        stack_frame_id: u64,
        text: String,
        completion_column: u64,
        cx: &mut Context<Self>,
    ) -> Task<Result<Vec<CompletionItem>>> {
        let Some(client) = self
            .session_by_id(session_id)
            .and_then(|client| client.read(cx).adapter_client())
        else {
            return Task::ready(Err(anyhow!("Could not find client: {:?}", session_id)));
        };

        cx.background_executor().spawn(async move {
            Ok(client
                .request::<Completions>(CompletionsArguments {
                    frame_id: Some(stack_frame_id),
                    line: None,
                    text,
                    column: completion_column,
                })
                .await?
                .targets)
        })
    }

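    /// Converts LSP inline values into inlay hints, resolving variable lookups against
    /// the session's variables for `stack_frame_id` and evaluating expressions through
    /// the adapter when needed.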
    pub fn resolve_inline_values(
        &self,
        session: Entity<Session>,
        stack_frame_id: StackFrameId,
        buffer_handle: Entity<Buffer>,
        inline_values: Vec<lsp::InlineValue>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Vec<InlayHint>>> {
        let snapshot = buffer_handle.read(cx).snapshot();
        let all_variables = session.read(cx).variables_by_stack_frame_id(stack_frame_id);

        cx.spawn(async move |_, cx| {
            let mut inlay_hints = Vec::with_capacity(inline_values.len());
            for inline_value in inline_values.iter() {
                match inline_value {
                    lsp::InlineValue::Text(text) => {
                        inlay_hints.push(InlayHint {
                            position: snapshot.anchor_after(range_from_lsp(text.range).end),
                            label: InlayHintLabel::String(format!(": {}", text.text)),
                            kind: Some(InlayHintKind::Type),
                            padding_left: false,
                            padding_right: false,
                            tooltip: None,
                            resolve_state: ResolveState::Resolved,
                        });
                    }
                    lsp::InlineValue::VariableLookup(variable_lookup) => {
                        let range = range_from_lsp(variable_lookup.range);

                        let mut variable_name = variable_lookup
                            .variable_name
                            .clone()
                            .unwrap_or_else(|| snapshot.text_for_range(range.clone()).collect());

                        if !variable_lookup.case_sensitive_lookup {
                            variable_name = variable_name.to_ascii_lowercase();
                        }

                        let Some(variable) = all_variables.iter().find(|variable| {
                            if variable_lookup.case_sensitive_lookup {
                                variable.name == variable_name
                            } else {
                                variable.name.to_ascii_lowercase() == variable_name
                            }
                        }) else {
                            continue;
                        };

                        inlay_hints.push(InlayHint {
                            position: snapshot.anchor_after(range.end),
                            label: InlayHintLabel::String(format!(": {}", variable.value)),
                            kind: Some(InlayHintKind::Type),
                            padding_left: false,
                            padding_right: false,
                            tooltip: None,
                            resolve_state: ResolveState::Resolved,
                        });
                    }
                    lsp::InlineValue::EvaluatableExpression(expression) => {
                        let range = range_from_lsp(expression.range);

                        let expression = expression
                            .expression
                            .clone()
                            .unwrap_or_else(|| snapshot.text_for_range(range.clone()).collect());

                        let Ok(eval_task) = session.update(cx, |session, _| {
                            session.mode.request_dap(EvaluateCommand {
                                expression,
                                frame_id: Some(stack_frame_id),
                                source: None,
                                context: Some(EvaluateArgumentsContext::Variables),
                            })
                        }) else {
                            continue;
                        };

                        if let Some(response) = eval_task.await.log_err() {
                            inlay_hints.push(InlayHint {
                                position: snapshot.anchor_after(range.end),
                                label: InlayHintLabel::String(format!(": {}", response.result)),
                                kind: Some(InlayHintKind::Type),
                                padding_left: false,
                                padding_right: false,
                                tooltip: None,
                                resolve_state: ResolveState::Resolved,
                            });
                        }
                    }
                }
            }

            Ok(inlay_hints)
        })
    }

    pub fn shutdown_sessions(&mut self, cx: &mut Context<Self>) -> Task<()> {
        let mut tasks = vec![];
        for session_id in self.sessions.keys().cloned().collect::<Vec<_>>() {
            tasks.push(self.shutdown_session(session_id, cx));
        }

        cx.background_executor().spawn(async move {
            futures::future::join_all(tasks).await;
        })
    }

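    /// Shuts down a single session, recursively shutting down its children first and,
    /// if this was the parent's last child, the parent session as well.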
    pub fn shutdown_session(
        &mut self,
        session_id: SessionId,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(session) = self.sessions.remove(&session_id) else {
            return Task::ready(Err(anyhow!("Could not find session: {:?}", session_id)));
        };

        let shutdown_children = session
            .read(cx)
            .child_session_ids()
            .iter()
            .map(|session_id| self.shutdown_session(*session_id, cx))
            .collect::<Vec<_>>();

        let shutdown_parent_task = if let Some(parent_session) = session
            .read(cx)
            .parent_id(cx)
            .and_then(|session_id| self.session_by_id(session_id))
        {
            let shutdown_id = parent_session.update(cx, |parent_session, _| {
                parent_session.remove_child_session_id(session_id);

                if parent_session.child_session_ids().is_empty() {
                    Some(parent_session.session_id())
                } else {
                    None
                }
            });

            shutdown_id.map(|session_id| self.shutdown_session(session_id, cx))
        } else {
            None
        };

        let shutdown_task = session.update(cx, |this, cx| this.shutdown(cx));

        cx.background_spawn(async move {
            if !shutdown_children.is_empty() {
                let _ = join_all(shutdown_children).await;
            }

            shutdown_task.await;

            if let Some(parent_task) = shutdown_parent_task {
                parent_task.await?;
            }

            Ok(())
        })
    }

    pub fn shared(
        &mut self,
        project_id: u64,
        downstream_client: AnyProtoClient,
        _: &mut Context<Self>,
    ) {
        self.downstream_client = Some((downstream_client.clone(), project_id));
    }

    pub fn unshared(&mut self, cx: &mut Context<Self>) {
        self.downstream_client.take();

        cx.notify();
    }

    async fn handle_run_debug_locator(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RunDebugLocators>,
        mut cx: AsyncApp,
    ) -> Result<proto::DebugRequest> {
        let task = envelope
            .payload
            .build_command
            .ok_or_else(|| anyhow!("missing build command"))?;
        let build_task = SpawnInTerminal::from_proto(task);
        let request = this
            .update(&mut cx, |this, cx| this.run_debug_locator(build_task, cx))?
            .await?;

        Ok(request.to_proto())
    }

    async fn handle_get_debug_adapter_binary(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDebugAdapterBinary>,
        mut cx: AsyncApp,
    ) -> Result<proto::DebugAdapterBinary> {
        let definition = DebugTaskDefinition::from_proto(
            envelope
                .payload
                .definition
                .ok_or_else(|| anyhow!("missing definition"))?,
        )?;
        let binary = this
            .update(&mut cx, |this, cx| {
                this.get_debug_adapter_binary(definition, cx)
            })?
            .await?;
        Ok(binary.to_proto())
    }
}

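/// Delegate handed to debug adapters so they can access project services (filesystem,
/// Node runtime, HTTP client, toolchains, shell environment) while being located,
/// downloaded, and launched.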
#[derive(Clone)]
pub struct DapAdapterDelegate {
    fs: Arc<dyn Fs>,
    worktree_id: WorktreeId,
    node_runtime: NodeRuntime,
    http_client: Arc<dyn HttpClient>,
    language_registry: Arc<LanguageRegistry>,
    toolchain_store: Arc<dyn LanguageToolchainStore>,
    updated_adapters: Arc<Mutex<HashSet<DebugAdapterName>>>,
    load_shell_env_task: Shared<Task<Option<HashMap<String, String>>>>,
}

impl DapAdapterDelegate {
    pub fn new(
        fs: Arc<dyn Fs>,
        worktree_id: WorktreeId,
        node_runtime: NodeRuntime,
        http_client: Arc<dyn HttpClient>,
        language_registry: Arc<LanguageRegistry>,
        toolchain_store: Arc<dyn LanguageToolchainStore>,
        load_shell_env_task: Shared<Task<Option<HashMap<String, String>>>>,
    ) -> Self {
        Self {
            fs,
            worktree_id,
            http_client,
            node_runtime,
            toolchain_store,
            language_registry,
            load_shell_env_task,
            updated_adapters: Default::default(),
        }
    }
}

#[async_trait(?Send)]
impl dap::adapters::DapDelegate for DapAdapterDelegate {
    fn worktree_id(&self) -> WorktreeId {
        self.worktree_id
    }

    fn http_client(&self) -> Arc<dyn HttpClient> {
        self.http_client.clone()
    }

    fn node_runtime(&self) -> NodeRuntime {
        self.node_runtime.clone()
    }

    fn fs(&self) -> Arc<dyn Fs> {
        self.fs.clone()
    }

    fn updated_adapters(&self) -> Arc<Mutex<HashSet<DebugAdapterName>>> {
        self.updated_adapters.clone()
    }

    fn update_status(&self, dap_name: DebugAdapterName, status: dap::adapters::DapStatus) {
        let name = SharedString::from(dap_name.to_string());
        let status = match status {
            DapStatus::None => BinaryStatus::None,
            DapStatus::Downloading => BinaryStatus::Downloading,
            DapStatus::Failed { error } => BinaryStatus::Failed { error },
            DapStatus::CheckingForUpdate => BinaryStatus::CheckingForUpdate,
        };

        self.language_registry
            .update_dap_status(LanguageServerName(name), status);
    }

    fn which(&self, command: &OsStr) -> Option<PathBuf> {
        which::which(command).ok()
    }

    async fn shell_env(&self) -> HashMap<String, String> {
        let task = self.load_shell_env_task.clone();
        task.await.unwrap_or_default()
    }

    fn toolchain_store(&self) -> Arc<dyn LanguageToolchainStore> {
        self.toolchain_store.clone()
    }
}