use super::{
    breakpoint_store::BreakpointStore,
    locators,
    session::{self, Session, SessionStateEvent},
};
use crate::{
    InlayHint, InlayHintLabel, ProjectEnvironment, ResolveState,
    project_settings::ProjectSettings,
    terminals::{SshCommand, wrap_for_ssh},
    worktree_store::WorktreeStore,
};
use anyhow::{Result, anyhow};
use async_trait::async_trait;
use collections::HashMap;
use dap::{
    Capabilities, CompletionItem, CompletionsArguments, DapRegistry, DebugRequest,
    EvaluateArguments, EvaluateArgumentsContext, EvaluateResponse, RunInTerminalRequestArguments,
    Source, StackFrameId, StartDebuggingRequestArguments,
    adapters::{
        DapStatus, DebugAdapterBinary, DebugAdapterName, DebugTaskDefinition, TcpArguments,
    },
    client::SessionId,
    messages::Message,
    requests::{Completions, Evaluate, Request as _, RunInTerminal, StartDebugging},
};
use fs::Fs;
use futures::{
    channel::mpsc,
    future::{Shared, join_all},
};
use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task};
use http_client::HttpClient;
use language::{
    BinaryStatus, Buffer, LanguageRegistry, LanguageToolchainStore,
    language_settings::InlayHintKind, range_from_lsp,
};
use lsp::LanguageServerName;
use node_runtime::NodeRuntime;
use remote::SshRemoteClient;
use rpc::{
    AnyProtoClient, TypedEnvelope,
    proto::{self},
};
use serde_json::Value;
use settings::{Settings, WorktreeId};
use smol::{lock::Mutex, stream::StreamExt};
use std::{
    borrow::Borrow,
    collections::{BTreeMap, HashSet},
    ffi::OsStr,
    net::Ipv4Addr,
    path::{Path, PathBuf},
    sync::{Arc, Once},
};
use task::{DebugScenario, SpawnInTerminal};
use util::ResultExt as _;
use worktree::Worktree;

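/// Events emitted by [`DapStore`] so that the rest of the project (and the UI)
/// can react to session lifecycle changes, adapter messages, and requests that
/// need user-facing handling, such as running a command in a terminal.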
#[derive(Debug)]
pub enum DapStoreEvent {
    DebugClientStarted(SessionId),
    DebugSessionInitialized(SessionId),
    DebugClientShutdown(SessionId),
    DebugClientEvent {
        session_id: SessionId,
        message: Message,
    },
    RunInTerminal {
        session_id: SessionId,
        title: Option<String>,
        cwd: Option<Arc<Path>>,
        command: Option<String>,
        args: Vec<String>,
        envs: HashMap<String, String>,
        sender: mpsc::Sender<Result<u32>>,
    },
    SpawnChildSession {
        request: StartDebuggingRequestArguments,
        parent_session: Entity<Session>,
    },
    Notification(String),
    RemoteHasInitialized,
}

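/// Where debug sessions are actually started: locally, on a remote host over
/// SSH, or not at all (debugging is not yet supported for collab guests).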
#[allow(clippy::large_enum_variant)]
enum DapStoreMode {
    Local(LocalDapStore),
    Ssh(SshDapStore),
    Collab,
}

pub struct LocalDapStore {
    fs: Arc<dyn Fs>,
    node_runtime: NodeRuntime,
    http_client: Arc<dyn HttpClient>,
    environment: Entity<ProjectEnvironment>,
    language_registry: Arc<LanguageRegistry>,
    toolchain_store: Arc<dyn LanguageToolchainStore>,
}

pub struct SshDapStore {
    ssh_client: Entity<SshRemoteClient>,
    upstream_client: AnyProtoClient,
    upstream_project_id: u64,
}

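/// Central registry for debug sessions in a project. It creates sessions,
/// resolves debug adapter binaries for them, forwards reverse requests from
/// adapters (`startDebugging`, `runInTerminal`), and shuts sessions down.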
pub struct DapStore {
    mode: DapStoreMode,
    downstream_client: Option<(AnyProtoClient, u64)>,
    breakpoint_store: Entity<BreakpointStore>,
    worktree_store: Entity<WorktreeStore>,
    sessions: BTreeMap<SessionId, Entity<Session>>,
    next_session_id: u32,
    start_debugging_tx: futures::channel::mpsc::UnboundedSender<(SessionId, Message)>,
    _start_debugging_task: Task<()>,
}

impl EventEmitter<DapStoreEvent> for DapStore {}

impl DapStore {
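    /// Registers the proto request handlers for this store and, once per
    /// process, the built-in debug locators (currently `cargo`).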
    pub fn init(client: &AnyProtoClient, cx: &mut App) {
        static ADD_LOCATORS: Once = Once::new();
        client.add_entity_request_handler(Self::handle_run_debug_locator);
        client.add_entity_request_handler(Self::handle_get_debug_adapter_binary);
        ADD_LOCATORS.call_once(|| {
            DapRegistry::global(cx)
                .add_locator("cargo".into(), Arc::new(locators::cargo::CargoLocator {}))
        });
    }

    #[expect(clippy::too_many_arguments)]
    pub fn new_local(
        http_client: Arc<dyn HttpClient>,
        node_runtime: NodeRuntime,
        fs: Arc<dyn Fs>,
        language_registry: Arc<LanguageRegistry>,
        environment: Entity<ProjectEnvironment>,
        toolchain_store: Arc<dyn LanguageToolchainStore>,
        worktree_store: Entity<WorktreeStore>,
        breakpoint_store: Entity<BreakpointStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let mode = DapStoreMode::Local(LocalDapStore {
            fs,
            environment,
            http_client,
            node_runtime,
            toolchain_store,
            language_registry,
        });

        Self::new(mode, breakpoint_store, worktree_store, cx)
    }

    pub fn new_ssh(
        project_id: u64,
        ssh_client: Entity<SshRemoteClient>,
        breakpoint_store: Entity<BreakpointStore>,
        worktree_store: Entity<WorktreeStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let mode = DapStoreMode::Ssh(SshDapStore {
            upstream_client: ssh_client.read(cx).proto_client(),
            ssh_client,
            upstream_project_id: project_id,
        });

        Self::new(mode, breakpoint_store, worktree_store, cx)
    }

    pub fn new_collab(
        _project_id: u64,
        _upstream_client: AnyProtoClient,
        breakpoint_store: Entity<BreakpointStore>,
        worktree_store: Entity<WorktreeStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(DapStoreMode::Collab, breakpoint_store, worktree_store, cx)
    }

    fn new(
        mode: DapStoreMode,
        breakpoint_store: Entity<BreakpointStore>,
        worktree_store: Entity<WorktreeStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let (start_debugging_tx, mut message_rx) =
            futures::channel::mpsc::unbounded::<(SessionId, Message)>();
        let task = cx.spawn(async move |this, cx| {
            while let Some((session_id, message)) = message_rx.next().await {
                match message {
                    Message::Request(request) => {
                        let _ = this
                            .update(cx, |this, cx| {
                                if request.command == StartDebugging::COMMAND {
                                    this.handle_start_debugging_request(session_id, request, cx)
                                        .detach_and_log_err(cx);
                                } else if request.command == RunInTerminal::COMMAND {
                                    this.handle_run_in_terminal_request(session_id, request, cx)
                                        .detach_and_log_err(cx);
                                }
                            })
                            .log_err();
                    }
                    _ => {}
                }
            }
        });

        Self {
            mode,
            _start_debugging_task: task,
            start_debugging_tx,
            next_session_id: 0,
            downstream_client: None,
            breakpoint_store,
            worktree_store,
            sessions: Default::default(),
        }
    }

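    /// Resolves the [`DebugAdapterBinary`] for a debug task. Locally this asks
    /// the registered adapter for its binary and merges in the directory
    /// environment for the task's cwd; over SSH the request is forwarded
    /// upstream and the returned command is wrapped in an `ssh` invocation,
    /// with a local port forwarded to the adapter's TCP endpoint when one is
    /// used.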
    pub fn get_debug_adapter_binary(
        &mut self,
        definition: DebugTaskDefinition,
        cx: &mut Context<Self>,
    ) -> Task<Result<DebugAdapterBinary>> {
        match &self.mode {
            DapStoreMode::Local(_) => {
                let Some(worktree) = self.worktree_store.read(cx).visible_worktrees(cx).next()
                else {
                    return Task::ready(Err(anyhow!("Failed to find a worktree")));
                };
                let Some(adapter) = DapRegistry::global(cx).adapter(&definition.adapter) else {
                    return Task::ready(Err(anyhow!("Failed to find a debug adapter")));
                };

                let user_installed_path = ProjectSettings::get_global(cx)
                    .dap
                    .get(&adapter.name())
                    .and_then(|s| s.binary.as_ref().map(PathBuf::from));

                let delegate = self.delegate(&worktree, cx);
                let cwd: Arc<Path> = definition
                    .cwd()
                    .unwrap_or(worktree.read(cx).abs_path().as_ref())
                    .into();

                cx.spawn(async move |this, cx| {
                    let mut binary = adapter
                        .get_binary(&delegate, &definition, user_installed_path, cx)
                        .await?;

                    let env = this
                        .update(cx, |this, cx| {
                            this.as_local()
                                .unwrap()
                                .environment
                                .update(cx, |environment, cx| {
                                    environment.get_directory_environment(cwd, cx)
                                })
                        })?
                        .await;

                    if let Some(mut env) = env {
                        env.extend(std::mem::take(&mut binary.envs));
                        binary.envs = env;
                    }

                    Ok(binary)
                })
            }
            DapStoreMode::Ssh(ssh) => {
                let request = ssh.upstream_client.request(proto::GetDebugAdapterBinary {
                    project_id: ssh.upstream_project_id,
                    definition: Some(definition.to_proto()),
                });
                let ssh_client = ssh.ssh_client.clone();

                cx.spawn(async move |_, cx| {
                    let response = request.await?;
                    let binary = DebugAdapterBinary::from_proto(response)?;
                    let mut ssh_command = ssh_client.update(cx, |ssh, _| {
                        anyhow::Ok(SshCommand {
                            arguments: ssh
                                .ssh_args()
                                .ok_or_else(|| anyhow!("SSH arguments not found"))?,
                        })
                    })??;

                    let mut connection = None;
                    if let Some(c) = binary.connection {
                        let local_bind_addr = Ipv4Addr::new(127, 0, 0, 1);
                        let port =
                            dap::transport::TcpTransport::unused_port(local_bind_addr).await?;

                        ssh_command.add_port_forwarding(port, c.host.to_string(), c.port);
                        // Connect through the locally forwarded port rather than the remote one.
                        connection = Some(TcpArguments {
                            port,
                            host: local_bind_addr,
                            timeout: c.timeout,
                        })
                    }

                    let (program, args) = wrap_for_ssh(
                        &ssh_command,
                        Some((&binary.command, &binary.arguments)),
                        binary.cwd.as_deref(),
                        binary.envs,
                        None,
                    );

                    Ok(DebugAdapterBinary {
                        command: program,
                        arguments: args,
                        envs: HashMap::default(),
                        cwd: None,
                        connection,
                        request_args: binary.request_args,
                    })
                })
            }
            DapStoreMode::Collab => {
                Task::ready(Err(anyhow!("Debugging is not yet supported via collab")))
            }
        }
    }

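    /// Builds a [`DebugScenario`] that wraps an existing build task, if any
    /// registered locator knows how to turn that task into a debug request.
    /// Returns `None` when no locator accepts the task.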
    pub fn debug_scenario_for_build_task(
        &self,
        mut build: SpawnInTerminal,
        unresolved_label: SharedString,
        adapter: SharedString,
        cx: &mut App,
    ) -> Option<DebugScenario> {
        build.args = build
            .args
            .into_iter()
            .map(|arg| {
                if arg.starts_with("$") {
                    arg.strip_prefix("$")
                        .and_then(|arg| build.env.get(arg).map(ToOwned::to_owned))
                        .unwrap_or_else(|| arg)
                } else {
                    arg
                }
            })
            .collect();

        DapRegistry::global(cx)
            .locators()
            .values()
            .find(|locator| locator.accepts(&build))
            .map(|_| DebugScenario {
                adapter,
                label: format!("Debug `{}`", build.label).into(),
                build: Some(unresolved_label),
                request: None,
                initialize_args: None,
                tcp_connection: None,
                stop_on_entry: None,
            })
    }

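    /// Runs the registered debug locators against a build task to produce a
    /// concrete [`DebugRequest`]. Locally, every locator that accepts the task
    /// is tried in turn; over SSH the work is delegated to the remote host.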
    pub fn run_debug_locator(
        &mut self,
        mut build_command: SpawnInTerminal,
        cx: &mut Context<Self>,
    ) -> Task<Result<DebugRequest>> {
        match &self.mode {
            DapStoreMode::Local(_) => {
                // Pre-resolve args with existing environment.
                build_command.args = build_command
                    .args
                    .into_iter()
                    .map(|arg| {
                        if arg.starts_with("$") {
                            arg.strip_prefix("$")
                                .and_then(|arg| build_command.env.get(arg).map(ToOwned::to_owned))
                                .unwrap_or_else(|| arg)
                        } else {
                            arg
                        }
                    })
                    .collect();
                let locators = DapRegistry::global(cx)
                    .locators()
                    .values()
                    .filter(|locator| locator.accepts(&build_command))
                    .cloned()
                    .collect::<Vec<_>>();
                if !locators.is_empty() {
                    cx.background_spawn(async move {
                        for locator in locators {
                            let result = locator
                                .run(build_command.clone())
                                .await
                                .log_with_level(log::Level::Error);
                            if let Some(result) = result {
                                return Ok(result);
                            }
                        }
                        Err(anyhow!(
                            "None of the locators for task `{}` completed successfully",
                            build_command.label
                        ))
                    })
                } else {
                    Task::ready(Err(anyhow!(
                        "Couldn't find any locator for task `{}`. Specify the `attach` or `launch` arguments in your debug scenario definition",
                        build_command.label
                    )))
                }
            }
            DapStoreMode::Ssh(ssh) => {
                let request = ssh.upstream_client.request(proto::RunDebugLocators {
                    project_id: ssh.upstream_project_id,
                    build_command: Some(build_command.to_proto()),
                });
                cx.background_spawn(async move {
                    let response = request.await?;
                    DebugRequest::from_proto(response)
                })
            }
            DapStoreMode::Collab => {
                Task::ready(Err(anyhow!("Debugging is not yet supported via collab")))
            }
        }
    }

    fn as_local(&self) -> Option<&LocalDapStore> {
        match &self.mode {
            DapStoreMode::Local(local_dap_store) => Some(local_dap_store),
            _ => None,
        }
    }

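    /// Creates a new [`Session`] (optionally as a child of an existing one),
    /// registers it in the store, and wires up shutdown/restart/running event
    /// handling. The session is not started here; see [`Self::boot_session`].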
    pub fn new_session(
        &mut self,
        template: DebugTaskDefinition,
        parent_session: Option<Entity<Session>>,
        cx: &mut Context<Self>,
    ) -> Entity<Session> {
        let session_id = SessionId(util::post_inc(&mut self.next_session_id));

        if let Some(session) = &parent_session {
            session.update(cx, |session, _| {
                session.add_child_session_id(session_id);
            });
        }

        let start_debugging_tx = self.start_debugging_tx.clone();

        let session = Session::new(
            self.breakpoint_store.clone(),
            session_id,
            parent_session,
            template.clone(),
            start_debugging_tx,
            cx,
        );

        self.sessions.insert(session_id, session.clone());
        cx.notify();

        cx.subscribe(&session, {
            move |this: &mut DapStore, _, event: &SessionStateEvent, cx| match event {
                SessionStateEvent::Shutdown => {
                    this.shutdown_session(session_id, cx).detach_and_log_err(cx);
                }
                SessionStateEvent::Restart => {}
                SessionStateEvent::Running => {
                    cx.emit(DapStoreEvent::DebugClientStarted(session_id));
                }
            }
        })
        .detach();

        session
    }

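    /// Starts a previously created session: resolves its debug adapter binary
    /// and hands it to [`Session::boot`] together with the first visible
    /// worktree and this store's breakpoint store.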
    pub fn boot_session(
        &self,
        session: Entity<Session>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(worktree) = self.worktree_store.read(cx).visible_worktrees(cx).next() else {
            return Task::ready(Err(anyhow!("Failed to find a worktree")));
        };

        let dap_store = cx.weak_entity();
        let breakpoint_store = self.breakpoint_store.clone();
        let definition = session.read(cx).definition();

        cx.spawn({
            let session = session.clone();
            async move |this, cx| {
                let binary = this
                    .update(cx, |this, cx| {
                        this.get_debug_adapter_binary(definition.clone(), cx)
                    })?
                    .await?;

                session
                    .update(cx, |session, cx| {
                        session.boot(binary, worktree, breakpoint_store, dap_store, cx)
                    })?
                    .await
            }
        })
    }

    pub fn session_by_id(
        &self,
        session_id: impl Borrow<SessionId>,
    ) -> Option<Entity<session::Session>> {
        self.sessions.get(session_id.borrow()).cloned()
    }

    pub fn sessions(&self) -> impl Iterator<Item = &Entity<Session>> {
        self.sessions.values()
    }

    pub fn capabilities_by_id(
        &self,
        session_id: impl Borrow<SessionId>,
        cx: &App,
    ) -> Option<Capabilities> {
        let session_id = session_id.borrow();
        self.sessions
            .get(session_id)
            .map(|client| client.read(cx).capabilities.clone())
    }

    pub fn breakpoint_store(&self) -> &Entity<BreakpointStore> {
        &self.breakpoint_store
    }

    pub fn worktree_store(&self) -> &Entity<WorktreeStore> {
        &self.worktree_store
    }

    #[allow(dead_code)]
    async fn handle_ignore_breakpoint_state(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::IgnoreBreakpointState>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let session_id = SessionId::from_proto(envelope.payload.session_id);

        this.update(&mut cx, |this, cx| {
            if let Some(session) = this.session_by_id(&session_id) {
                session.update(cx, |session, cx| {
                    session.set_ignore_breakpoints(envelope.payload.ignore, cx)
                })
            } else {
                Task::ready(HashMap::default())
            }
        })?
        .await;

        Ok(())
    }

    fn delegate(&self, worktree: &Entity<Worktree>, cx: &mut App) -> DapAdapterDelegate {
        let Some(local_store) = self.as_local() else {
            unimplemented!("Starting session on remote side");
        };

        DapAdapterDelegate::new(
            local_store.fs.clone(),
            worktree.read(cx).id(),
            local_store.node_runtime.clone(),
            local_store.http_client.clone(),
            local_store.language_registry.clone(),
            local_store.toolchain_store.clone(),
            local_store.environment.update(cx, |env, cx| {
                env.get_worktree_environment(worktree.clone(), cx)
            }),
        )
    }

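    /// Handles the DAP `startDebugging` reverse request: emits
    /// [`DapStoreEvent::SpawnChildSession`] so a child session gets created for
    /// the parent, then responds to the adapter with success or failure.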
    fn handle_start_debugging_request(
        &mut self,
        session_id: SessionId,
        request: dap::messages::Request,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(parent_session) = self.session_by_id(session_id) else {
            return Task::ready(Err(anyhow!("Session not found")));
        };
        let request_seq = request.seq;

        let launch_request: Option<Result<StartDebuggingRequestArguments, _>> = request
            .arguments
            .as_ref()
            .map(|value| serde_json::from_value(value.clone()));

        let mut success = true;
        if let Some(Ok(request)) = launch_request {
            cx.emit(DapStoreEvent::SpawnChildSession {
                request,
                parent_session: parent_session.clone(),
            });
        } else {
            log::error!(
                "Failed to parse launch request arguments: {:?}",
                request.arguments
            );
            success = false;
        }

        cx.spawn(async move |_, cx| {
            parent_session
                .update(cx, |session, cx| {
                    session.respond_to_client(
                        request_seq,
                        success,
                        StartDebugging::COMMAND.to_string(),
                        None,
                        cx,
                    )
                })?
                .await
        })
    }

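    /// Handles the DAP `runInTerminal` reverse request: validates the requested
    /// working directory, emits [`DapStoreEvent::RunInTerminal`] so the UI can
    /// spawn the terminal, and reports the resulting process id (or an error)
    /// back to the adapter.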
    fn handle_run_in_terminal_request(
        &mut self,
        session_id: SessionId,
        request: dap::messages::Request,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(session) = self.session_by_id(session_id) else {
            return Task::ready(Err(anyhow!("Session not found")));
        };

        let request_args = serde_json::from_value::<RunInTerminalRequestArguments>(
            request.arguments.unwrap_or_default(),
        )
        .expect("To parse RunInTerminalRequestArguments");

        let seq = request.seq;

        let cwd = Path::new(&request_args.cwd);

        match cwd.try_exists() {
            Ok(false) | Err(_) if !request_args.cwd.is_empty() => {
                return session.update(cx, |session, cx| {
                    session.respond_to_client(
                        seq,
                        false,
                        RunInTerminal::COMMAND.to_string(),
                        serde_json::to_value(dap::ErrorResponse {
                            error: Some(dap::Message {
                                id: seq,
                                format: format!("Received invalid/unknown cwd: {cwd:?}"),
                                variables: None,
                                send_telemetry: None,
                                show_user: None,
                                url: None,
                                url_label: None,
                            }),
                        })
                        .ok(),
                        cx,
                    )
                });
            }
            _ => (),
        }
        let mut args = request_args.args.clone();

        // Special case for the NodeJS debug adapter: if only the Node binary path is
        // provided, set the command to None so the NodeJS REPL doesn't appear, which is
        // not the desired behavior. The expected usage is for users to provide their own
        // Node command, e.g. `node test.js`, which allows the NodeJS debug client to
        // attach correctly.
        let command = if args.len() > 1 {
            Some(args.remove(0))
        } else {
            None
        };

        let mut envs: HashMap<String, String> = Default::default();
        if let Some(Value::Object(env)) = request_args.env {
            for (key, value) in env {
                let value_str = match (key.as_str(), value) {
                    (_, Value::String(value)) => value,
                    _ => continue,
                };

                envs.insert(key, value_str);
            }
        }

        let (tx, mut rx) = mpsc::channel::<Result<u32>>(1);
        let cwd = Some(cwd)
            .filter(|cwd| !cwd.as_os_str().is_empty())
            .map(Arc::from)
            .or_else(|| {
                self.session_by_id(session_id)
                    .and_then(|session| session.read(cx).binary().cwd.as_deref().map(Arc::from))
            });
        cx.emit(DapStoreEvent::RunInTerminal {
            session_id,
            title: request_args.title,
            cwd,
            command,
            args,
            envs,
            sender: tx,
        });
        cx.notify();

        let session = session.downgrade();
        cx.spawn(async move |_, cx| {
            let (success, body) = match rx.next().await {
                Some(Ok(pid)) => (
                    true,
                    serde_json::to_value(dap::RunInTerminalResponse {
                        process_id: None,
                        shell_process_id: Some(pid as u64),
                    })
                    .ok(),
                ),
                Some(Err(error)) => (
                    false,
                    serde_json::to_value(dap::ErrorResponse {
                        error: Some(dap::Message {
                            id: seq,
                            format: error.to_string(),
                            variables: None,
                            send_telemetry: None,
                            show_user: None,
                            url: None,
                            url_label: None,
                        }),
                    })
                    .ok(),
                ),
                None => (
                    false,
                    serde_json::to_value(dap::ErrorResponse {
                        error: Some(dap::Message {
                            id: seq,
                            format: "failed to receive response from spawn terminal".to_string(),
                            variables: None,
                            send_telemetry: None,
                            show_user: None,
                            url: None,
                            url_label: None,
                        }),
                    })
                    .ok(),
                ),
            };

            session
                .update(cx, |session, cx| {
                    session.respond_to_client(
                        seq,
                        success,
                        RunInTerminal::COMMAND.to_string(),
                        body,
                        cx,
                    )
                })?
                .await
        })
    }

    pub fn evaluate(
        &self,
        session_id: &SessionId,
        stack_frame_id: u64,
        expression: String,
        context: EvaluateArgumentsContext,
        source: Option<Source>,
        cx: &mut Context<Self>,
    ) -> Task<Result<EvaluateResponse>> {
        let Some(client) = self
            .session_by_id(session_id)
            .and_then(|client| client.read(cx).adapter_client())
        else {
            return Task::ready(Err(anyhow!("Could not find client: {:?}", session_id)));
        };

        cx.background_executor().spawn(async move {
            client
                .request::<Evaluate>(EvaluateArguments {
                    expression: expression.clone(),
                    frame_id: Some(stack_frame_id),
                    context: Some(context),
                    format: None,
                    line: None,
                    column: None,
                    source,
                })
                .await
        })
    }

    pub fn completions(
        &self,
        session_id: &SessionId,
        stack_frame_id: u64,
        text: String,
        completion_column: u64,
        cx: &mut Context<Self>,
    ) -> Task<Result<Vec<CompletionItem>>> {
        let Some(client) = self
            .session_by_id(session_id)
            .and_then(|client| client.read(cx).adapter_client())
        else {
            return Task::ready(Err(anyhow!("Could not find client: {:?}", session_id)));
        };

        cx.background_executor().spawn(async move {
            Ok(client
                .request::<Completions>(CompletionsArguments {
                    frame_id: Some(stack_frame_id),
                    line: None,
                    text,
                    column: completion_column,
                })
                .await?
                .targets)
        })
    }

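    /// Converts the inline values reported by a language server into editor
    /// [`InlayHint`]s for the given stack frame, resolving variable lookups
    /// against the session's variables and evaluating expressions through the
    /// debug adapter.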
    pub fn resolve_inline_values(
        &self,
        session: Entity<Session>,
        stack_frame_id: StackFrameId,
        buffer_handle: Entity<Buffer>,
        inline_values: Vec<lsp::InlineValue>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Vec<InlayHint>>> {
        let snapshot = buffer_handle.read(cx).snapshot();
        let all_variables = session.read(cx).variables_by_stack_frame_id(stack_frame_id);

        cx.spawn(async move |_, cx| {
            let mut inlay_hints = Vec::with_capacity(inline_values.len());
            for inline_value in inline_values.iter() {
                match inline_value {
                    lsp::InlineValue::Text(text) => {
                        inlay_hints.push(InlayHint {
                            position: snapshot.anchor_after(range_from_lsp(text.range).end),
                            label: InlayHintLabel::String(format!(": {}", text.text)),
                            kind: Some(InlayHintKind::Type),
                            padding_left: false,
                            padding_right: false,
                            tooltip: None,
                            resolve_state: ResolveState::Resolved,
                        });
                    }
                    lsp::InlineValue::VariableLookup(variable_lookup) => {
                        let range = range_from_lsp(variable_lookup.range);

                        let mut variable_name = variable_lookup
                            .variable_name
                            .clone()
                            .unwrap_or_else(|| snapshot.text_for_range(range.clone()).collect());

                        if !variable_lookup.case_sensitive_lookup {
                            variable_name = variable_name.to_ascii_lowercase();
                        }

                        let Some(variable) = all_variables.iter().find(|variable| {
                            if variable_lookup.case_sensitive_lookup {
                                variable.name == variable_name
                            } else {
                                variable.name.to_ascii_lowercase() == variable_name
                            }
                        }) else {
                            continue;
                        };

                        inlay_hints.push(InlayHint {
                            position: snapshot.anchor_after(range.end),
                            label: InlayHintLabel::String(format!(": {}", variable.value)),
                            kind: Some(InlayHintKind::Type),
                            padding_left: false,
                            padding_right: false,
                            tooltip: None,
                            resolve_state: ResolveState::Resolved,
                        });
                    }
                    lsp::InlineValue::EvaluatableExpression(expression) => {
                        let range = range_from_lsp(expression.range);

                        let expression = expression
                            .expression
                            .clone()
                            .unwrap_or_else(|| snapshot.text_for_range(range.clone()).collect());

                        let Ok(eval_task) = session.update(cx, |session, cx| {
                            session.evaluate(
                                expression,
                                Some(EvaluateArgumentsContext::Variables),
                                Some(stack_frame_id),
                                None,
                                cx,
                            )
                        }) else {
                            continue;
                        };

                        if let Some(response) = eval_task.await {
                            inlay_hints.push(InlayHint {
                                position: snapshot.anchor_after(range.end),
                                label: InlayHintLabel::String(format!(": {}", response.result)),
                                kind: Some(InlayHintKind::Type),
                                padding_left: false,
                                padding_right: false,
                                tooltip: None,
                                resolve_state: ResolveState::Resolved,
                            });
                        }
                    }
                }
            }

            Ok(inlay_hints)
        })
    }

    pub fn shutdown_sessions(&mut self, cx: &mut Context<Self>) -> Task<()> {
        let mut tasks = vec![];
        for session_id in self.sessions.keys().cloned().collect::<Vec<_>>() {
            tasks.push(self.shutdown_session(session_id, cx));
        }

        cx.background_executor().spawn(async move {
            futures::future::join_all(tasks).await;
        })
    }

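    /// Shuts down a session and removes it from the store. Child sessions are
    /// shut down first; if this was the parent's last remaining child, the
    /// parent is shut down as well.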
    pub fn shutdown_session(
        &mut self,
        session_id: SessionId,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let Some(session) = self.sessions.remove(&session_id) else {
            return Task::ready(Err(anyhow!("Could not find session: {:?}", session_id)));
        };

        let shutdown_children = session
            .read(cx)
            .child_session_ids()
            .iter()
            .map(|session_id| self.shutdown_session(*session_id, cx))
            .collect::<Vec<_>>();

        let shutdown_parent_task = if let Some(parent_session) = session
            .read(cx)
            .parent_id(cx)
            .and_then(|session_id| self.session_by_id(session_id))
        {
            let shutdown_id = parent_session.update(cx, |parent_session, _| {
                parent_session.remove_child_session_id(session_id);

                if parent_session.child_session_ids().is_empty() {
                    Some(parent_session.session_id())
                } else {
                    None
                }
            });

            shutdown_id.map(|session_id| self.shutdown_session(session_id, cx))
        } else {
            None
        };

        let shutdown_task = session.update(cx, |this, cx| this.shutdown(cx));

        cx.background_spawn(async move {
            if !shutdown_children.is_empty() {
                let _ = join_all(shutdown_children).await;
            }

            shutdown_task.await;

            if let Some(parent_task) = shutdown_parent_task {
                parent_task.await?;
            }

            Ok(())
        })
    }

    pub fn shared(
        &mut self,
        project_id: u64,
        downstream_client: AnyProtoClient,
        _: &mut Context<Self>,
    ) {
        self.downstream_client = Some((downstream_client.clone(), project_id));
    }

    pub fn unshared(&mut self, cx: &mut Context<Self>) {
        self.downstream_client.take();

        cx.notify();
    }

    async fn handle_run_debug_locator(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RunDebugLocators>,
        mut cx: AsyncApp,
    ) -> Result<proto::DebugRequest> {
        let task = envelope
            .payload
            .build_command
            .ok_or_else(|| anyhow!("missing build command"))?;
        let build_task = SpawnInTerminal::from_proto(task);
        let request = this
            .update(&mut cx, |this, cx| this.run_debug_locator(build_task, cx))?
            .await?;

        Ok(request.to_proto())
    }

    async fn handle_get_debug_adapter_binary(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDebugAdapterBinary>,
        mut cx: AsyncApp,
    ) -> Result<proto::DebugAdapterBinary> {
        let definition = DebugTaskDefinition::from_proto(
            envelope
                .payload
                .definition
                .ok_or_else(|| anyhow!("missing definition"))?,
        )?;
        let binary = this
            .update(&mut cx, |this, cx| {
                this.get_debug_adapter_binary(definition, cx)
            })?
            .await?;
        Ok(binary.to_proto())
    }
}

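/// Everything a debug adapter needs from the project while it is being
/// located, downloaded, or launched: filesystem and HTTP access, the Node
/// runtime, toolchains, and the worktree's shell environment.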
#[derive(Clone)]
pub struct DapAdapterDelegate {
    fs: Arc<dyn Fs>,
    worktree_id: WorktreeId,
    node_runtime: NodeRuntime,
    http_client: Arc<dyn HttpClient>,
    language_registry: Arc<LanguageRegistry>,
    toolchain_store: Arc<dyn LanguageToolchainStore>,
    updated_adapters: Arc<Mutex<HashSet<DebugAdapterName>>>,
    load_shell_env_task: Shared<Task<Option<HashMap<String, String>>>>,
}

impl DapAdapterDelegate {
    pub fn new(
        fs: Arc<dyn Fs>,
        worktree_id: WorktreeId,
        node_runtime: NodeRuntime,
        http_client: Arc<dyn HttpClient>,
        language_registry: Arc<LanguageRegistry>,
        toolchain_store: Arc<dyn LanguageToolchainStore>,
        load_shell_env_task: Shared<Task<Option<HashMap<String, String>>>>,
    ) -> Self {
        Self {
            fs,
            worktree_id,
            http_client,
            node_runtime,
            toolchain_store,
            language_registry,
            load_shell_env_task,
            updated_adapters: Default::default(),
        }
    }
}

#[async_trait(?Send)]
impl dap::adapters::DapDelegate for DapAdapterDelegate {
    fn worktree_id(&self) -> WorktreeId {
        self.worktree_id
    }

    fn http_client(&self) -> Arc<dyn HttpClient> {
        self.http_client.clone()
    }

    fn node_runtime(&self) -> NodeRuntime {
        self.node_runtime.clone()
    }

    fn fs(&self) -> Arc<dyn Fs> {
        self.fs.clone()
    }

    fn updated_adapters(&self) -> Arc<Mutex<HashSet<DebugAdapterName>>> {
        self.updated_adapters.clone()
    }

    fn update_status(&self, dap_name: DebugAdapterName, status: dap::adapters::DapStatus) {
        let name = SharedString::from(dap_name.to_string());
        let status = match status {
            DapStatus::None => BinaryStatus::None,
            DapStatus::Downloading => BinaryStatus::Downloading,
            DapStatus::Failed { error } => BinaryStatus::Failed { error },
            DapStatus::CheckingForUpdate => BinaryStatus::CheckingForUpdate,
        };

        self.language_registry
            .update_dap_status(LanguageServerName(name), status);
    }

    fn which(&self, command: &OsStr) -> Option<PathBuf> {
        which::which(command).ok()
    }

    async fn shell_env(&self) -> HashMap<String, String> {
        let task = self.load_shell_env_task.clone();
        task.await.unwrap_or_default()
    }

    fn toolchain_store(&self) -> Arc<dyn LanguageToolchainStore> {
        self.toolchain_store.clone()
    }
}
1129}