use anyhow::{Context as _, Result, anyhow};
use lsp::LanguageServerId;

use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
use fs::Fs;
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel};
use http_client::HttpClient;
use language::{Buffer, BufferEvent, LanguageRegistry, proto::serialize_operation};
use node_runtime::NodeRuntime;
use project::{
    LspStore, LspStoreEvent, ManifestTree, PrettierStore, ProjectEnvironment, ProjectPath,
    ToolchainStore, WorktreeId,
    agent_server_store::AgentServerStore,
    buffer_store::{BufferStore, BufferStoreEvent},
    debugger::{breakpoint_store::BreakpointStore, dap_store::DapStore},
    git_store::GitStore,
    lsp_store::log_store::{self, GlobalLogStore, LanguageServerKind},
    project_settings::SettingsObserver,
    search::SearchQuery,
    task_store::TaskStore,
    worktree_store::WorktreeStore,
};
use rpc::{
    AnyProtoClient, TypedEnvelope,
    proto::{self, REMOTE_SERVER_PEER_ID, REMOTE_SERVER_PROJECT_ID},
};

use settings::{Settings as _, initial_server_settings_content};
use smol::stream::StreamExt;
use std::{
    path::{Path, PathBuf},
    sync::{Arc, atomic::AtomicUsize},
};
use sysinfo::{ProcessRefreshKind, RefreshKind, System, UpdateKind};
use util::{ResultExt, paths::PathStyle, rel_path::RelPath};
use worktree::Worktree;

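/// Server-side counterpart of a remote project. It owns the same stores a local
/// `Project` would (worktrees, buffers, LSP, DAP, git, tasks, settings, extensions),
/// marks them as shared with the remote client over RPC, and registers the message
/// handlers the client drives them through.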
pub struct HeadlessProject {
    pub fs: Arc<dyn Fs>,
    pub session: AnyProtoClient,
    pub worktree_store: Entity<WorktreeStore>,
    pub buffer_store: Entity<BufferStore>,
    pub lsp_store: Entity<LspStore>,
    pub task_store: Entity<TaskStore>,
    pub dap_store: Entity<DapStore>,
    pub agent_server_store: Entity<AgentServerStore>,
    pub settings_observer: Entity<SettingsObserver>,
    pub next_entry_id: Arc<AtomicUsize>,
    pub languages: Arc<LanguageRegistry>,
    pub extensions: Entity<HeadlessExtensionStore>,
    pub git_store: Entity<GitStore>,
    pub environment: Entity<ProjectEnvironment>,
    // Used mostly to keep the toolchain store alive for RPC handlers.
    // The local variant is used within the LSP store, but that's a separate entity.
    pub _toolchain_store: Entity<ToolchainStore>,
}

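/// Everything needed to construct a `HeadlessProject` on the remote server.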
pub struct HeadlessAppState {
    pub session: AnyProtoClient,
    pub fs: Arc<dyn Fs>,
    pub http_client: Arc<dyn HttpClient>,
    pub node_runtime: NodeRuntime,
    pub languages: Arc<LanguageRegistry>,
    pub extension_host_proxy: Arc<ExtensionHostProxy>,
}

impl HeadlessProject {
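    /// Registers the global state this crate depends on: settings, languages,
    /// project settings, extension settings, and the LSP log store.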
    pub fn init(cx: &mut App) {
        settings::init(cx);
        language::init(cx);
        project::Project::init_settings(cx);
        extension_host::ExtensionSettings::register(cx);
        log_store::init(true, cx);
    }

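    /// Creates all the local stores, marks them as shared with the remote client,
    /// and wires up the RPC subscriptions and request handlers.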
    pub fn new(
        HeadlessAppState {
            session,
            fs,
            http_client,
            node_runtime,
            languages,
            extension_host_proxy: proxy,
        }: HeadlessAppState,
        cx: &mut Context<Self>,
    ) -> Self {
        debug_adapter_extension::init(proxy.clone(), cx);
        languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx);

        let worktree_store = cx.new(|cx| {
            let mut store = WorktreeStore::local(true, fs.clone());
            store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            store
        });

        let environment = cx.new(|cx| ProjectEnvironment::new(None, cx));
        let manifest_tree = ManifestTree::new(worktree_store.clone(), cx);
        let toolchain_store = cx.new(|cx| {
            ToolchainStore::local(
                languages.clone(),
                worktree_store.clone(),
                environment.clone(),
                manifest_tree.clone(),
                fs.clone(),
                cx,
            )
        });

        let buffer_store = cx.new(|cx| {
            let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
            buffer_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            buffer_store
        });

        let breakpoint_store =
            cx.new(|_| BreakpointStore::local(worktree_store.clone(), buffer_store.clone()));

        let dap_store = cx.new(|cx| {
            let mut dap_store = DapStore::new_local(
                http_client.clone(),
                node_runtime.clone(),
                fs.clone(),
                environment.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                worktree_store.clone(),
                breakpoint_store.clone(),
                true,
                cx,
            );
            dap_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            dap_store
        });

        let git_store = cx.new(|cx| {
            let mut store = GitStore::local(
                &worktree_store,
                buffer_store.clone(),
                environment.clone(),
                fs.clone(),
                cx,
            );
            store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            store
        });

        let prettier_store = cx.new(|cx| {
            PrettierStore::new(
                node_runtime.clone(),
                fs.clone(),
                languages.clone(),
                worktree_store.clone(),
                cx,
            )
        });

        let task_store = cx.new(|cx| {
            let mut task_store = TaskStore::local(
                buffer_store.downgrade(),
                worktree_store.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                environment.clone(),
                cx,
            );
            task_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            task_store
        });
        let settings_observer = cx.new(|cx| {
            let mut observer = SettingsObserver::new_local(
                fs.clone(),
                worktree_store.clone(),
                task_store.clone(),
                cx,
            );
            observer.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            observer
        });

        let lsp_store = cx.new(|cx| {
            let mut lsp_store = LspStore::new_local(
                buffer_store.clone(),
                worktree_store.clone(),
                prettier_store.clone(),
                toolchain_store
                    .read(cx)
                    .as_local_store()
                    .expect("Toolchain store to be local")
                    .clone(),
                environment.clone(),
                manifest_tree,
                languages.clone(),
                http_client.clone(),
                fs.clone(),
                cx,
            );
            lsp_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            lsp_store
        });

        let agent_server_store = cx.new(|cx| {
            let mut agent_server_store = AgentServerStore::local(
                node_runtime.clone(),
                fs.clone(),
                environment.clone(),
                http_client.clone(),
                cx,
            );
            agent_server_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            agent_server_store
        });

        cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
        language_extension::init(
            language_extension::LspAccess::ViaLspStore(lsp_store.clone()),
            proxy.clone(),
            languages.clone(),
        );

        cx.subscribe(&buffer_store, |_this, _buffer_store, event, cx| {
            if let BufferStoreEvent::BufferAdded(buffer) = event {
                cx.subscribe(buffer, Self::on_buffer_event).detach();
            }
        })
        .detach();

        let extensions = HeadlessExtensionStore::new(
            fs.clone(),
            http_client.clone(),
            paths::remote_extensions_dir().to_path_buf(),
            proxy,
            node_runtime,
            cx,
        );

        // local_machine -> ssh handlers
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &worktree_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &buffer_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &cx.entity());
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &lsp_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &task_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &toolchain_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &dap_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &settings_observer);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &git_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &agent_server_store);

        session.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
        session.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
        session.add_request_handler(cx.weak_entity(), Self::handle_shutdown_remote_server);
        session.add_request_handler(cx.weak_entity(), Self::handle_ping);
        session.add_request_handler(cx.weak_entity(), Self::handle_get_processes);

        session.add_entity_request_handler(Self::handle_add_worktree);
        session.add_request_handler(cx.weak_entity(), Self::handle_remove_worktree);

        session.add_entity_request_handler(Self::handle_open_buffer_by_path);
        session.add_entity_request_handler(Self::handle_open_new_buffer);
        session.add_entity_request_handler(Self::handle_find_search_candidates);
        session.add_entity_request_handler(Self::handle_open_server_settings);
        session.add_entity_request_handler(Self::handle_get_directory_environment);
        session.add_entity_message_handler(Self::handle_toggle_lsp_logs);

        session.add_entity_request_handler(BufferStore::handle_update_buffer);
        session.add_entity_message_handler(BufferStore::handle_close_buffer);

        session.add_request_handler(
            extensions.downgrade(),
            HeadlessExtensionStore::handle_sync_extensions,
        );
        session.add_request_handler(
            extensions.downgrade(),
            HeadlessExtensionStore::handle_install_extension,
        );

        BufferStore::init(&session);
        WorktreeStore::init(&session);
        SettingsObserver::init(&session);
        LspStore::init(&session);
        TaskStore::init(Some(&session));
        ToolchainStore::init(&session);
        DapStore::init(&session, cx);
        // todo(debugger): Re-init the breakpoint store once we set it up for collab
        // BreakpointStore::init(&client);
        GitStore::init(&session);
        AgentServerStore::init_headless(&session);

        HeadlessProject {
            next_entry_id: Default::default(),
            session,
            settings_observer,
            fs,
            worktree_store,
            buffer_store,
            lsp_store,
            task_store,
            dap_store,
            agent_server_store,
            languages,
            extensions,
            git_store,
            environment,
            _toolchain_store: toolchain_store,
        }
    }

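    /// Forwards locally-generated buffer operations to the client so its copy of
    /// the buffer stays in sync.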
    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferEvent::Operation {
            operation,
            is_local: true,
        } = event
        {
            cx.background_spawn(self.session.request(proto::UpdateBuffer {
                project_id: REMOTE_SERVER_PROJECT_ID,
                buffer_id: buffer.read(cx).remote_id().to_proto(),
                operations: vec![serialize_operation(operation)],
            }))
            .detach()
        }
    }

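    /// Mirrors LSP store events to the client: registers servers with the local
    /// log store, relays status updates and notifications, and proxies language
    /// server prompts so the user can answer them from the client.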
    fn on_lsp_store_event(
        &mut self,
        lsp_store: Entity<LspStore>,
        event: &LspStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            LspStoreEvent::LanguageServerAdded(id, name, worktree_id) => {
                let log_store = cx
                    .try_global::<GlobalLogStore>()
                    .map(|lsp_logs| lsp_logs.0.clone());
                if let Some(log_store) = log_store {
                    log_store.update(cx, |log_store, cx| {
                        log_store.add_language_server(
                            LanguageServerKind::LocalSsh {
                                lsp_store: self.lsp_store.downgrade(),
                            },
                            *id,
                            Some(name.clone()),
                            *worktree_id,
                            lsp_store.read(cx).language_server_for_id(*id),
                            cx,
                        );
                    });
                }
            }
            LspStoreEvent::LanguageServerRemoved(id) => {
                let log_store = cx
                    .try_global::<GlobalLogStore>()
                    .map(|lsp_logs| lsp_logs.0.clone());
                if let Some(log_store) = log_store {
                    log_store.update(cx, |log_store, cx| {
                        log_store.remove_language_server(*id, cx);
                    });
                }
            }
            LspStoreEvent::LanguageServerUpdate {
                language_server_id,
                name,
                message,
            } => {
                self.session
                    .send(proto::UpdateLanguageServer {
                        project_id: REMOTE_SERVER_PROJECT_ID,
                        server_name: name.as_ref().map(|name| name.to_string()),
                        language_server_id: language_server_id.to_proto(),
                        variant: Some(message.clone()),
                    })
                    .log_err();
            }
            LspStoreEvent::Notification(message) => {
                self.session
                    .send(proto::Toast {
                        project_id: REMOTE_SERVER_PROJECT_ID,
                        notification_id: "lsp".to_string(),
                        message: message.clone(),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerPrompt(prompt) => {
                let request = self.session.request(proto::LanguageServerPromptRequest {
                    project_id: REMOTE_SERVER_PROJECT_ID,
                    actions: prompt
                        .actions
                        .iter()
                        .map(|action| action.title.to_string())
                        .collect(),
                    level: Some(prompt_to_proto(prompt)),
                    lsp_name: prompt.lsp_name.clone(),
                    message: prompt.message.clone(),
                });
                let prompt = prompt.clone();
                cx.background_spawn(async move {
                    let response = request.await?;
                    if let Some(action_response) = response.action_response {
                        prompt.respond(action_response as usize).await;
                    }
                    anyhow::Ok(())
                })
                .detach();
            }
            _ => {}
        }
    }

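    /// Adds a worktree for the requested path, canonicalizing it first (and falling
    /// back to the canonicalized parent if the path itself doesn't exist yet).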
    pub async fn handle_add_worktree(
        this: Entity<Self>,
        message: TypedEnvelope<proto::AddWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::AddWorktreeResponse> {
        use client::ErrorCodeExt;
        let fs = this.read_with(&cx, |this, _| this.fs.clone())?;
        let path = PathBuf::from(shellexpand::tilde(&message.payload.path).to_string());

        let canonicalized = match fs.canonicalize(&path).await {
            Ok(path) => path,
            Err(e) => {
                let mut parent = path
                    .parent()
                    .ok_or(e)
                    .with_context(|| format!("{path:?} does not exist"))?;
                if parent == Path::new("") {
                    parent = util::paths::home_dir();
                }
                let parent = fs.canonicalize(parent).await.map_err(|_| {
                    anyhow!(
                        proto::ErrorCode::DevServerProjectPathDoesNotExist
                            .with_tag("path", path.to_string_lossy().as_ref())
                    )
                })?;
                parent.join(path.file_name().unwrap())
            }
        };

        let worktree = this
            .read_with(&cx.clone(), |this, _| {
                Worktree::local(
                    Arc::from(canonicalized.as_path()),
                    message.payload.visible,
                    this.fs.clone(),
                    this.next_entry_id.clone(),
                    &mut cx,
                )
            })?
            .await?;

        let response = this.read_with(&cx, |_, cx| {
            let worktree = worktree.read(cx);
            proto::AddWorktreeResponse {
                worktree_id: worktree.id().to_proto(),
                canonicalized_path: canonicalized.to_string_lossy().into_owned(),
            }
        })?;

        // We spawn this asynchronously so that we can send the response back
        // *before* `worktree_store.add()` sends out UpdateProject requests to
        // the client about the new worktree.
        //
        // That lets the client set up its reference/handles for the newly-added
        // worktree before it gets interrupted by an UpdateProject request.
        //
        // This fixes the following race: the client sends AddWorktree, the
        // headless project sends out a project update, the client receives it
        // and immediately drops its reference to the new worktree (causing it
        // to be dropped on the headless project as well), and only then does
        // the client receive the response to AddWorktree.
        cx.spawn(async move |cx| {
            this.update(cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.add(&worktree, cx);
                });
            })
            .log_err();
        })
        .detach();

        Ok(response)
    }

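    /// Removes the worktree with the given id from the worktree store.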
    pub async fn handle_remove_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        this.update(&mut cx, |this, cx| {
            this.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.remove_worktree(worktree_id, cx);
            });
        })?;
        Ok(proto::Ack {})
    }

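    /// Opens the buffer at the given project path and registers it with the peer
    /// so the client can start collaborating on it.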
    pub async fn handle_open_buffer_by_path(
        this: Entity<Self>,
        message: TypedEnvelope<proto::OpenBufferByPath>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
        let path = RelPath::from_proto(&message.payload.path)?;
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath { worktree_id, path },
                    None,
                    false,
                    true,
                    cx,
                )
            });
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

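    /// Creates an empty untitled buffer and registers it with the peer.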
    pub async fn handle_open_new_buffer(
        this: Entity<Self>,
        _message: TypedEnvelope<proto::OpenNewBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this
                .buffer_store
                .update(cx, |buffer_store, cx| buffer_store.create_buffer(true, cx));
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

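    /// Toggles RPC trace logging for a single language server in the local log store.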
    async fn handle_toggle_lsp_logs(
        _: Entity<Self>,
        envelope: TypedEnvelope<proto::ToggleLspLogs>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let server_id = LanguageServerId::from_proto(envelope.payload.server_id);
        let lsp_logs = cx
            .update(|cx| {
                cx.try_global::<GlobalLogStore>()
                    .map(|lsp_logs| lsp_logs.0.clone())
            })?
            .context("lsp logs store is missing")?;

        lsp_logs.update(&mut cx, |lsp_logs, _| {
            // RPC logs are very noisy, so they also need to be toggled explicitly on
            // the headless server. The rest of the logs for the SSH project must
            // always stay enabled, e.g. so that language server error logs reach the
            // client before anything has been toggled.
            if envelope.payload.enabled {
                lsp_logs.enable_rpc_trace_for_language_server(server_id);
            } else {
                lsp_logs.disable_rpc_trace_for_language_server(server_id);
            }
        })?;
        Ok(())
    }

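    /// Opens the server's settings file as a buffer, seeding it with the initial
    /// server settings content if it is empty, and registers it with the peer.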
    async fn handle_open_server_settings(
        this: Entity<Self>,
        _: TypedEnvelope<proto::OpenServerSettings>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let settings_path = paths::settings_file();
        let (worktree, path) = this
            .update(&mut cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.find_or_create_worktree(settings_path, false, cx)
                })
            })?
            .await?;

        let (buffer, buffer_store) = this.update(&mut cx, |this, cx| {
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id: worktree.read(cx).id(),
                        path,
                    },
                    None,
                    false,
                    true,
                    cx,
                )
            });

            (buffer, this.buffer_store.clone())
        })?;

        let buffer = buffer.await?;

        let buffer_id = cx.update(|cx| {
            if buffer.read(cx).is_empty() {
                buffer.update(cx, |buffer, cx| {
                    buffer.edit([(0..0, initial_server_settings_content())], None, cx)
                });
            }

            let buffer_id = buffer.read(cx).remote_id();

            buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                    .detach_and_log_err(cx);
            });

            buffer_id
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

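    /// Finds buffers matching a search query, streams each one to the peer, and
    /// returns the ids of all matching buffers.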
    async fn handle_find_search_candidates(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::FindSearchCandidates>,
        mut cx: AsyncApp,
    ) -> Result<proto::FindSearchCandidatesResponse> {
        let message = envelope.payload;
        let query = SearchQuery::from_proto(
            message.query.context("missing query field")?,
            PathStyle::local(),
        )?;
        let results = this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.find_search_candidates(&query, message.limit as _, this.fs.clone(), cx)
            })
        })?;

        let mut response = proto::FindSearchCandidatesResponse {
            buffer_ids: Vec::new(),
        };

        let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone())?;

        while let Ok(buffer) = results.recv().await {
            let buffer_id = buffer.read_with(&cx, |this, _| this.remote_id())?;
            response.buffer_ids.push(buffer_id.to_proto());
            buffer_store
                .update(&mut cx, |buffer_store, cx| {
                    buffer_store.create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                })?
                .await?;
        }

        Ok(response)
    }

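    /// Lists the entries of a directory on the remote machine, optionally including
    /// whether each entry is itself a directory.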
    async fn handle_list_remote_directory(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ListRemoteDirectory>,
        cx: AsyncApp,
    ) -> Result<proto::ListRemoteDirectoryResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = PathBuf::from(shellexpand::tilde(&envelope.payload.path).to_string());
        let check_info = envelope
            .payload
            .config
            .as_ref()
            .is_some_and(|config| config.is_dir);

        let mut entries = Vec::new();
        let mut entry_info = Vec::new();
        let mut response = fs.read_dir(&expanded).await?;
        while let Some(path) = response.next().await {
            let path = path?;
            if let Some(file_name) = path.file_name() {
                entries.push(file_name.to_string_lossy().into_owned());
                if check_info {
                    let is_dir = fs.is_dir(&path).await;
                    entry_info.push(proto::EntryInfo { is_dir });
                }
            }
        }
        Ok(proto::ListRemoteDirectoryResponse {
            entries,
            entry_info,
        })
    }

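    /// Reports whether the given path exists on the remote machine and whether it
    /// is a directory, after tilde-expanding it.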
    async fn handle_get_path_metadata(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPathMetadata>,
        cx: AsyncApp,
    ) -> Result<proto::GetPathMetadataResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = PathBuf::from(shellexpand::tilde(&envelope.payload.path).to_string());

        let metadata = fs.metadata(&expanded).await?;
        let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false);

        Ok(proto::GetPathMetadataResponse {
            exists: metadata.is_some(),
            is_dir,
            path: expanded.to_string_lossy().into_owned(),
        })
    }

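    /// Acknowledges the request and then shuts the remote server process down.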
    async fn handle_shutdown_remote_server(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::ShutdownRemoteServer>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        cx.spawn(async move |cx| {
            cx.update(|cx| {
                // TODO: This is a hack, because in a headless project, shutdown isn't executed
                // when calling quit, but it should be.
                cx.shutdown();
                cx.quit();
            })
        })
        .detach();

        Ok(proto::Ack {})
    }

    pub async fn handle_ping(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::Ping>,
        _cx: AsyncApp,
    ) -> Result<proto::Ack> {
        log::debug!("Received ping from client");
        Ok(proto::Ack {})
    }

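    /// Returns the list of processes running on the remote machine, sorted by name.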
    async fn handle_get_processes(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::GetProcesses>,
        _cx: AsyncApp,
    ) -> Result<proto::GetProcessesResponse> {
        let mut processes = Vec::new();
        let refresh_kind = RefreshKind::nothing().with_processes(
            ProcessRefreshKind::nothing()
                .without_tasks()
                .with_cmd(UpdateKind::Always),
        );

        for process in System::new_with_specifics(refresh_kind)
            .processes()
            .values()
        {
            let name = process.name().to_string_lossy().into_owned();
            let command = process
                .cmd()
                .iter()
                .map(|s| s.to_string_lossy().into_owned())
                .collect::<Vec<_>>();

            processes.push(proto::ProcessInfo {
                pid: process.pid().as_u32(),
                name,
                command,
            });
        }

        processes.sort_by_key(|p| p.name.clone());

        Ok(proto::GetProcessesResponse { processes })
    }

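    /// Resolves the shell environment for a directory on the remote machine and
    /// returns it to the client.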
    async fn handle_get_directory_environment(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDirectoryEnvironment>,
        mut cx: AsyncApp,
    ) -> Result<proto::DirectoryEnvironment> {
        let shell = task::shell_from_proto(envelope.payload.shell.context("missing shell")?)?;
        let directory = PathBuf::from(envelope.payload.directory);
        let environment = this
            .update(&mut cx, |this, cx| {
                this.environment.update(cx, |environment, cx| {
                    environment.get_local_directory_environment(&shell, directory.into(), cx)
                })
            })?
            .await
            .context("failed to get directory environment")?
            .into_iter()
            .collect();
        Ok(proto::DirectoryEnvironment { environment })
    }
}

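/// Converts a GPUI prompt level into its protobuf representation.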
fn prompt_to_proto(
    prompt: &project::LanguageServerPromptRequest,
) -> proto::language_server_prompt_request::Level {
    match prompt.level {
        PromptLevel::Info => proto::language_server_prompt_request::Level::Info(
            proto::language_server_prompt_request::Info {},
        ),
        PromptLevel::Warning => proto::language_server_prompt_request::Level::Warning(
            proto::language_server_prompt_request::Warning {},
        ),
        PromptLevel::Critical => proto::language_server_prompt_request::Level::Critical(
            proto::language_server_prompt_request::Critical {},
        ),
    }
}