use anyhow::{Context as _, Result, anyhow};
use lsp::LanguageServerId;

use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
use fs::Fs;
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel};
use http_client::HttpClient;
use language::{Buffer, BufferEvent, LanguageRegistry, proto::serialize_operation};
use node_runtime::NodeRuntime;
use project::{
    LspStore, LspStoreEvent, ManifestTree, PrettierStore, ProjectEnvironment, ProjectPath,
    ToolchainStore, WorktreeId,
    agent_server_store::AgentServerStore,
    buffer_store::{BufferStore, BufferStoreEvent},
    debugger::{breakpoint_store::BreakpointStore, dap_store::DapStore},
    git_store::GitStore,
    lsp_store::log_store::{self, GlobalLogStore, LanguageServerKind},
    project_settings::SettingsObserver,
    search::SearchQuery,
    task_store::TaskStore,
    worktree_store::WorktreeStore,
};
use rpc::{
    AnyProtoClient, TypedEnvelope,
    proto::{self, REMOTE_SERVER_PEER_ID, REMOTE_SERVER_PROJECT_ID},
};

use settings::{Settings as _, initial_server_settings_content};
use smol::stream::StreamExt;
use std::{
    path::{Path, PathBuf},
    sync::{Arc, atomic::AtomicUsize},
};
use sysinfo::System;
use util::{ResultExt, paths::PathStyle, rel_path::RelPath};
use worktree::Worktree;

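/// Server-side state of a remote ("headless") project: the worktree, buffer,
/// LSP, task, debug, git, and settings stores backing a project opened over
/// SSH, each shared with the client through the RPC session.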
pub struct HeadlessProject {
    pub fs: Arc<dyn Fs>,
    pub session: AnyProtoClient,
    pub worktree_store: Entity<WorktreeStore>,
    pub buffer_store: Entity<BufferStore>,
    pub lsp_store: Entity<LspStore>,
    pub task_store: Entity<TaskStore>,
    pub dap_store: Entity<DapStore>,
    pub agent_server_store: Entity<AgentServerStore>,
    pub settings_observer: Entity<SettingsObserver>,
    pub next_entry_id: Arc<AtomicUsize>,
    pub languages: Arc<LanguageRegistry>,
    pub extensions: Entity<HeadlessExtensionStore>,
    pub git_store: Entity<GitStore>,
    // Used mostly to keep the toolchain store alive for the RPC handlers.
    // The local variant is used within the LSP store, but that's a separate entity.
    pub _toolchain_store: Entity<ToolchainStore>,
}

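/// Dependencies needed to construct a [`HeadlessProject`] on the remote server:
/// the RPC session back to the client plus the filesystem, HTTP, Node, language,
/// and extension infrastructure.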
pub struct HeadlessAppState {
    pub session: AnyProtoClient,
    pub fs: Arc<dyn Fs>,
    pub http_client: Arc<dyn HttpClient>,
    pub node_runtime: NodeRuntime,
    pub languages: Arc<LanguageRegistry>,
    pub extension_host_proxy: Arc<ExtensionHostProxy>,
}

impl HeadlessProject {
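    /// Registers the settings, language, project, extension, and log-store
    /// globals that must exist before a `HeadlessProject` is constructed.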
    pub fn init(cx: &mut App) {
        settings::init(cx);
        language::init(cx);
        project::Project::init_settings(cx);
        extension_host::ExtensionSettings::register(cx);
        log_store::init(true, cx);
    }

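    /// Builds the project stores, shares each of them with the client over the
    /// RPC session, and registers the message and request handlers that the
    /// client uses to drive the project remotely.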
    pub fn new(
        HeadlessAppState {
            session,
            fs,
            http_client,
            node_runtime,
            languages,
            extension_host_proxy: proxy,
        }: HeadlessAppState,
        cx: &mut Context<Self>,
    ) -> Self {
        debug_adapter_extension::init(proxy.clone(), cx);
        languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx);

        let worktree_store = cx.new(|cx| {
            let mut store = WorktreeStore::local(true, fs.clone());
            store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            store
        });

        let environment = cx.new(|_| ProjectEnvironment::new(None));
        let manifest_tree = ManifestTree::new(worktree_store.clone(), cx);
        let toolchain_store = cx.new(|cx| {
            ToolchainStore::local(
                languages.clone(),
                worktree_store.clone(),
                environment.clone(),
                manifest_tree.clone(),
                cx,
            )
        });

        let buffer_store = cx.new(|cx| {
            let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
            buffer_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            buffer_store
        });

        let breakpoint_store =
            cx.new(|_| BreakpointStore::local(worktree_store.clone(), buffer_store.clone()));

        let dap_store = cx.new(|cx| {
            let mut dap_store = DapStore::new_local(
                http_client.clone(),
                node_runtime.clone(),
                fs.clone(),
                environment.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                worktree_store.clone(),
                breakpoint_store.clone(),
                cx,
            );
            dap_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            dap_store
        });

        let git_store = cx.new(|cx| {
            let mut store = GitStore::local(
                &worktree_store,
                buffer_store.clone(),
                environment.clone(),
                fs.clone(),
                cx,
            );
            store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            store
        });

        let prettier_store = cx.new(|cx| {
            PrettierStore::new(
                node_runtime.clone(),
                fs.clone(),
                languages.clone(),
                worktree_store.clone(),
                cx,
            )
        });

        let task_store = cx.new(|cx| {
            let mut task_store = TaskStore::local(
                buffer_store.downgrade(),
                worktree_store.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                environment.clone(),
                cx,
            );
            task_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            task_store
        });
        let settings_observer = cx.new(|cx| {
            let mut observer = SettingsObserver::new_local(
                fs.clone(),
                worktree_store.clone(),
                task_store.clone(),
                cx,
            );
            observer.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            observer
        });

        let lsp_store = cx.new(|cx| {
            let mut lsp_store = LspStore::new_local(
                buffer_store.clone(),
                worktree_store.clone(),
                prettier_store.clone(),
                toolchain_store
                    .read(cx)
                    .as_local_store()
                    .expect("Toolchain store to be local")
                    .clone(),
                environment.clone(),
                manifest_tree,
                languages.clone(),
                http_client.clone(),
                fs.clone(),
                cx,
            );
            lsp_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            lsp_store
        });

        let agent_server_store = cx.new(|cx| {
            let mut agent_server_store =
                AgentServerStore::local(node_runtime.clone(), fs.clone(), environment, cx);
            agent_server_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            agent_server_store
        });

        cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
        language_extension::init(
            language_extension::LspAccess::ViaLspStore(lsp_store.clone()),
            proxy.clone(),
            languages.clone(),
        );

        cx.subscribe(&buffer_store, |_this, _buffer_store, event, cx| {
            if let BufferStoreEvent::BufferAdded(buffer) = event {
                cx.subscribe(buffer, Self::on_buffer_event).detach();
            }
        })
        .detach();

        let extensions = HeadlessExtensionStore::new(
            fs.clone(),
            http_client.clone(),
            paths::remote_extensions_dir().to_path_buf(),
            proxy,
            node_runtime,
            cx,
        );

        // local_machine -> ssh handlers
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &worktree_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &buffer_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &cx.entity());
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &lsp_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &task_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &toolchain_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &dap_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &settings_observer);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &git_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &agent_server_store);

        session.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
        session.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
        session.add_request_handler(cx.weak_entity(), Self::handle_shutdown_remote_server);
        session.add_request_handler(cx.weak_entity(), Self::handle_ping);
        session.add_request_handler(cx.weak_entity(), Self::handle_get_processes);

        session.add_entity_request_handler(Self::handle_add_worktree);
        session.add_request_handler(cx.weak_entity(), Self::handle_remove_worktree);

        session.add_entity_request_handler(Self::handle_open_buffer_by_path);
        session.add_entity_request_handler(Self::handle_open_new_buffer);
        session.add_entity_request_handler(Self::handle_find_search_candidates);
        session.add_entity_request_handler(Self::handle_open_server_settings);
        session.add_entity_message_handler(Self::handle_toggle_lsp_logs);

        session.add_entity_request_handler(BufferStore::handle_update_buffer);
        session.add_entity_message_handler(BufferStore::handle_close_buffer);

        session.add_request_handler(
            extensions.downgrade(),
            HeadlessExtensionStore::handle_sync_extensions,
        );
        session.add_request_handler(
            extensions.downgrade(),
            HeadlessExtensionStore::handle_install_extension,
        );

        BufferStore::init(&session);
        WorktreeStore::init(&session);
        SettingsObserver::init(&session);
        LspStore::init(&session);
        TaskStore::init(Some(&session));
        ToolchainStore::init(&session);
        DapStore::init(&session, cx);
        // todo(debugger): Re init breakpoint store when we set it up for collab
        // BreakpointStore::init(&client);
        GitStore::init(&session);
        AgentServerStore::init_headless(&session);

        HeadlessProject {
            next_entry_id: Default::default(),
            session,
            settings_observer,
            fs,
            worktree_store,
            buffer_store,
            lsp_store,
            task_store,
            dap_store,
            agent_server_store,
            languages,
            extensions,
            git_store,
            _toolchain_store: toolchain_store,
        }
    }

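    /// Forwards buffer operations that originate locally to the client, keeping
    /// the client's replica of the buffer in sync.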
    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferEvent::Operation {
            operation,
            is_local: true,
        } = event
        {
            cx.background_spawn(self.session.request(proto::UpdateBuffer {
                project_id: REMOTE_SERVER_PROJECT_ID,
                buffer_id: buffer.read(cx).remote_id().to_proto(),
                operations: vec![serialize_operation(operation)],
            }))
            .detach()
        }
    }

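    /// Routes language server events: registers added and removed servers with
    /// the global log store, and forwards server updates, notifications, and
    /// prompts to the client.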
    fn on_lsp_store_event(
        &mut self,
        lsp_store: Entity<LspStore>,
        event: &LspStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            LspStoreEvent::LanguageServerAdded(id, name, worktree_id) => {
                let log_store = cx
                    .try_global::<GlobalLogStore>()
                    .map(|lsp_logs| lsp_logs.0.clone());
                if let Some(log_store) = log_store {
                    log_store.update(cx, |log_store, cx| {
                        log_store.add_language_server(
                            LanguageServerKind::LocalSsh {
                                lsp_store: self.lsp_store.downgrade(),
                            },
                            *id,
                            Some(name.clone()),
                            *worktree_id,
                            lsp_store.read(cx).language_server_for_id(*id),
                            cx,
                        );
                    });
                }
            }
            LspStoreEvent::LanguageServerRemoved(id) => {
                let log_store = cx
                    .try_global::<GlobalLogStore>()
                    .map(|lsp_logs| lsp_logs.0.clone());
                if let Some(log_store) = log_store {
                    log_store.update(cx, |log_store, cx| {
                        log_store.remove_language_server(*id, cx);
                    });
                }
            }
            LspStoreEvent::LanguageServerUpdate {
                language_server_id,
                name,
                message,
            } => {
                self.session
                    .send(proto::UpdateLanguageServer {
                        project_id: REMOTE_SERVER_PROJECT_ID,
                        server_name: name.as_ref().map(|name| name.to_string()),
                        language_server_id: language_server_id.to_proto(),
                        variant: Some(message.clone()),
                    })
                    .log_err();
            }
            LspStoreEvent::Notification(message) => {
                self.session
                    .send(proto::Toast {
                        project_id: REMOTE_SERVER_PROJECT_ID,
                        notification_id: "lsp".to_string(),
                        message: message.clone(),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerPrompt(prompt) => {
                let request = self.session.request(proto::LanguageServerPromptRequest {
                    project_id: REMOTE_SERVER_PROJECT_ID,
                    actions: prompt
                        .actions
                        .iter()
                        .map(|action| action.title.to_string())
                        .collect(),
                    level: Some(prompt_to_proto(prompt)),
                    lsp_name: prompt.lsp_name.clone(),
                    message: prompt.message.clone(),
                });
                let prompt = prompt.clone();
                cx.background_spawn(async move {
                    let response = request.await?;
                    if let Some(action_response) = response.action_response {
                        prompt.respond(action_response as usize).await;
                    }
                    anyhow::Ok(())
                })
                .detach();
            }
            _ => {}
        }
    }

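    /// Adds a worktree rooted at the requested path, canonicalizing it (or its
    /// parent, if the path does not exist yet) and replying before the worktree
    /// is registered with the store, so the client can hold a handle to it first.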
    pub async fn handle_add_worktree(
        this: Entity<Self>,
        message: TypedEnvelope<proto::AddWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::AddWorktreeResponse> {
        use client::ErrorCodeExt;
        let fs = this.read_with(&cx, |this, _| this.fs.clone())?;
        let path = PathBuf::from(shellexpand::tilde(&message.payload.path).to_string());

        let canonicalized = match fs.canonicalize(&path).await {
            Ok(path) => path,
            Err(e) => {
                let mut parent = path
                    .parent()
                    .ok_or(e)
                    .with_context(|| format!("{path:?} does not exist"))?;
                if parent == Path::new("") {
                    parent = util::paths::home_dir();
                }
                let parent = fs.canonicalize(parent).await.map_err(|_| {
                    anyhow!(
                        proto::ErrorCode::DevServerProjectPathDoesNotExist
                            .with_tag("path", path.to_string_lossy().as_ref())
                    )
                })?;
                parent.join(path.file_name().unwrap())
            }
        };

        let worktree = this
            .read_with(&cx.clone(), |this, _| {
                Worktree::local(
                    Arc::from(canonicalized.as_path()),
                    message.payload.visible,
                    this.fs.clone(),
                    this.next_entry_id.clone(),
                    &mut cx,
                )
            })?
            .await?;

        let response = this.read_with(&cx, |_, cx| {
            let worktree = worktree.read(cx);
            proto::AddWorktreeResponse {
                worktree_id: worktree.id().to_proto(),
                canonicalized_path: canonicalized.to_string_lossy().into_owned(),
            }
        })?;

        // We spawn this asynchronously, so that we can send the response back
        // *before* `worktree_store.add()` can send out UpdateProject requests
        // to the client about the new worktree.
        //
        // That lets the client manage the reference/handles of the newly-added
        // worktree, before getting interrupted by an UpdateProject request.
        //
        // This fixes the problem of the client sending the AddWorktree request,
        // the headless project sending out a project update, the client receiving
        // it and immediately dropping its reference to the new worktree, causing
        // it to be dropped on the headless project, and the client only then
        // receiving a response to AddWorktree.
        cx.spawn(async move |cx| {
            this.update(cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.add(&worktree, cx);
                });
            })
            .log_err();
        })
        .detach();

        Ok(response)
    }

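    /// Removes the worktree identified in the request from the worktree store.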
    pub async fn handle_remove_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        this.update(&mut cx, |this, cx| {
            this.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.remove_worktree(worktree_id, cx);
            });
        })?;
        Ok(proto::Ack {})
    }

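    /// Opens the buffer at the requested project path, replicates it to the
    /// peer, and returns its remote id.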
    pub async fn handle_open_buffer_by_path(
        this: Entity<Self>,
        message: TypedEnvelope<proto::OpenBufferByPath>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
        let path = RelPath::from_proto(&message.payload.path)?;
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(ProjectPath { worktree_id, path }, cx)
            });
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

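    /// Creates an empty buffer, replicates it to the peer, and returns its
    /// remote id.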
    pub async fn handle_open_new_buffer(
        this: Entity<Self>,
        _message: TypedEnvelope<proto::OpenNewBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this
                .buffer_store
                .update(cx, |buffer_store, cx| buffer_store.create_buffer(true, cx));
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

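    /// Enables or disables RPC trace logging for a single language server in
    /// the headless log store, mirroring the client-side toggle.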
    async fn handle_toggle_lsp_logs(
        _: Entity<Self>,
        envelope: TypedEnvelope<proto::ToggleLspLogs>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let server_id = LanguageServerId::from_proto(envelope.payload.server_id);
        let lsp_logs = cx
            .update(|cx| {
                cx.try_global::<GlobalLogStore>()
                    .map(|lsp_logs| lsp_logs.0.clone())
            })?
            .context("lsp logs store is missing")?;

        lsp_logs.update(&mut cx, |lsp_logs, _| {
            // RPC logs are very noisy and need to be toggled on the headless server too.
            // The rest of the logs for the SSH project should always stay enabled,
            // e.g. so that language server error logs reach the client before anything is toggled.
            if envelope.payload.enabled {
                lsp_logs.enable_rpc_trace_for_language_server(server_id);
            } else {
                lsp_logs.disable_rpc_trace_for_language_server(server_id);
            }
        })?;
        Ok(())
    }

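    /// Opens the server's settings file as a buffer, seeding it with the
    /// initial server settings content when it is empty, and replicates it to
    /// the peer.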
    async fn handle_open_server_settings(
        this: Entity<Self>,
        _: TypedEnvelope<proto::OpenServerSettings>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let settings_path = paths::settings_file();
        let (worktree, path) = this
            .update(&mut cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.find_or_create_worktree(settings_path, false, cx)
                })
            })?
            .await?;

        let (buffer, buffer_store) = this.update(&mut cx, |this, cx| {
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id: worktree.read(cx).id(),
                        path,
                    },
                    cx,
                )
            });

            (buffer, this.buffer_store.clone())
        })?;

        let buffer = buffer.await?;

        let buffer_id = cx.update(|cx| {
            if buffer.read(cx).is_empty() {
                buffer.update(cx, |buffer, cx| {
                    buffer.edit([(0..0, initial_server_settings_content())], None, cx)
                });
            }

            let buffer_id = buffer.read(cx).remote_id();

            buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                    .detach_and_log_err(cx);
            });

            buffer_id
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

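    /// Runs a project search for candidate buffers, replicating each match to
    /// the peer and returning their buffer ids.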
    async fn handle_find_search_candidates(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::FindSearchCandidates>,
        mut cx: AsyncApp,
    ) -> Result<proto::FindSearchCandidatesResponse> {
        let message = envelope.payload;
        let query = SearchQuery::from_proto(
            message.query.context("missing query field")?,
            PathStyle::local(),
        )?;
        let results = this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.find_search_candidates(&query, message.limit as _, this.fs.clone(), cx)
            })
        })?;

        let mut response = proto::FindSearchCandidatesResponse {
            buffer_ids: Vec::new(),
        };

        let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone())?;

        while let Ok(buffer) = results.recv().await {
            let buffer_id = buffer.read_with(&cx, |this, _| this.remote_id())?;
            response.buffer_ids.push(buffer_id.to_proto());
            buffer_store
                .update(&mut cx, |buffer_store, cx| {
                    buffer_store.create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                })?
                .await?;
        }

        Ok(response)
    }

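    /// Lists the entries of a directory on the remote machine and, when the
    /// request asks for it, reports whether each entry is itself a directory.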
    async fn handle_list_remote_directory(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ListRemoteDirectory>,
        cx: AsyncApp,
    ) -> Result<proto::ListRemoteDirectoryResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = PathBuf::from(shellexpand::tilde(&envelope.payload.path).to_string());
        let check_info = envelope
            .payload
            .config
            .as_ref()
            .is_some_and(|config| config.is_dir);

        let mut entries = Vec::new();
        let mut entry_info = Vec::new();
        let mut response = fs.read_dir(&expanded).await?;
        while let Some(path) = response.next().await {
            let path = path?;
            if let Some(file_name) = path.file_name() {
                entries.push(file_name.to_string_lossy().into_owned());
                if check_info {
                    let is_dir = fs.is_dir(&path).await;
                    entry_info.push(proto::EntryInfo { is_dir });
                }
            }
        }
        Ok(proto::ListRemoteDirectoryResponse {
            entries,
            entry_info,
        })
    }

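    /// Reports whether the given path exists on the remote machine, whether it
    /// is a directory, and its tilde-expanded form.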
    async fn handle_get_path_metadata(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPathMetadata>,
        cx: AsyncApp,
    ) -> Result<proto::GetPathMetadataResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = PathBuf::from(shellexpand::tilde(&envelope.payload.path).to_string());

        let metadata = fs.metadata(&expanded).await?;
        let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false);

        Ok(proto::GetPathMetadataResponse {
            exists: metadata.is_some(),
            is_dir,
            path: expanded.to_string_lossy().into_owned(),
        })
    }

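    /// Acknowledges the request and then shuts down the headless app, quitting
    /// the remote server process.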
    async fn handle_shutdown_remote_server(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::ShutdownRemoteServer>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        cx.spawn(async move |cx| {
            cx.update(|cx| {
                // TODO: This is a hack, because in a headless project, shutdown isn't executed
                // when calling quit, but it should be.
                cx.shutdown();
                cx.quit();
            })
        })
        .detach();

        Ok(proto::Ack {})
    }

    pub async fn handle_ping(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::Ping>,
        _cx: AsyncApp,
    ) -> Result<proto::Ack> {
        log::debug!("Received ping from client");
        Ok(proto::Ack {})
    }

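    /// Returns the processes currently running on the remote machine, sorted by
    /// name.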
    async fn handle_get_processes(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::GetProcesses>,
        _cx: AsyncApp,
    ) -> Result<proto::GetProcessesResponse> {
        let mut processes = Vec::new();
        let system = System::new_all();

        for (_pid, process) in system.processes() {
            let name = process.name().to_string_lossy().into_owned();
            let command = process
                .cmd()
                .iter()
                .map(|s| s.to_string_lossy().into_owned())
                .collect::<Vec<_>>();

            processes.push(proto::ProcessInfo {
                pid: process.pid().as_u32(),
                name,
                command,
            });
        }

        processes.sort_by_key(|p| p.name.clone());

        Ok(proto::GetProcessesResponse { processes })
    }
}

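/// Converts a language server prompt level into its protobuf representation.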
fn prompt_to_proto(
    prompt: &project::LanguageServerPromptRequest,
) -> proto::language_server_prompt_request::Level {
    match prompt.level {
        PromptLevel::Info => proto::language_server_prompt_request::Level::Info(
            proto::language_server_prompt_request::Info {},
        ),
        PromptLevel::Warning => proto::language_server_prompt_request::Level::Warning(
            proto::language_server_prompt_request::Warning {},
        ),
        PromptLevel::Critical => proto::language_server_prompt_request::Level::Critical(
            proto::language_server_prompt_request::Critical {},
        ),
    }
}