use ::proto::{FromProto, ToProto};
use anyhow::{Context as _, Result, anyhow};
use lsp::LanguageServerId;

use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
use fs::Fs;
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel};
use http_client::HttpClient;
use language::{Buffer, BufferEvent, LanguageRegistry, proto::serialize_operation};
use node_runtime::NodeRuntime;
use project::{
    LspStore, LspStoreEvent, ManifestTree, PrettierStore, ProjectEnvironment, ProjectPath,
    ToolchainStore, WorktreeId,
    agent_server_store::AgentServerStore,
    buffer_store::{BufferStore, BufferStoreEvent},
    debugger::{breakpoint_store::BreakpointStore, dap_store::DapStore},
    git_store::GitStore,
    lsp_store::log_store::{self, GlobalLogStore, LanguageServerKind},
    project_settings::SettingsObserver,
    search::SearchQuery,
    task_store::TaskStore,
    worktree_store::WorktreeStore,
};
use rpc::{
    AnyProtoClient, TypedEnvelope,
    proto::{self, REMOTE_SERVER_PEER_ID, REMOTE_SERVER_PROJECT_ID},
};

use settings::initial_server_settings_content;
use smol::stream::StreamExt;
use std::{
    path::{Path, PathBuf},
    sync::{Arc, atomic::AtomicUsize},
};
use sysinfo::System;
use util::ResultExt;
use worktree::Worktree;

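/// The server-side half of a remote project. It owns the project stores
/// (worktrees, buffers, LSP, tasks, debugging, git, settings, extensions) on the
/// headless machine and mirrors their state to the connected client over RPC.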
pub struct HeadlessProject {
    pub fs: Arc<dyn Fs>,
    pub session: AnyProtoClient,
    pub worktree_store: Entity<WorktreeStore>,
    pub buffer_store: Entity<BufferStore>,
    pub lsp_store: Entity<LspStore>,
    pub task_store: Entity<TaskStore>,
    pub dap_store: Entity<DapStore>,
    pub agent_server_store: Entity<AgentServerStore>,
    pub settings_observer: Entity<SettingsObserver>,
    pub next_entry_id: Arc<AtomicUsize>,
    pub languages: Arc<LanguageRegistry>,
    pub extensions: Entity<HeadlessExtensionStore>,
    pub git_store: Entity<GitStore>,
    // Used mostly to keep alive the toolchain store for RPC handlers.
    // Local variant is used within LSP store, but that's a separate entity.
    pub _toolchain_store: Entity<ToolchainStore>,
}

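/// The state assembled by the headless server and handed to
/// [`HeadlessProject::new`]: the RPC session plus the shared filesystem, HTTP
/// client, Node runtime, language registry, and extension host proxy.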
pub struct HeadlessAppState {
    pub session: AnyProtoClient,
    pub fs: Arc<dyn Fs>,
    pub http_client: Arc<dyn HttpClient>,
    pub node_runtime: NodeRuntime,
    pub languages: Arc<LanguageRegistry>,
    pub extension_host_proxy: Arc<ExtensionHostProxy>,
}

impl HeadlessProject {
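    /// Registers the global state (settings, languages, project settings, and
    /// the LSP log store) that a headless project relies on.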
    pub fn init(cx: &mut App) {
        settings::init(cx);
        language::init(cx);
        project::Project::init_settings(cx);
        log_store::init(true, cx);
    }

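    /// Builds the headless project: constructs the local stores, shares them with
    /// the client under `REMOTE_SERVER_PROJECT_ID`, and registers the RPC handlers
    /// that serve client requests.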
    pub fn new(
        HeadlessAppState {
            session,
            fs,
            http_client,
            node_runtime,
            languages,
            extension_host_proxy: proxy,
        }: HeadlessAppState,
        cx: &mut Context<Self>,
    ) -> Self {
        debug_adapter_extension::init(proxy.clone(), cx);
        languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx);

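        // Construct the local project stores; most are shared with the client so
        // that their state is replicated over the RPC session.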
        let worktree_store = cx.new(|cx| {
            let mut store = WorktreeStore::local(true, fs.clone());
            store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            store
        });

        let environment = cx.new(|_| ProjectEnvironment::new(None));
        let manifest_tree = ManifestTree::new(worktree_store.clone(), cx);
        let toolchain_store = cx.new(|cx| {
            ToolchainStore::local(
                languages.clone(),
                worktree_store.clone(),
                environment.clone(),
                manifest_tree.clone(),
                cx,
            )
        });

        let buffer_store = cx.new(|cx| {
            let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
            buffer_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            buffer_store
        });

        let breakpoint_store =
            cx.new(|_| BreakpointStore::local(worktree_store.clone(), buffer_store.clone()));

        let dap_store = cx.new(|cx| {
            let mut dap_store = DapStore::new_local(
                http_client.clone(),
                node_runtime.clone(),
                fs.clone(),
                environment.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                worktree_store.clone(),
                breakpoint_store.clone(),
                cx,
            );
            dap_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            dap_store
        });

        let git_store = cx.new(|cx| {
            let mut store = GitStore::local(
                &worktree_store,
                buffer_store.clone(),
                environment.clone(),
                fs.clone(),
                cx,
            );
            store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            store
        });

        let prettier_store = cx.new(|cx| {
            PrettierStore::new(
                node_runtime.clone(),
                fs.clone(),
                languages.clone(),
                worktree_store.clone(),
                cx,
            )
        });

        let task_store = cx.new(|cx| {
            let mut task_store = TaskStore::local(
                buffer_store.downgrade(),
                worktree_store.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                environment.clone(),
                cx,
            );
            task_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            task_store
        });
        let settings_observer = cx.new(|cx| {
            let mut observer = SettingsObserver::new_local(
                fs.clone(),
                worktree_store.clone(),
                task_store.clone(),
                cx,
            );
            observer.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            observer
        });

        let lsp_store = cx.new(|cx| {
            let mut lsp_store = LspStore::new_local(
                buffer_store.clone(),
                worktree_store.clone(),
                prettier_store.clone(),
                toolchain_store
                    .read(cx)
                    .as_local_store()
                    .expect("Toolchain store to be local")
                    .clone(),
                environment.clone(),
                manifest_tree,
                languages.clone(),
                http_client.clone(),
                fs.clone(),
                cx,
            );
            lsp_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            lsp_store
        });

        let agent_server_store = cx.new(|cx| {
            let mut agent_server_store =
                AgentServerStore::local(node_runtime.clone(), fs.clone(), environment, cx);
            agent_server_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
            agent_server_store
        });

        cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
        language_extension::init(
            language_extension::LspAccess::ViaLspStore(lsp_store.clone()),
            proxy.clone(),
            languages.clone(),
        );

        cx.subscribe(&buffer_store, |_this, _buffer_store, event, cx| {
            if let BufferStoreEvent::BufferAdded(buffer) = event {
                cx.subscribe(buffer, Self::on_buffer_event).detach();
            }
        })
        .detach();

        let extensions = HeadlessExtensionStore::new(
            fs.clone(),
            http_client.clone(),
            paths::remote_extensions_dir().to_path_buf(),
            proxy,
            node_runtime,
            cx,
        );

        // Handlers for messages sent from the local machine to this remote (SSH) server.
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &worktree_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &buffer_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &cx.entity());
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &lsp_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &task_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &toolchain_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &dap_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &settings_observer);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &git_store);
        session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &agent_server_store);

        session.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
        session.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
        session.add_request_handler(cx.weak_entity(), Self::handle_shutdown_remote_server);
        session.add_request_handler(cx.weak_entity(), Self::handle_ping);
        session.add_request_handler(cx.weak_entity(), Self::handle_get_processes);

        session.add_entity_request_handler(Self::handle_add_worktree);
        session.add_request_handler(cx.weak_entity(), Self::handle_remove_worktree);

        session.add_entity_request_handler(Self::handle_open_buffer_by_path);
        session.add_entity_request_handler(Self::handle_open_new_buffer);
        session.add_entity_request_handler(Self::handle_find_search_candidates);
        session.add_entity_request_handler(Self::handle_open_server_settings);
        session.add_entity_message_handler(Self::handle_toggle_lsp_logs);

        session.add_entity_request_handler(BufferStore::handle_update_buffer);
        session.add_entity_message_handler(BufferStore::handle_close_buffer);

        session.add_request_handler(
            extensions.downgrade(),
            HeadlessExtensionStore::handle_sync_extensions,
        );
        session.add_request_handler(
            extensions.downgrade(),
            HeadlessExtensionStore::handle_install_extension,
        );

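        // Let each store type register its own message handlers on the session.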
        BufferStore::init(&session);
        WorktreeStore::init(&session);
        SettingsObserver::init(&session);
        LspStore::init(&session);
        TaskStore::init(Some(&session));
        ToolchainStore::init(&session);
        DapStore::init(&session, cx);
        // todo(debugger): Re init breakpoint store when we set it up for collab
        // BreakpointStore::init(&client);
        GitStore::init(&session);
        AgentServerStore::init_headless(&session);

        HeadlessProject {
            next_entry_id: Default::default(),
            session,
            settings_observer,
            fs,
            worktree_store,
            buffer_store,
            lsp_store,
            task_store,
            dap_store,
            agent_server_store,
            languages,
            extensions,
            git_store,
            _toolchain_store: toolchain_store,
        }
    }

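    /// Forwards locally produced buffer operations to the client so its replica of
    /// the buffer stays in sync.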
    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferEvent::Operation {
            operation,
            is_local: true,
        } = event
        {
            cx.background_spawn(self.session.request(proto::UpdateBuffer {
                project_id: REMOTE_SERVER_PROJECT_ID,
                buffer_id: buffer.read(cx).remote_id().to_proto(),
                operations: vec![serialize_operation(operation)],
            }))
            .detach()
        }
    }

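    /// Mirrors language server lifecycle events into the LSP log store and relays
    /// server updates, notifications, and prompts to the client.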
    fn on_lsp_store_event(
        &mut self,
        lsp_store: Entity<LspStore>,
        event: &LspStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            LspStoreEvent::LanguageServerAdded(id, name, worktree_id) => {
                let log_store = cx
                    .try_global::<GlobalLogStore>()
                    .map(|lsp_logs| lsp_logs.0.clone());
                if let Some(log_store) = log_store {
                    log_store.update(cx, |log_store, cx| {
                        log_store.add_language_server(
                            LanguageServerKind::LocalSsh {
                                lsp_store: self.lsp_store.downgrade(),
                            },
                            *id,
                            Some(name.clone()),
                            *worktree_id,
                            lsp_store.read(cx).language_server_for_id(*id),
                            cx,
                        );
                    });
                }
            }
            LspStoreEvent::LanguageServerRemoved(id) => {
                let log_store = cx
                    .try_global::<GlobalLogStore>()
                    .map(|lsp_logs| lsp_logs.0.clone());
                if let Some(log_store) = log_store {
                    log_store.update(cx, |log_store, cx| {
                        log_store.remove_language_server(*id, cx);
                    });
                }
            }
            LspStoreEvent::LanguageServerUpdate {
                language_server_id,
                name,
                message,
            } => {
                self.session
                    .send(proto::UpdateLanguageServer {
                        project_id: REMOTE_SERVER_PROJECT_ID,
                        server_name: name.as_ref().map(|name| name.to_string()),
                        language_server_id: language_server_id.to_proto(),
                        variant: Some(message.clone()),
                    })
                    .log_err();
            }
            LspStoreEvent::Notification(message) => {
                self.session
                    .send(proto::Toast {
                        project_id: REMOTE_SERVER_PROJECT_ID,
                        notification_id: "lsp".to_string(),
                        message: message.clone(),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerPrompt(prompt) => {
                let request = self.session.request(proto::LanguageServerPromptRequest {
                    project_id: REMOTE_SERVER_PROJECT_ID,
                    actions: prompt
                        .actions
                        .iter()
                        .map(|action| action.title.to_string())
                        .collect(),
                    level: Some(prompt_to_proto(prompt)),
                    lsp_name: prompt.lsp_name.clone(),
                    message: prompt.message.clone(),
                });
                let prompt = prompt.clone();
                cx.background_spawn(async move {
                    let response = request.await?;
                    if let Some(action_response) = response.action_response {
                        prompt.respond(action_response as usize).await;
                    }
                    anyhow::Ok(())
                })
                .detach();
            }
            _ => {}
        }
    }

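    /// Creates a local worktree for the requested path, canonicalizing it (or its
    /// parent, if the path does not exist yet), and registers it with the worktree
    /// store after the response has been sent.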
    pub async fn handle_add_worktree(
        this: Entity<Self>,
        message: TypedEnvelope<proto::AddWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::AddWorktreeResponse> {
        use client::ErrorCodeExt;
        let fs = this.read_with(&cx, |this, _| this.fs.clone())?;
        let path = PathBuf::from_proto(shellexpand::tilde(&message.payload.path).to_string());

        let canonicalized = match fs.canonicalize(&path).await {
            Ok(path) => path,
            Err(e) => {
                let mut parent = path
                    .parent()
                    .ok_or(e)
                    .with_context(|| format!("{path:?} does not exist"))?;
                if parent == Path::new("") {
                    parent = util::paths::home_dir();
                }
                let parent = fs.canonicalize(parent).await.map_err(|_| {
                    anyhow!(
                        proto::ErrorCode::DevServerProjectPathDoesNotExist
                            .with_tag("path", path.to_string_lossy().as_ref())
                    )
                })?;
                parent.join(path.file_name().unwrap())
            }
        };

        let worktree = this
            .read_with(&cx.clone(), |this, _| {
                Worktree::local(
                    Arc::from(canonicalized.as_path()),
                    message.payload.visible,
                    this.fs.clone(),
                    this.next_entry_id.clone(),
                    &mut cx,
                )
            })?
            .await?;

        let response = this.read_with(&cx, |_, cx| {
            let worktree = worktree.read(cx);
            proto::AddWorktreeResponse {
                worktree_id: worktree.id().to_proto(),
                canonicalized_path: canonicalized.to_proto(),
            }
        })?;

        // We spawn this asynchronously, so that we can send the response back
        // *before* `worktree_store.add()` can send out UpdateProject requests
        // to the client about the new worktree.
        //
        // That lets the client manage the reference/handles of the newly-added
        // worktree, before getting interrupted by an UpdateProject request.
        //
        // This fixes the problem of the client sending the AddWorktree request,
        // the headless project sending out a project update, the client receiving
        // it and immediately dropping the reference to the new worktree, causing
        // it to be dropped on the headless project, and the client only then
        // receiving a response to AddWorktree.
        cx.spawn(async move |cx| {
            this.update(cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.add(&worktree, cx);
                });
            })
            .log_err();
        })
        .detach();

        Ok(response)
    }

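    /// Removes the given worktree from the worktree store.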
    pub async fn handle_remove_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        this.update(&mut cx, |this, cx| {
            this.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.remove_worktree(worktree_id, cx);
            });
        })?;
        Ok(proto::Ack {})
    }

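    /// Opens the buffer at the given project path, replicates it to the client,
    /// and responds with its remote buffer id.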
    pub async fn handle_open_buffer_by_path(
        this: Entity<Self>,
        message: TypedEnvelope<proto::OpenBufferByPath>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Arc::<Path>::from_proto(message.payload.path),
                    },
                    cx,
                )
            });
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

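    /// Creates a new buffer, replicates it to the client, and responds with its
    /// remote buffer id.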
    pub async fn handle_open_new_buffer(
        this: Entity<Self>,
        _message: TypedEnvelope<proto::OpenNewBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this
                .buffer_store
                .update(cx, |buffer_store, cx| buffer_store.create_buffer(true, cx));
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

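    /// Enables or disables RPC tracing for a single language server, mirroring the
    /// toggle made by the client.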
    async fn handle_toggle_lsp_logs(
        _: Entity<Self>,
        envelope: TypedEnvelope<proto::ToggleLspLogs>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let server_id = LanguageServerId::from_proto(envelope.payload.server_id);
        let lsp_logs = cx
            .update(|cx| {
                cx.try_global::<GlobalLogStore>()
                    .map(|lsp_logs| lsp_logs.0.clone())
            })?
            .context("lsp logs store is missing")?;

        lsp_logs.update(&mut cx, |lsp_logs, _| {
            // RPC traces are very noisy, so they are only enabled on demand and the
            // toggle has to be mirrored on the headless server too. All other logs
            // for the SSH project stay enabled at all times, e.g. so that language
            // server error logs reach the client before anything is toggled.
            if envelope.payload.enabled {
                lsp_logs.enable_rpc_trace_for_language_server(server_id);
            } else {
                lsp_logs.disable_rpc_trace_for_language_server(server_id);
            }
        })?;
        Ok(())
    }

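    /// Opens the remote server's settings file as a buffer, seeding it with the
    /// initial settings content if it is empty, and replicates it to the client.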
    async fn handle_open_server_settings(
        this: Entity<Self>,
        _: TypedEnvelope<proto::OpenServerSettings>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let settings_path = paths::settings_file();
        let (worktree, path) = this
            .update(&mut cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.find_or_create_worktree(settings_path, false, cx)
                })
            })?
            .await?;

        let (buffer, buffer_store) = this.update(&mut cx, |this, cx| {
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id: worktree.read(cx).id(),
                        path: path.into(),
                    },
                    cx,
                )
            });

            (buffer, this.buffer_store.clone())
        })?;

        let buffer = buffer.await?;

        let buffer_id = cx.update(|cx| {
            if buffer.read(cx).is_empty() {
                buffer.update(cx, |buffer, cx| {
                    buffer.edit([(0..0, initial_server_settings_content())], None, cx)
                });
            }

            let buffer_id = buffer.read(cx).remote_id();

            buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                    .detach_and_log_err(cx);
            });

            buffer_id
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

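    /// Runs the search query against the buffer store, replicating each candidate
    /// buffer to the client and returning their ids.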
    async fn handle_find_search_candidates(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::FindSearchCandidates>,
        mut cx: AsyncApp,
    ) -> Result<proto::FindSearchCandidatesResponse> {
        let message = envelope.payload;
        let query = SearchQuery::from_proto(message.query.context("missing query field")?)?;
        let results = this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.find_search_candidates(&query, message.limit as _, this.fs.clone(), cx)
            })
        })?;

        let mut response = proto::FindSearchCandidatesResponse {
            buffer_ids: Vec::new(),
        };

        let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone())?;

        while let Ok(buffer) = results.recv().await {
            let buffer_id = buffer.read_with(&cx, |this, _| this.remote_id())?;
            response.buffer_ids.push(buffer_id.to_proto());
            buffer_store
                .update(&mut cx, |buffer_store, cx| {
                    buffer_store.create_buffer_for_peer(&buffer, REMOTE_SERVER_PEER_ID, cx)
                })?
                .await?;
        }

        Ok(response)
    }

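    /// Lists the entries of a directory on the remote machine, optionally
    /// reporting whether each entry is itself a directory.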
    async fn handle_list_remote_directory(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ListRemoteDirectory>,
        cx: AsyncApp,
    ) -> Result<proto::ListRemoteDirectoryResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = PathBuf::from_proto(shellexpand::tilde(&envelope.payload.path).to_string());
        let check_info = envelope
            .payload
            .config
            .as_ref()
            .is_some_and(|config| config.is_dir);

        let mut entries = Vec::new();
        let mut entry_info = Vec::new();
        let mut response = fs.read_dir(&expanded).await?;
        while let Some(path) = response.next().await {
            let path = path?;
            if let Some(file_name) = path.file_name() {
                entries.push(file_name.to_string_lossy().to_string());
                if check_info {
                    let is_dir = fs.is_dir(&path).await;
                    entry_info.push(proto::EntryInfo { is_dir });
                }
            }
        }
        Ok(proto::ListRemoteDirectoryResponse {
            entries,
            entry_info,
        })
    }

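    /// Reports whether the given path exists on the remote machine and whether it
    /// is a directory.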
    async fn handle_get_path_metadata(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPathMetadata>,
        cx: AsyncApp,
    ) -> Result<proto::GetPathMetadataResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = PathBuf::from_proto(shellexpand::tilde(&envelope.payload.path).to_string());

        let metadata = fs.metadata(&expanded).await?;
        let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false);

        Ok(proto::GetPathMetadataResponse {
            exists: metadata.is_some(),
            is_dir,
            path: expanded.to_proto(),
        })
    }

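    /// Acknowledges the request and then shuts down the headless server process.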
    async fn handle_shutdown_remote_server(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::ShutdownRemoteServer>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        cx.spawn(async move |cx| {
            cx.update(|cx| {
                // TODO: This is a hack, because in a headless project, shutdown isn't executed
                // when calling quit, but it should be.
                cx.shutdown();
                cx.quit();
            })
        })
        .detach();

        Ok(proto::Ack {})
    }

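    /// Responds to pings from the client to confirm the server is alive.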
    pub async fn handle_ping(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::Ping>,
        _cx: AsyncApp,
    ) -> Result<proto::Ack> {
        log::debug!("Received ping from client");
        Ok(proto::Ack {})
    }

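    /// Returns the processes currently running on the remote machine, sorted by name.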
    async fn handle_get_processes(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::GetProcesses>,
        _cx: AsyncApp,
    ) -> Result<proto::GetProcessesResponse> {
        let mut processes = Vec::new();
        let system = System::new_all();

        for (_pid, process) in system.processes() {
            let name = process.name().to_string_lossy().into_owned();
            let command = process
                .cmd()
                .iter()
                .map(|s| s.to_string_lossy().to_string())
                .collect::<Vec<_>>();

            processes.push(proto::ProcessInfo {
                pid: process.pid().as_u32(),
                name,
                command,
            });
        }

        processes.sort_by_key(|p| p.name.clone());

        Ok(proto::GetProcessesResponse { processes })
    }
}

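/// Converts a language server prompt level into its protobuf representation.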
fn prompt_to_proto(
    prompt: &project::LanguageServerPromptRequest,
) -> proto::language_server_prompt_request::Level {
    match prompt.level {
        PromptLevel::Info => proto::language_server_prompt_request::Level::Info(
            proto::language_server_prompt_request::Info {},
        ),
        PromptLevel::Warning => proto::language_server_prompt_request::Level::Warning(
            proto::language_server_prompt_request::Warning {},
        ),
        PromptLevel::Critical => proto::language_server_prompt_request::Level::Critical(
            proto::language_server_prompt_request::Critical {},
        ),
    }
}