use ::proto::{FromProto, ToProto};
use anyhow::{Context as _, Result, anyhow};

use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
use fs::Fs;
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel};
use http_client::HttpClient;
use language::{Buffer, BufferEvent, LanguageRegistry, proto::serialize_operation};
use node_runtime::NodeRuntime;
use project::{
    LspStore, LspStoreEvent, ManifestTree, PrettierStore, ProjectEnvironment, ProjectPath,
    ToolchainStore, WorktreeId,
    buffer_store::{BufferStore, BufferStoreEvent},
    debugger::{breakpoint_store::BreakpointStore, dap_store::DapStore},
    git_store::GitStore,
    project_settings::SettingsObserver,
    search::SearchQuery,
    task_store::TaskStore,
    worktree_store::WorktreeStore,
};
use remote::ssh_session::ChannelClient;
use rpc::{
    AnyProtoClient, TypedEnvelope,
    proto::{self, SSH_PEER_ID, SSH_PROJECT_ID},
};

use settings::initial_server_settings_content;
use smol::stream::StreamExt;
use std::{
    path::{Path, PathBuf},
    sync::{Arc, atomic::AtomicUsize},
};
use util::ResultExt;
use worktree::Worktree;

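/// Server-side counterpart of a project that runs on the remote host and
/// mirrors project state (worktrees, buffers, language servers, tasks, git,
/// extensions) to the SSH client over the proto channel.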
pub struct HeadlessProject {
    pub fs: Arc<dyn Fs>,
    pub session: AnyProtoClient,
    pub worktree_store: Entity<WorktreeStore>,
    pub buffer_store: Entity<BufferStore>,
    pub lsp_store: Entity<LspStore>,
    pub task_store: Entity<TaskStore>,
    pub dap_store: Entity<DapStore>,
    pub settings_observer: Entity<SettingsObserver>,
    pub next_entry_id: Arc<AtomicUsize>,
    pub languages: Arc<LanguageRegistry>,
    pub extensions: Entity<HeadlessExtensionStore>,
    pub git_store: Entity<GitStore>,
}

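/// Dependencies required to construct a `HeadlessProject`.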
pub struct HeadlessAppState {
    pub session: Arc<ChannelClient>,
    pub fs: Arc<dyn Fs>,
    pub http_client: Arc<dyn HttpClient>,
    pub node_runtime: NodeRuntime,
    pub languages: Arc<LanguageRegistry>,
    pub extension_host_proxy: Arc<ExtensionHostProxy>,
}

impl HeadlessProject {
    pub fn init(cx: &mut App) {
        settings::init(cx);
        language::init(cx);
        project::Project::init_settings(cx);
    }

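    /// Builds the headless project: constructs the local stores, shares each
    /// of them over the SSH channel, and registers the handlers that serve
    /// requests coming from the client.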
    pub fn new(
        HeadlessAppState {
            session,
            fs,
            http_client,
            node_runtime,
            languages,
            extension_host_proxy: proxy,
        }: HeadlessAppState,
        cx: &mut Context<Self>,
    ) -> Self {
        debug_adapter_extension::init(proxy.clone(), cx);
        language_extension::init(proxy.clone(), languages.clone());
        languages::init(languages.clone(), node_runtime.clone(), cx);

        let worktree_store = cx.new(|cx| {
            let mut store = WorktreeStore::local(true, fs.clone());
            store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            store
        });

        let environment = cx.new(|_| ProjectEnvironment::new(None));
        let manifest_tree = ManifestTree::new(worktree_store.clone(), cx);
        let toolchain_store = cx.new(|cx| {
            ToolchainStore::local(
                languages.clone(),
                worktree_store.clone(),
                environment.clone(),
                manifest_tree.clone(),
                cx,
            )
        });

        let buffer_store = cx.new(|cx| {
            let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
            buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            buffer_store
        });

        let breakpoint_store =
            cx.new(|_| BreakpointStore::local(worktree_store.clone(), buffer_store.clone()));

        let dap_store = cx.new(|cx| {
            let mut dap_store = DapStore::new_local(
                http_client.clone(),
                node_runtime.clone(),
                fs.clone(),
                environment.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                worktree_store.clone(),
                breakpoint_store.clone(),
                cx,
            );
            dap_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            dap_store
        });

        let git_store = cx.new(|cx| {
            let mut store = GitStore::local(
                &worktree_store,
                buffer_store.clone(),
                environment.clone(),
                fs.clone(),
                cx,
            );
            store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            store
        });

        let prettier_store = cx.new(|cx| {
            PrettierStore::new(
                node_runtime.clone(),
                fs.clone(),
                languages.clone(),
                worktree_store.clone(),
                cx,
            )
        });

        let task_store = cx.new(|cx| {
            let mut task_store = TaskStore::local(
                buffer_store.downgrade(),
                worktree_store.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                environment.clone(),
                cx,
            );
            task_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            task_store
        });
        let settings_observer = cx.new(|cx| {
            let mut observer = SettingsObserver::new_local(
                fs.clone(),
                worktree_store.clone(),
                task_store.clone(),
                cx,
            );
            observer.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            observer
        });

        let lsp_store = cx.new(|cx| {
            let mut lsp_store = LspStore::new_local(
                buffer_store.clone(),
                worktree_store.clone(),
                prettier_store.clone(),
                toolchain_store.clone(),
                environment,
                manifest_tree,
                languages.clone(),
                http_client.clone(),
                fs.clone(),
                cx,
            );
            lsp_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            lsp_store
        });

        cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();

        cx.subscribe(
            &buffer_store,
            |_this, _buffer_store, event, cx| match event {
                BufferStoreEvent::BufferAdded(buffer) => {
                    cx.subscribe(buffer, Self::on_buffer_event).detach();
                }
                _ => {}
            },
        )
        .detach();

        let extensions = HeadlessExtensionStore::new(
            fs.clone(),
            http_client.clone(),
            paths::remote_extensions_dir().to_path_buf(),
            proxy,
            node_runtime,
            cx,
        );

        let client: AnyProtoClient = session.clone().into();

        // local_machine -> ssh handlers
        session.subscribe_to_entity(SSH_PROJECT_ID, &worktree_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &buffer_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &cx.entity());
        session.subscribe_to_entity(SSH_PROJECT_ID, &lsp_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &task_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &toolchain_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &dap_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer);
        session.subscribe_to_entity(SSH_PROJECT_ID, &git_store);

        client.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
        client.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
        client.add_request_handler(cx.weak_entity(), Self::handle_shutdown_remote_server);
        client.add_request_handler(cx.weak_entity(), Self::handle_ping);

        client.add_entity_request_handler(Self::handle_add_worktree);
        client.add_request_handler(cx.weak_entity(), Self::handle_remove_worktree);

        client.add_entity_request_handler(Self::handle_open_buffer_by_path);
        client.add_entity_request_handler(Self::handle_open_new_buffer);
        client.add_entity_request_handler(Self::handle_find_search_candidates);
        client.add_entity_request_handler(Self::handle_open_server_settings);

        client.add_entity_request_handler(BufferStore::handle_update_buffer);
        client.add_entity_message_handler(BufferStore::handle_close_buffer);

        client.add_request_handler(
            extensions.clone().downgrade(),
            HeadlessExtensionStore::handle_sync_extensions,
        );
        client.add_request_handler(
            extensions.clone().downgrade(),
            HeadlessExtensionStore::handle_install_extension,
        );

        BufferStore::init(&client);
        WorktreeStore::init(&client);
        SettingsObserver::init(&client);
        LspStore::init(&client);
        TaskStore::init(Some(&client));
        ToolchainStore::init(&client);
        DapStore::init(&client, cx);
        // todo(debugger): Re-init the breakpoint store once we set it up for collab
        // BreakpointStore::init(&client);
        GitStore::init(&client);

        HeadlessProject {
            session: client,
            settings_observer,
            fs,
            worktree_store,
            buffer_store,
            lsp_store,
            task_store,
            dap_store,
            next_entry_id: Default::default(),
            languages,
            extensions,
            git_store,
        }
    }

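    /// Forwards locally-generated buffer operations to the client so its copy
    /// of the buffer stays in sync.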
    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Operation {
                operation,
                is_local: true,
            } => cx
                .background_spawn(self.session.request(proto::UpdateBuffer {
                    project_id: SSH_PROJECT_ID,
                    buffer_id: buffer.read(cx).remote_id().to_proto(),
                    operations: vec![serialize_operation(operation)],
                }))
                .detach(),
            _ => {}
        }
    }

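    /// Relays LSP store events (server updates, logs, toasts, and prompts)
    /// to the client over the SSH channel.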
    fn on_lsp_store_event(
        &mut self,
        _lsp_store: Entity<LspStore>,
        event: &LspStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            LspStoreEvent::LanguageServerUpdate {
                language_server_id,
                message,
            } => {
                self.session
                    .send(proto::UpdateLanguageServer {
                        project_id: SSH_PROJECT_ID,
                        language_server_id: language_server_id.to_proto(),
                        variant: Some(message.clone()),
                    })
                    .log_err();
            }
            LspStoreEvent::Notification(message) => {
                self.session
                    .send(proto::Toast {
                        project_id: SSH_PROJECT_ID,
                        notification_id: "lsp".to_string(),
                        message: message.clone(),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerLog(language_server_id, log_type, message) => {
                self.session
                    .send(proto::LanguageServerLog {
                        project_id: SSH_PROJECT_ID,
                        language_server_id: language_server_id.to_proto(),
                        message: message.clone(),
                        log_type: Some(log_type.to_proto()),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerPrompt(prompt) => {
                let request = self.session.request(proto::LanguageServerPromptRequest {
                    project_id: SSH_PROJECT_ID,
                    actions: prompt
                        .actions
                        .iter()
                        .map(|action| action.title.to_string())
                        .collect(),
                    level: Some(prompt_to_proto(&prompt)),
                    lsp_name: prompt.lsp_name.clone(),
                    message: prompt.message.clone(),
                });
                let prompt = prompt.clone();
                cx.background_spawn(async move {
                    let response = request.await?;
                    if let Some(action_response) = response.action_response {
                        prompt.respond(action_response as usize).await;
                    }
                    anyhow::Ok(())
                })
                .detach();
            }
            _ => {}
        }
    }

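    /// Handles `AddWorktree`: expands and canonicalizes the requested path,
    /// creates a local worktree for it, and returns the worktree id along with
    /// the canonicalized path. If the path does not exist, its parent is
    /// canonicalized instead and the file name re-appended.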
    pub async fn handle_add_worktree(
        this: Entity<Self>,
        message: TypedEnvelope<proto::AddWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::AddWorktreeResponse> {
        use client::ErrorCodeExt;
        let fs = this.read_with(&mut cx, |this, _| this.fs.clone())?;
        let path = PathBuf::from_proto(shellexpand::tilde(&message.payload.path).to_string());

        let canonicalized = match fs.canonicalize(&path).await {
            Ok(path) => path,
            Err(e) => {
                let mut parent = path
                    .parent()
                    .ok_or(e)
                    .with_context(|| format!("{path:?} does not exist"))?;
                if parent == Path::new("") {
                    parent = util::paths::home_dir();
                }
                let parent = fs.canonicalize(parent).await.map_err(|_| {
                    anyhow!(
                        proto::ErrorCode::DevServerProjectPathDoesNotExist
                            .with_tag("path", &path.to_string_lossy().as_ref())
                    )
                })?;
                parent.join(path.file_name().unwrap())
            }
        };

        let worktree = this
            .read_with(&mut cx.clone(), |this, _| {
                Worktree::local(
                    Arc::from(canonicalized.as_path()),
                    message.payload.visible,
                    this.fs.clone(),
                    this.next_entry_id.clone(),
                    &mut cx,
                )
            })?
            .await?;

        let response = this.read_with(&mut cx, |_, cx| {
            let worktree = worktree.read(cx);
            proto::AddWorktreeResponse {
                worktree_id: worktree.id().to_proto(),
                canonicalized_path: canonicalized.to_proto(),
            }
        })?;

        // We spawn this asynchronously so that we can send the response back
        // *before* `worktree_store.add()` sends out UpdateProject requests
        // to the client about the new worktree.
        //
        // That lets the client set up its handle to the newly-added worktree
        // before it gets interrupted by an UpdateProject request.
        //
        // This fixes a race: the client sends AddWorktree, the headless project
        // sends out a project update, the client receives it and immediately
        // drops its reference to the new worktree (causing it to be dropped on
        // the headless project), and only then receives the response to
        // AddWorktree.
        cx.spawn(async move |cx| {
            this.update(cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.add(&worktree, cx);
                });
            })
            .log_err();
        })
        .detach();

        Ok(response)
    }

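    /// Handles `RemoveWorktree`: removes the worktree with the given id from
    /// the worktree store.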
    pub async fn handle_remove_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        this.update(&mut cx, |this, cx| {
            this.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.remove_worktree(worktree_id, cx);
            });
        })?;
        Ok(proto::Ack {})
    }

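    /// Handles `OpenBufferByPath`: opens the buffer at the given project path,
    /// registers it with the client peer, and returns its remote id.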
    pub async fn handle_open_buffer_by_path(
        this: Entity<Self>,
        message: TypedEnvelope<proto::OpenBufferByPath>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: Arc::<Path>::from_proto(message.payload.path),
                    },
                    cx,
                )
            });
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

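    /// Handles `OpenNewBuffer`: creates an empty untitled buffer, registers it
    /// with the client peer, and returns its remote id.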
    pub async fn handle_open_new_buffer(
        this: Entity<Self>,
        _message: TypedEnvelope<proto::OpenNewBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this
                .buffer_store
                .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx));
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

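    /// Handles `OpenServerSettings`: opens the server's settings file as a
    /// buffer, seeding it with the initial settings content if it is empty,
    /// registers it with the client peer, and returns its remote id.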
    pub async fn handle_open_server_settings(
        this: Entity<Self>,
        _: TypedEnvelope<proto::OpenServerSettings>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let settings_path = paths::settings_file();
        let (worktree, path) = this
            .update(&mut cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.find_or_create_worktree(settings_path, false, cx)
                })
            })?
            .await?;

        let (buffer, buffer_store) = this.update(&mut cx, |this, cx| {
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id: worktree.read(cx).id(),
                        path: path.into(),
                    },
                    cx,
                )
            });

            (buffer, this.buffer_store.clone())
        })?;

        let buffer = buffer.await?;

        let buffer_id = cx.update(|cx| {
            if buffer.read(cx).is_empty() {
                buffer.update(cx, |buffer, cx| {
                    buffer.edit([(0..0, initial_server_settings_content())], None, cx)
                });
            }

            let buffer_id = buffer.read(cx).remote_id();

            buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                    .detach_and_log_err(cx);
            });

            buffer_id
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

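    /// Handles `FindSearchCandidates`: runs the search query against the
    /// buffer store, registers each candidate buffer with the client peer, and
    /// returns the candidate buffer ids.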
    pub async fn handle_find_search_candidates(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::FindSearchCandidates>,
        mut cx: AsyncApp,
    ) -> Result<proto::FindSearchCandidatesResponse> {
        let message = envelope.payload;
        let query = SearchQuery::from_proto(message.query.context("missing query field")?)?;
        let results = this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.find_search_candidates(&query, message.limit as _, this.fs.clone(), cx)
            })
        })?;

        let mut response = proto::FindSearchCandidatesResponse {
            buffer_ids: Vec::new(),
        };

        let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone())?;

        while let Ok(buffer) = results.recv().await {
            let buffer_id = buffer.read_with(&mut cx, |this, _| this.remote_id())?;
            response.buffer_ids.push(buffer_id.to_proto());
            buffer_store
                .update(&mut cx, |buffer_store, cx| {
                    buffer_store.create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                })?
                .await?;
        }

        Ok(response)
    }

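    /// Handles `ListRemoteDirectory`: lists the entries of the given directory
    /// after tilde expansion, optionally reporting whether each entry is a
    /// directory.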
    pub async fn handle_list_remote_directory(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ListRemoteDirectory>,
        cx: AsyncApp,
    ) -> Result<proto::ListRemoteDirectoryResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = PathBuf::from_proto(shellexpand::tilde(&envelope.payload.path).to_string());
        let check_info = envelope
            .payload
            .config
            .as_ref()
            .is_some_and(|config| config.is_dir);

        let mut entries = Vec::new();
        let mut entry_info = Vec::new();
        let mut response = fs.read_dir(&expanded).await?;
        while let Some(path) = response.next().await {
            let path = path?;
            if let Some(file_name) = path.file_name() {
                entries.push(file_name.to_string_lossy().to_string());
                if check_info {
                    let is_dir = fs.is_dir(&path).await;
                    entry_info.push(proto::EntryInfo { is_dir });
                }
            }
        }
        Ok(proto::ListRemoteDirectoryResponse {
            entries,
            entry_info,
        })
    }

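    /// Handles `GetPathMetadata`: reports whether the given path exists and
    /// whether it is a directory, along with the tilde-expanded path.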
    pub async fn handle_get_path_metadata(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPathMetadata>,
        cx: AsyncApp,
    ) -> Result<proto::GetPathMetadataResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = PathBuf::from_proto(shellexpand::tilde(&envelope.payload.path).to_string());

        let metadata = fs.metadata(&expanded).await?;
        let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false);

        Ok(proto::GetPathMetadataResponse {
            exists: metadata.is_some(),
            is_dir,
            path: expanded.to_proto(),
        })
    }

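    /// Handles `ShutdownRemoteServer`: acknowledges the request, then shuts
    /// down and quits the application from a spawned task so the response can
    /// be sent before the process exits.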
    pub async fn handle_shutdown_remote_server(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::ShutdownRemoteServer>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        cx.spawn(async move |cx| {
            cx.update(|cx| {
                // TODO: This is a hack, because in a headless project, shutdown isn't executed
                // when calling quit, but it should be.
                cx.shutdown();
                cx.quit();
            })
        })
        .detach();

        Ok(proto::Ack {})
    }

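    /// Handles `Ping`: responds with an `Ack` so the client can confirm the
    /// connection is alive.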
    pub async fn handle_ping(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::Ping>,
        _cx: AsyncApp,
    ) -> Result<proto::Ack> {
        log::debug!("Received ping from client");
        Ok(proto::Ack {})
    }
}

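/// Converts a language server prompt's `PromptLevel` into its proto
/// representation.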
fn prompt_to_proto(
    prompt: &project::LanguageServerPromptRequest,
) -> proto::language_server_prompt_request::Level {
    match prompt.level {
        PromptLevel::Info => proto::language_server_prompt_request::Level::Info(
            proto::language_server_prompt_request::Info {},
        ),
        PromptLevel::Warning => proto::language_server_prompt_request::Level::Warning(
            proto::language_server_prompt_request::Warning {},
        ),
        PromptLevel::Critical => proto::language_server_prompt_request::Level::Critical(
            proto::language_server_prompt_request::Critical {},
        ),
    }
}