use anyhow::{anyhow, Context as _, Result};
use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
use fs::{CreateOptions, Fs};
use futures::channel::mpsc;
use git::{repository::RepoPath, COMMIT_MESSAGE};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel, SharedString};
use http_client::HttpClient;
use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry};
use node_runtime::NodeRuntime;
use project::{
    buffer_store::{BufferStore, BufferStoreEvent},
    git::{GitRepo, GitState, RepositoryHandle},
    project_settings::SettingsObserver,
    search::SearchQuery,
    task_store::TaskStore,
    worktree_store::WorktreeStore,
    LspStore, LspStoreEvent, PrettierStore, ProjectEntryId, ProjectPath, ToolchainStore,
    WorktreeId,
};
use remote::ssh_session::ChannelClient;
use rpc::{
    proto::{self, SSH_PEER_ID, SSH_PROJECT_ID},
    AnyProtoClient, TypedEnvelope,
};

use settings::initial_server_settings_content;
use smol::stream::StreamExt;
use std::{
    path::{Path, PathBuf},
    sync::{atomic::AtomicUsize, Arc},
};
use util::ResultExt;
use worktree::Worktree;

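/// Server-side state for a project opened over SSH. It owns the worktree,
/// buffer, LSP, task, settings, extension, and git stores on the remote
/// machine and shares them with the client over the SSH session.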
pub struct HeadlessProject {
    pub fs: Arc<dyn Fs>,
    pub session: AnyProtoClient,
    pub worktree_store: Entity<WorktreeStore>,
    pub buffer_store: Entity<BufferStore>,
    pub lsp_store: Entity<LspStore>,
    pub task_store: Entity<TaskStore>,
    pub settings_observer: Entity<SettingsObserver>,
    pub next_entry_id: Arc<AtomicUsize>,
    pub languages: Arc<LanguageRegistry>,
    pub extensions: Entity<HeadlessExtensionStore>,
    pub git_state: Entity<GitState>,
}

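/// Everything needed to construct a [`HeadlessProject`]: the SSH session plus
/// the shared filesystem, HTTP, Node, language, and extension services.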
pub struct HeadlessAppState {
    pub session: Arc<ChannelClient>,
    pub fs: Arc<dyn Fs>,
    pub http_client: Arc<dyn HttpClient>,
    pub node_runtime: NodeRuntime,
    pub languages: Arc<LanguageRegistry>,
    pub extension_host_proxy: Arc<ExtensionHostProxy>,
}

impl HeadlessProject {
    pub fn init(cx: &mut App) {
        settings::init(cx);
        language::init(cx);
        project::Project::init_settings(cx);
    }

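    /// Builds the project: each store is created locally, marked as shared
    /// with [`SSH_PROJECT_ID`], and its RPC handlers are registered on the
    /// session so the client can drive it remotely.
    ///
    /// Illustrative construction sketch (not part of this module; assumes an
    /// ambient gpui `cx` and an already-assembled `HeadlessAppState` named
    /// `state`):
    ///
    /// ```ignore
    /// let project = cx.new(|cx| HeadlessProject::new(state, cx));
    /// ```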
    pub fn new(
        HeadlessAppState {
            session,
            fs,
            http_client,
            node_runtime,
            languages,
            extension_host_proxy: proxy,
        }: HeadlessAppState,
        cx: &mut Context<Self>,
    ) -> Self {
        language_extension::init(proxy.clone(), languages.clone());
        languages::init(languages.clone(), node_runtime.clone(), cx);

        let worktree_store = cx.new(|cx| {
            let mut store = WorktreeStore::local(true, fs.clone());
            store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            store
        });

        let git_state = cx.new(|cx| GitState::new(&worktree_store, None, None, cx));

        let buffer_store = cx.new(|cx| {
            let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
            buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            buffer_store
        });
        let prettier_store = cx.new(|cx| {
            PrettierStore::new(
                node_runtime.clone(),
                fs.clone(),
                languages.clone(),
                worktree_store.clone(),
                cx,
            )
        });
        let environment = project::ProjectEnvironment::new(&worktree_store, None, cx);
        let toolchain_store = cx.new(|cx| {
            ToolchainStore::local(
                languages.clone(),
                worktree_store.clone(),
                environment.clone(),
                cx,
            )
        });

        let task_store = cx.new(|cx| {
            let mut task_store = TaskStore::local(
                fs.clone(),
                buffer_store.downgrade(),
                worktree_store.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                environment.clone(),
                cx,
            );
            task_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            task_store
        });
        let settings_observer = cx.new(|cx| {
            let mut observer = SettingsObserver::new_local(
                fs.clone(),
                worktree_store.clone(),
                task_store.clone(),
                cx,
            );
            observer.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            observer
        });

        let lsp_store = cx.new(|cx| {
            let mut lsp_store = LspStore::new_local(
                buffer_store.clone(),
                worktree_store.clone(),
                prettier_store.clone(),
                toolchain_store.clone(),
                environment,
                languages.clone(),
                http_client.clone(),
                fs.clone(),
                cx,
            );
            lsp_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            lsp_store
        });

        cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();

        cx.subscribe(
            &buffer_store,
            |_this, _buffer_store, event, cx| match event {
                BufferStoreEvent::BufferAdded(buffer) => {
                    cx.subscribe(buffer, Self::on_buffer_event).detach();
                }
                _ => {}
            },
        )
        .detach();

        let extensions = HeadlessExtensionStore::new(
            fs.clone(),
            http_client.clone(),
            paths::remote_extensions_dir().to_path_buf(),
            proxy,
            node_runtime,
            cx,
        );

        let client: AnyProtoClient = session.clone().into();

        // local_machine -> ssh handlers
        session.subscribe_to_entity(SSH_PROJECT_ID, &worktree_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &buffer_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &cx.entity());
        session.subscribe_to_entity(SSH_PROJECT_ID, &lsp_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &task_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &toolchain_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer);

        client.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
        client.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
        client.add_request_handler(cx.weak_entity(), Self::handle_shutdown_remote_server);
        client.add_request_handler(cx.weak_entity(), Self::handle_ping);

        client.add_model_request_handler(Self::handle_add_worktree);
        client.add_request_handler(cx.weak_entity(), Self::handle_remove_worktree);

        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_open_new_buffer);
        client.add_model_request_handler(Self::handle_find_search_candidates);
        client.add_model_request_handler(Self::handle_open_server_settings);

        client.add_model_request_handler(BufferStore::handle_update_buffer);
        client.add_model_message_handler(BufferStore::handle_close_buffer);

        client.add_model_request_handler(Self::handle_stage);
        client.add_model_request_handler(Self::handle_unstage);
        client.add_model_request_handler(Self::handle_commit);
        client.add_model_request_handler(Self::handle_open_commit_message_buffer);

        client.add_request_handler(
            extensions.clone().downgrade(),
            HeadlessExtensionStore::handle_sync_extensions,
        );
        client.add_request_handler(
            extensions.clone().downgrade(),
            HeadlessExtensionStore::handle_install_extension,
        );

        BufferStore::init(&client);
        WorktreeStore::init(&client);
        SettingsObserver::init(&client);
        LspStore::init(&client);
        TaskStore::init(Some(&client));
        ToolchainStore::init(&client);

        HeadlessProject {
            session: client,
            settings_observer,
            fs,
            worktree_store,
            buffer_store,
            lsp_store,
            task_store,
            next_entry_id: Default::default(),
            languages,
            extensions,
            git_state,
        }
    }

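    /// Forwards buffer operations that originate locally to the client as
    /// `proto::UpdateBuffer` requests.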
    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Operation {
                operation,
                is_local: true,
            } => cx
                .background_executor()
                .spawn(self.session.request(proto::UpdateBuffer {
                    project_id: SSH_PROJECT_ID,
                    buffer_id: buffer.read(cx).remote_id().to_proto(),
                    operations: vec![serialize_operation(operation)],
                }))
                .detach(),
            _ => {}
        }
    }

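    /// Relays language-server activity (status updates, notifications, logs,
    /// and interactive prompts) from the local `LspStore` to the client.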
    fn on_lsp_store_event(
        &mut self,
        _lsp_store: Entity<LspStore>,
        event: &LspStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            LspStoreEvent::LanguageServerUpdate {
                language_server_id,
                message,
            } => {
                self.session
                    .send(proto::UpdateLanguageServer {
                        project_id: SSH_PROJECT_ID,
                        language_server_id: language_server_id.to_proto(),
                        variant: Some(message.clone()),
                    })
                    .log_err();
            }
            LspStoreEvent::Notification(message) => {
                self.session
                    .send(proto::Toast {
                        project_id: SSH_PROJECT_ID,
                        notification_id: "lsp".to_string(),
                        message: message.clone(),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerLog(language_server_id, log_type, message) => {
                self.session
                    .send(proto::LanguageServerLog {
                        project_id: SSH_PROJECT_ID,
                        language_server_id: language_server_id.to_proto(),
                        message: message.clone(),
                        log_type: Some(log_type.to_proto()),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerPrompt(prompt) => {
                let request = self.session.request(proto::LanguageServerPromptRequest {
                    project_id: SSH_PROJECT_ID,
                    actions: prompt
                        .actions
                        .iter()
                        .map(|action| action.title.to_string())
                        .collect(),
                    level: Some(prompt_to_proto(&prompt)),
                    lsp_name: prompt.lsp_name.clone(),
                    message: prompt.message.clone(),
                });
                let prompt = prompt.clone();
                cx.background_executor()
                    .spawn(async move {
                        let response = request.await?;
                        if let Some(action_response) = response.action_response {
                            prompt.respond(action_response as usize).await;
                        }
                        anyhow::Ok(())
                    })
                    .detach();
            }
            _ => {}
        }
    }

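    /// Adds a worktree rooted at the requested path, canonicalizing it first
    /// (falling back to the canonicalized parent when the path itself does
    /// not exist yet).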
    pub async fn handle_add_worktree(
        this: Entity<Self>,
        message: TypedEnvelope<proto::AddWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::AddWorktreeResponse> {
        use client::ErrorCodeExt;
        let path = shellexpand::tilde(&message.payload.path).to_string();

        let fs = this.read_with(&mut cx, |this, _| this.fs.clone())?;
        let path = PathBuf::from(path);

        let canonicalized = match fs.canonicalize(&path).await {
            Ok(path) => path,
            Err(e) => {
                let mut parent = path
                    .parent()
                    .ok_or(e)
                    .map_err(|_| anyhow!("{:?} does not exist", path))?;
                if parent == Path::new("") {
                    parent = util::paths::home_dir();
                }
                let parent = fs.canonicalize(parent).await.map_err(|_| {
                    anyhow!(proto::ErrorCode::DevServerProjectPathDoesNotExist
                        .with_tag("path", &path.to_string_lossy().as_ref()))
                })?;
                parent.join(path.file_name().unwrap())
            }
        };

        let worktree = this
            .update(&mut cx.clone(), |this, _| {
                Worktree::local(
                    Arc::from(canonicalized.as_path()),
                    message.payload.visible,
                    this.fs.clone(),
                    this.next_entry_id.clone(),
                    &mut cx,
                )
            })?
            .await?;

        let response = this.update(&mut cx, |_, cx| {
            worktree.update(cx, |worktree, _| proto::AddWorktreeResponse {
                worktree_id: worktree.id().to_proto(),
                canonicalized_path: canonicalized.to_string_lossy().to_string(),
            })
        })?;

        // We spawn this asynchronously, so that we can send the response back
        // *before* `worktree_store.add()` can send out UpdateProject requests
        // to the client about the new worktree.
        //
        // That lets the client manage the references/handles of the newly-added
        // worktree before getting interrupted by an UpdateProject request.
        //
        // This fixes a race: the client sends AddWorktree, the headless project
        // sends out a project update, the client receives it and immediately
        // drops its reference to the new worktree (causing it to be dropped on
        // the headless project), and only then does the client receive the
        // AddWorktree response.
        cx.spawn(|mut cx| async move {
            this.update(&mut cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.add(&worktree, cx);
                });
            })
            .log_err();
        })
        .detach();

        Ok(response)
    }

    pub async fn handle_remove_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        this.update(&mut cx, |this, cx| {
            this.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.remove_worktree(worktree_id, cx);
            });
        })?;
        Ok(proto::Ack {})
    }

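    /// Opens a buffer for the requested project path and registers it with the
    /// client peer so its contents and edits can be synchronized.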
    pub async fn handle_open_buffer_by_path(
        this: Entity<Self>,
        message: TypedEnvelope<proto::OpenBufferByPath>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: PathBuf::from(message.payload.path).into(),
                    },
                    cx,
                )
            });
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

    pub async fn handle_open_new_buffer(
        this: Entity<Self>,
        _message: TypedEnvelope<proto::OpenNewBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this
                .buffer_store
                .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx));
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

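    /// Opens the remote server's settings file as a buffer, seeding it with
    /// the initial server settings content when the file is empty.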
    pub async fn handle_open_server_settings(
        this: Entity<Self>,
        _: TypedEnvelope<proto::OpenServerSettings>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let settings_path = paths::settings_file();
        let (worktree, path) = this
            .update(&mut cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.find_or_create_worktree(settings_path, false, cx)
                })
            })?
            .await?;

        let (buffer, buffer_store) = this.update(&mut cx, |this, cx| {
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id: worktree.read(cx).id(),
                        path: path.into(),
                    },
                    cx,
                )
            });

            (buffer, this.buffer_store.clone())
        })?;

        let buffer = buffer.await?;

        let buffer_id = cx.update(|cx| {
            if buffer.read(cx).is_empty() {
                buffer.update(cx, |buffer, cx| {
                    buffer.edit([(0..0, initial_server_settings_content())], None, cx)
                });
            }

            let buffer_id = buffer.read_with(cx, |b, _| b.remote_id());

            buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                    .detach_and_log_err(cx);
            });

            buffer_id
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

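    /// Streams search-candidate buffers back to the client: each candidate is
    /// opened locally, registered for the client peer, and its id collected
    /// into the response.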
    pub async fn handle_find_search_candidates(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::FindSearchCandidates>,
        mut cx: AsyncApp,
    ) -> Result<proto::FindSearchCandidatesResponse> {
        let message = envelope.payload;
        let query = SearchQuery::from_proto(
            message
                .query
                .ok_or_else(|| anyhow!("missing query field"))?,
        )?;
        let results = this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.find_search_candidates(&query, message.limit as _, this.fs.clone(), cx)
            })
        })?;

        let mut response = proto::FindSearchCandidatesResponse {
            buffer_ids: Vec::new(),
        };

        let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone())?;

        while let Ok(buffer) = results.recv().await {
            let buffer_id = buffer.update(&mut cx, |this, _| this.remote_id())?;
            response.buffer_ids.push(buffer_id.to_proto());
            buffer_store
                .update(&mut cx, |buffer_store, cx| {
                    buffer_store.create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                })?
                .await?;
        }

        Ok(response)
    }

    pub async fn handle_list_remote_directory(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ListRemoteDirectory>,
        cx: AsyncApp,
    ) -> Result<proto::ListRemoteDirectoryResponse> {
        let expanded = shellexpand::tilde(&envelope.payload.path).to_string();
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;

        let mut entries = Vec::new();
        let mut response = fs.read_dir(Path::new(&expanded)).await?;
        while let Some(path) = response.next().await {
            if let Some(file_name) = path?.file_name() {
                entries.push(file_name.to_string_lossy().to_string());
            }
        }
        Ok(proto::ListRemoteDirectoryResponse { entries })
    }

    pub async fn handle_get_path_metadata(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPathMetadata>,
        cx: AsyncApp,
    ) -> Result<proto::GetPathMetadataResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = shellexpand::tilde(&envelope.payload.path).to_string();

        let metadata = fs.metadata(&PathBuf::from(expanded.clone())).await?;
        let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false);

        Ok(proto::GetPathMetadataResponse {
            exists: metadata.is_some(),
            is_dir,
            path: expanded,
        })
    }

    pub async fn handle_shutdown_remote_server(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::ShutdownRemoteServer>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        cx.spawn(|cx| async move {
            cx.update(|cx| {
                // TODO: This is a hack, because in a headless project, shutdown isn't executed
                // when calling quit, but it should be.
                cx.shutdown();
                cx.quit();
            })
        })
        .detach();

        Ok(proto::Ack {})
    }

    pub async fn handle_ping(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::Ping>,
        _cx: AsyncApp,
    ) -> Result<proto::Ack> {
        log::debug!("Received ping from client");
        Ok(proto::Ack {})
    }

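    /// Stages the requested paths in the repository identified by the request,
    /// reporting the first error (if any) back to the caller.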
    async fn handle_stage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle =
            Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();
        let (err_sender, mut err_receiver) = mpsc::channel(1);
        repository_handle
            .stage_entries(entries, err_sender)
            .context("staging entries")?;
        if let Some(error) = err_receiver.next().await {
            Err(error.context("error during staging"))
        } else {
            Ok(proto::Ack {})
        }
    }

    async fn handle_unstage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Unstage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle =
            Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();
        let (err_sender, mut err_receiver) = mpsc::channel(1);
        repository_handle
            .unstage_entries(entries, err_sender)
            .context("unstaging entries")?;
        if let Some(error) = err_receiver.next().await {
            Err(error.context("error during unstaging"))
        } else {
            Ok(proto::Ack {})
        }
    }

    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle =
            Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;

        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let (err_sender, mut err_receiver) = mpsc::channel(1);
        cx.update(|cx| {
            repository_handle
                .commit(name.zip(email), err_sender, cx)
                .context("committing")
        })??;
        if let Some(error) = err_receiver.next().await {
            Err(error.context("error during commit"))
        } else {
            Ok(proto::Ack {})
        }
    }

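    /// Ensures the repository's commit message file exists, then opens it as a
    /// buffer and registers it with the client peer. Only local repositories
    /// are supported here.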
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle =
            Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
        let git_repository = match &repository_handle.git_repo {
            GitRepo::Local(git_repository) => git_repository.clone(),
            GitRepo::Remote { .. } => {
                anyhow::bail!("Cannot handle open commit message buffer for remote git repo")
            }
        };
        let commit_message_file = git_repository.dot_git_dir().join(*COMMIT_MESSAGE);
        let fs = this.update(&mut cx, |headless_project, _| headless_project.fs.clone())?;
        fs.create_file(
            &commit_message_file,
            CreateOptions {
                overwrite: false,
                ignore_if_exists: true,
            },
        )
        .await
        .with_context(|| format!("creating commit message file {commit_message_file:?}"))?;

        let (worktree, relative_path) = this
            .update(&mut cx, |headless_project, cx| {
                headless_project
                    .worktree_store
                    .update(cx, |worktree_store, cx| {
                        worktree_store.find_or_create_worktree(&commit_message_file, false, cx)
                    })
            })?
            .await
            .with_context(|| {
                format!("deriving worktree for commit message file {commit_message_file:?}")
            })?;

        let buffer = this
            .update(&mut cx, |headless_project, cx| {
                headless_project
                    .buffer_store
                    .update(cx, |buffer_store, cx| {
                        buffer_store.open_buffer(
                            ProjectPath {
                                worktree_id: worktree.read(cx).id(),
                                path: Arc::from(relative_path),
                            },
                            cx,
                        )
                    })
            })
            .with_context(|| {
                format!("opening buffer for commit message file {commit_message_file:?}")
            })?
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
        this.update(&mut cx, |headless_project, cx| {
            headless_project
                .buffer_store
                .update(cx, |buffer_store, cx| {
                    buffer_store
                        .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                        .detach_and_log_err(cx);
                })
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

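    /// Looks up the repository handle matching the worktree and work-directory
    /// entry referenced by a git request.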
    fn repository_for_request(
        this: &Entity<Self>,
        worktree_id: WorktreeId,
        work_directory_id: ProjectEntryId,
        cx: &mut AsyncApp,
    ) -> Result<RepositoryHandle> {
        this.update(cx, |project, cx| {
            let repository_handle = project
                .git_state
                .read(cx)
                .all_repositories()
                .into_iter()
                .find(|repository_handle| {
                    repository_handle.worktree_id == worktree_id
                        && repository_handle.repository_entry.work_directory_id()
                            == work_directory_id
                })
                .context("missing repository handle")?;
            anyhow::Ok(repository_handle)
        })?
    }
}

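/// Converts a GPUI [`PromptLevel`] into the corresponding protobuf prompt level.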
fn prompt_to_proto(
    prompt: &project::LanguageServerPromptRequest,
) -> proto::language_server_prompt_request::Level {
    match prompt.level {
        PromptLevel::Info => proto::language_server_prompt_request::Level::Info(
            proto::language_server_prompt_request::Info {},
        ),
        PromptLevel::Warning => proto::language_server_prompt_request::Level::Warning(
            proto::language_server_prompt_request::Warning {},
        ),
        PromptLevel::Critical => proto::language_server_prompt_request::Level::Critical(
            proto::language_server_prompt_request::Critical {},
        ),
    }
}