use anyhow::{anyhow, Context as _, Result};
use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
use fs::{CreateOptions, Fs};
use futures::channel::mpsc;
use git::{repository::RepoPath, COMMIT_MESSAGE};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel, SharedString};
use http_client::HttpClient;
use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry};
use node_runtime::NodeRuntime;
use project::{
    buffer_store::{BufferStore, BufferStoreEvent},
    git::{GitRepo, GitState, RepositoryHandle},
    project_settings::SettingsObserver,
    search::SearchQuery,
    task_store::TaskStore,
    worktree_store::WorktreeStore,
    LspStore, LspStoreEvent, PrettierStore, ProjectEntryId, ProjectPath, ToolchainStore,
    WorktreeId,
};
use remote::ssh_session::ChannelClient;
use rpc::{
    proto::{self, SSH_PEER_ID, SSH_PROJECT_ID},
    AnyProtoClient, TypedEnvelope,
};
use settings::initial_server_settings_content;
use smol::stream::StreamExt;
use std::{
    path::{Path, PathBuf},
    sync::{atomic::AtomicUsize, Arc},
};
use util::ResultExt;
use worktree::Worktree;

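/// State for a headless project running on the remote (SSH server) side.
/// It owns the same stores as a local `Project` and proxies their updates
/// to the client over the SSH session.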
pub struct HeadlessProject {
    pub fs: Arc<dyn Fs>,
    pub session: AnyProtoClient,
    pub worktree_store: Entity<WorktreeStore>,
    pub buffer_store: Entity<BufferStore>,
    pub lsp_store: Entity<LspStore>,
    pub task_store: Entity<TaskStore>,
    pub settings_observer: Entity<SettingsObserver>,
    pub next_entry_id: Arc<AtomicUsize>,
    pub languages: Arc<LanguageRegistry>,
    pub extensions: Entity<HeadlessExtensionStore>,
    pub git_state: Entity<GitState>,
}

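/// Dependencies required to construct a [`HeadlessProject`].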
pub struct HeadlessAppState {
    pub session: Arc<ChannelClient>,
    pub fs: Arc<dyn Fs>,
    pub http_client: Arc<dyn HttpClient>,
    pub node_runtime: NodeRuntime,
    pub languages: Arc<LanguageRegistry>,
    pub extension_host_proxy: Arc<ExtensionHostProxy>,
}

impl HeadlessProject {
    pub fn init(cx: &mut App) {
        settings::init(cx);
        language::init(cx);
        project::Project::init_settings(cx);
    }

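    /// Builds the headless project: creates the worktree, buffer, LSP, task,
    /// settings, toolchain, and extension stores, shares them over the SSH
    /// session as `SSH_PROJECT_ID`, and registers the RPC message handlers.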
    pub fn new(
        HeadlessAppState {
            session,
            fs,
            http_client,
            node_runtime,
            languages,
            extension_host_proxy: proxy,
        }: HeadlessAppState,
        cx: &mut Context<Self>,
    ) -> Self {
        language_extension::init(proxy.clone(), languages.clone());
        languages::init(languages.clone(), node_runtime.clone(), cx);

        let worktree_store = cx.new(|cx| {
            let mut store = WorktreeStore::local(true, fs.clone());
            store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            store
        });

        let git_state = cx.new(|cx| GitState::new(&worktree_store, None, None, cx));

        let buffer_store = cx.new(|cx| {
            let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
            buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            buffer_store
        });
        let prettier_store = cx.new(|cx| {
            PrettierStore::new(
                node_runtime.clone(),
                fs.clone(),
                languages.clone(),
                worktree_store.clone(),
                cx,
            )
        });
        let environment = project::ProjectEnvironment::new(&worktree_store, None, cx);
        let toolchain_store = cx.new(|cx| {
            ToolchainStore::local(
                languages.clone(),
                worktree_store.clone(),
                environment.clone(),
                cx,
            )
        });

        let task_store = cx.new(|cx| {
            let mut task_store = TaskStore::local(
                fs.clone(),
                buffer_store.downgrade(),
                worktree_store.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                environment.clone(),
                cx,
            );
            task_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            task_store
        });
        let settings_observer = cx.new(|cx| {
            let mut observer = SettingsObserver::new_local(
                fs.clone(),
                worktree_store.clone(),
                task_store.clone(),
                cx,
            );
            observer.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            observer
        });

        let lsp_store = cx.new(|cx| {
            let mut lsp_store = LspStore::new_local(
                buffer_store.clone(),
                worktree_store.clone(),
                prettier_store.clone(),
                toolchain_store.clone(),
                environment,
                languages.clone(),
                http_client.clone(),
                fs.clone(),
                cx,
            );
            lsp_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            lsp_store
        });

        cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();

        cx.subscribe(
            &buffer_store,
            |_this, _buffer_store, event, cx| match event {
                BufferStoreEvent::BufferAdded(buffer) => {
                    cx.subscribe(buffer, Self::on_buffer_event).detach();
                }
                _ => {}
            },
        )
        .detach();

        let extensions = HeadlessExtensionStore::new(
            fs.clone(),
            http_client.clone(),
            paths::remote_extensions_dir().to_path_buf(),
            proxy,
            node_runtime,
            cx,
        );

        let client: AnyProtoClient = session.clone().into();

        // local_machine -> ssh handlers
        session.subscribe_to_entity(SSH_PROJECT_ID, &worktree_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &buffer_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &cx.entity());
        session.subscribe_to_entity(SSH_PROJECT_ID, &lsp_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &task_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &toolchain_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer);

        client.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
        client.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
        client.add_request_handler(cx.weak_entity(), Self::handle_shutdown_remote_server);
        client.add_request_handler(cx.weak_entity(), Self::handle_ping);

        client.add_model_request_handler(Self::handle_add_worktree);
        client.add_request_handler(cx.weak_entity(), Self::handle_remove_worktree);

        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_open_new_buffer);
        client.add_model_request_handler(Self::handle_find_search_candidates);
        client.add_model_request_handler(Self::handle_open_server_settings);

        client.add_model_request_handler(BufferStore::handle_update_buffer);
        client.add_model_message_handler(BufferStore::handle_close_buffer);

        client.add_model_request_handler(Self::handle_stage);
        client.add_model_request_handler(Self::handle_unstage);
        client.add_model_request_handler(Self::handle_commit);
        client.add_model_request_handler(Self::handle_open_commit_message_buffer);

        client.add_request_handler(
            extensions.clone().downgrade(),
            HeadlessExtensionStore::handle_sync_extensions,
        );
        client.add_request_handler(
            extensions.clone().downgrade(),
            HeadlessExtensionStore::handle_install_extension,
        );

        BufferStore::init(&client);
        WorktreeStore::init(&client);
        SettingsObserver::init(&client);
        LspStore::init(&client);
        TaskStore::init(Some(&client));
        ToolchainStore::init(&client);

        HeadlessProject {
            session: client,
            settings_observer,
            fs,
            worktree_store,
            buffer_store,
            lsp_store,
            task_store,
            next_entry_id: Default::default(),
            languages,
            extensions,
            git_state,
        }
    }

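    /// Forwards locally-generated buffer operations to the client so its copy
    /// of the buffer stays in sync.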
    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Operation {
                operation,
                is_local: true,
            } => cx
                .background_executor()
                .spawn(self.session.request(proto::UpdateBuffer {
                    project_id: SSH_PROJECT_ID,
                    buffer_id: buffer.read(cx).remote_id().to_proto(),
                    operations: vec![serialize_operation(operation)],
                }))
                .detach(),
            _ => {}
        }
    }

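    /// Relays LSP store events (server updates, logs, toasts, and prompts) to
    /// the client; prompt responses are awaited and passed back to the prompt.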
    fn on_lsp_store_event(
        &mut self,
        _lsp_store: Entity<LspStore>,
        event: &LspStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            LspStoreEvent::LanguageServerUpdate {
                language_server_id,
                message,
            } => {
                self.session
                    .send(proto::UpdateLanguageServer {
                        project_id: SSH_PROJECT_ID,
                        language_server_id: language_server_id.to_proto(),
                        variant: Some(message.clone()),
                    })
                    .log_err();
            }
            LspStoreEvent::Notification(message) => {
                self.session
                    .send(proto::Toast {
                        project_id: SSH_PROJECT_ID,
                        notification_id: "lsp".to_string(),
                        message: message.clone(),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerLog(language_server_id, log_type, message) => {
                self.session
                    .send(proto::LanguageServerLog {
                        project_id: SSH_PROJECT_ID,
                        language_server_id: language_server_id.to_proto(),
                        message: message.clone(),
                        log_type: Some(log_type.to_proto()),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerPrompt(prompt) => {
                let request = self.session.request(proto::LanguageServerPromptRequest {
                    project_id: SSH_PROJECT_ID,
                    actions: prompt
                        .actions
                        .iter()
                        .map(|action| action.title.to_string())
                        .collect(),
                    level: Some(prompt_to_proto(&prompt)),
                    lsp_name: prompt.lsp_name.clone(),
                    message: prompt.message.clone(),
                });
                let prompt = prompt.clone();
                cx.background_executor()
                    .spawn(async move {
                        let response = request.await?;
                        if let Some(action_response) = response.action_response {
                            prompt.respond(action_response as usize).await;
                        }
                        anyhow::Ok(())
                    })
                    .detach();
            }
            _ => {}
        }
    }

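    /// Adds a worktree for the requested path, expanding `~` and
    /// canonicalizing it (falling back to the canonicalized parent directory
    /// when the path itself does not exist yet).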
    pub async fn handle_add_worktree(
        this: Entity<Self>,
        message: TypedEnvelope<proto::AddWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::AddWorktreeResponse> {
        use client::ErrorCodeExt;
        let path = shellexpand::tilde(&message.payload.path).to_string();

        let fs = this.read_with(&mut cx, |this, _| this.fs.clone())?;
        let path = PathBuf::from(path);

        let canonicalized = match fs.canonicalize(&path).await {
            Ok(path) => path,
            Err(e) => {
                let mut parent = path
                    .parent()
                    .ok_or(e)
                    .map_err(|_| anyhow!("{:?} does not exist", path))?;
                if parent == Path::new("") {
                    parent = util::paths::home_dir();
                }
                let parent = fs.canonicalize(parent).await.map_err(|_| {
                    anyhow!(proto::ErrorCode::DevServerProjectPathDoesNotExist
                        .with_tag("path", &path.to_string_lossy().as_ref()))
                })?;
                parent.join(path.file_name().unwrap())
            }
        };

        let worktree = this
            .update(&mut cx.clone(), |this, _| {
                Worktree::local(
                    Arc::from(canonicalized.as_path()),
                    message.payload.visible,
                    this.fs.clone(),
                    this.next_entry_id.clone(),
                    &mut cx,
                )
            })?
            .await?;

        let response = this.update(&mut cx, |_, cx| {
            worktree.update(cx, |worktree, _| proto::AddWorktreeResponse {
                worktree_id: worktree.id().to_proto(),
                canonicalized_path: canonicalized.to_string_lossy().to_string(),
            })
        })?;

        // We spawn this asynchronously, so that we can send the response back
        // *before* `worktree_store.add()` can send out UpdateProject requests
        // to the client about the new worktree.
        //
        // That lets the client manage the reference/handles of the newly-added
        // worktree, before getting interrupted by an UpdateProject request.
        //
        // This fixes the problem of the client sending the AddWorktree request,
        // the headless project sending out a project update, the client
        // receiving it and immediately dropping its reference to the new
        // worktree, causing the worktree to be dropped on the headless project,
        // and the client only then receiving a response to AddWorktree.
        cx.spawn(|mut cx| async move {
            this.update(&mut cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.add(&worktree, cx);
                });
            })
            .log_err();
        })
        .detach();

        Ok(response)
    }

    pub async fn handle_remove_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        this.update(&mut cx, |this, cx| {
            this.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.remove_worktree(worktree_id, cx);
            });
        })?;
        Ok(proto::Ack {})
    }

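    /// Opens a buffer for the given project path and registers it with the
    /// client peer before returning its id.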
    pub async fn handle_open_buffer_by_path(
        this: Entity<Self>,
        message: TypedEnvelope<proto::OpenBufferByPath>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: PathBuf::from(message.payload.path).into(),
                    },
                    message.payload.skip_file_contents,
                    cx,
                )
            });
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

    pub async fn handle_open_new_buffer(
        this: Entity<Self>,
        _message: TypedEnvelope<proto::OpenNewBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this
                .buffer_store
                .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx));
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

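    /// Opens the remote server's settings file as a buffer, seeding it with
    /// the initial settings content if the file is empty.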
    pub async fn handle_open_server_settings(
        this: Entity<Self>,
        _: TypedEnvelope<proto::OpenServerSettings>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let settings_path = paths::settings_file();
        let (worktree, path) = this
            .update(&mut cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.find_or_create_worktree(settings_path, false, cx)
                })
            })?
            .await?;

        let (buffer, buffer_store) = this.update(&mut cx, |this, cx| {
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id: worktree.read(cx).id(),
                        path: path.into(),
                    },
                    false,
                    cx,
                )
            });

            (buffer, this.buffer_store.clone())
        })?;

        let buffer = buffer.await?;

        let buffer_id = cx.update(|cx| {
            if buffer.read(cx).is_empty() {
                buffer.update(cx, |buffer, cx| {
                    buffer.edit([(0..0, initial_server_settings_content())], None, cx)
                });
            }

            let buffer_id = buffer.read_with(cx, |b, _| b.remote_id());

            buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                    .detach_and_log_err(cx);
            });

            buffer_id
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

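    /// Runs a project search and streams each candidate buffer to the client,
    /// returning the ids of all buffers that matched.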
    pub async fn handle_find_search_candidates(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::FindSearchCandidates>,
        mut cx: AsyncApp,
    ) -> Result<proto::FindSearchCandidatesResponse> {
        let message = envelope.payload;
        let query = SearchQuery::from_proto(
            message
                .query
                .ok_or_else(|| anyhow!("missing query field"))?,
        )?;
        let results = this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.find_search_candidates(&query, message.limit as _, this.fs.clone(), cx)
            })
        })?;

        let mut response = proto::FindSearchCandidatesResponse {
            buffer_ids: Vec::new(),
        };

        let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone())?;

        while let Ok(buffer) = results.recv().await {
            let buffer_id = buffer.update(&mut cx, |this, _| this.remote_id())?;
            response.buffer_ids.push(buffer_id.to_proto());
            buffer_store
                .update(&mut cx, |buffer_store, cx| {
                    buffer_store.create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                })?
                .await?;
        }

        Ok(response)
    }

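    /// Lists the entries of a directory on the remote machine, expanding a
    /// leading `~` in the requested path.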
    pub async fn handle_list_remote_directory(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ListRemoteDirectory>,
        cx: AsyncApp,
    ) -> Result<proto::ListRemoteDirectoryResponse> {
        let expanded = shellexpand::tilde(&envelope.payload.path).to_string();
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;

        let mut entries = Vec::new();
        let mut response = fs.read_dir(Path::new(&expanded)).await?;
        while let Some(path) = response.next().await {
            if let Some(file_name) = path?.file_name() {
                entries.push(file_name.to_string_lossy().to_string());
            }
        }
        Ok(proto::ListRemoteDirectoryResponse { entries })
    }

    pub async fn handle_get_path_metadata(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPathMetadata>,
        cx: AsyncApp,
    ) -> Result<proto::GetPathMetadataResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = shellexpand::tilde(&envelope.payload.path).to_string();

        let metadata = fs.metadata(&PathBuf::from(expanded.clone())).await?;
        let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false);

        Ok(proto::GetPathMetadataResponse {
            exists: metadata.is_some(),
            is_dir,
            path: expanded,
        })
    }

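    /// Shuts down the remote server process in response to a client request.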
    pub async fn handle_shutdown_remote_server(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::ShutdownRemoteServer>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        cx.spawn(|cx| async move {
            cx.update(|cx| {
                // TODO: This is a hack, because in a headless project, shutdown isn't executed
                // when calling quit, but it should be.
                cx.shutdown();
                cx.quit();
            })
        })
        .detach();

        Ok(proto::Ack {})
    }

    pub async fn handle_ping(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::Ping>,
        _cx: AsyncApp,
    ) -> Result<proto::Ack> {
        log::debug!("Received ping from client");
        Ok(proto::Ack {})
    }

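    /// Stages the given paths in the repository identified by the request,
    /// returning the first error reported during staging, if any.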
    async fn handle_stage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle =
            Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();
        let (err_sender, mut err_receiver) = mpsc::channel(1);
        repository_handle
            .stage_entries(entries, err_sender)
            .context("staging entries")?;
        if let Some(error) = err_receiver.next().await {
            Err(error.context("error during staging"))
        } else {
            Ok(proto::Ack {})
        }
    }

    async fn handle_unstage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Unstage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle =
            Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();
        let (err_sender, mut err_receiver) = mpsc::channel(1);
        repository_handle
            .unstage_entries(entries, err_sender)
            .context("unstaging entries")?;
        if let Some(error) = err_receiver.next().await {
            Err(error.context("error during unstaging"))
        } else {
            Ok(proto::Ack {})
        }
    }

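    /// Commits the currently staged changes, using the provided author name
    /// and email when both are present.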
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle =
            Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;

        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let (err_sender, mut err_receiver) = mpsc::channel(1);
        cx.update(|cx| {
            repository_handle
                .commit(name.zip(email), err_sender, cx)
                .context("committing")
        })??;
        if let Some(error) = err_receiver.next().await {
            Err(error.context("error during commit"))
        } else {
            Ok(proto::Ack {})
        }
    }

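    /// Creates (if needed) and opens the repository's COMMIT_MESSAGE file as a
    /// buffer so the client can edit the commit message remotely. Only local
    /// git repositories are supported here.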
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle =
            Self::repository_for_request(&this, worktree_id, work_directory_id, &mut cx)?;
        let git_repository = match &repository_handle.git_repo {
            GitRepo::Local(git_repository) => git_repository.clone(),
            GitRepo::Remote { .. } => {
                anyhow::bail!("Cannot handle open commit message buffer for remote git repo")
            }
        };
        let commit_message_file = git_repository.dot_git_dir().join(*COMMIT_MESSAGE);
        let fs = this.update(&mut cx, |headless_project, _| headless_project.fs.clone())?;
        fs.create_file(
            &commit_message_file,
            CreateOptions {
                overwrite: false,
                ignore_if_exists: true,
            },
        )
        .await
        .with_context(|| format!("creating commit message file {commit_message_file:?}"))?;

        let (worktree, relative_path) = this
            .update(&mut cx, |headless_project, cx| {
                headless_project
                    .worktree_store
                    .update(cx, |worktree_store, cx| {
                        worktree_store.find_or_create_worktree(&commit_message_file, false, cx)
                    })
            })?
            .await
            .with_context(|| {
                format!("deriving worktree for commit message file {commit_message_file:?}")
            })?;

        let buffer = this
            .update(&mut cx, |headless_project, cx| {
                headless_project
                    .buffer_store
                    .update(cx, |buffer_store, cx| {
                        buffer_store.open_buffer(
                            ProjectPath {
                                worktree_id: worktree.read(cx).id(),
                                path: Arc::from(relative_path),
                            },
                            true,
                            cx,
                        )
                    })
            })
            .with_context(|| {
                format!("opening buffer for commit message file {commit_message_file:?}")
            })?
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
        this.update(&mut cx, |headless_project, cx| {
            headless_project
                .buffer_store
                .update(cx, |buffer_store, cx| {
                    buffer_store
                        .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                        .detach_and_log_err(cx);
                })
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

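    /// Looks up the repository handle matching the worktree and work-directory
    /// entry referenced by a git request.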
    fn repository_for_request(
        this: &Entity<Self>,
        worktree_id: WorktreeId,
        work_directory_id: ProjectEntryId,
        cx: &mut AsyncApp,
    ) -> Result<RepositoryHandle> {
        this.update(cx, |project, cx| {
            let repository_handle = project
                .git_state
                .read(cx)
                .all_repositories()
                .into_iter()
                .find(|repository_handle| {
                    repository_handle.worktree_id == worktree_id
                        && repository_handle.repository_entry.work_directory_id()
                            == work_directory_id
                })
                .context("missing repository handle")?;
            anyhow::Ok(repository_handle)
        })?
    }
}

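/// Converts a language-server prompt level into its protobuf representation.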
fn prompt_to_proto(
    prompt: &project::LanguageServerPromptRequest,
) -> proto::language_server_prompt_request::Level {
    match prompt.level {
        PromptLevel::Info => proto::language_server_prompt_request::Level::Info(
            proto::language_server_prompt_request::Info {},
        ),
        PromptLevel::Warning => proto::language_server_prompt_request::Level::Warning(
            proto::language_server_prompt_request::Warning {},
        ),
        PromptLevel::Critical => proto::language_server_prompt_request::Level::Critical(
            proto::language_server_prompt_request::Critical {},
        ),
    }
}