use anyhow::{anyhow, Context as _, Result};
use extension::ExtensionHostProxy;
use extension_host::headless_host::HeadlessExtensionStore;
use fs::Fs;
use futures::channel::mpsc;
use git::repository::RepoPath;
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, PromptLevel, SharedString};
use http_client::HttpClient;
use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry};
use node_runtime::NodeRuntime;
use project::{
    buffer_store::{BufferStore, BufferStoreEvent},
    git::GitState,
    project_settings::SettingsObserver,
    search::SearchQuery,
    task_store::TaskStore,
    worktree_store::WorktreeStore,
    LspStore, LspStoreEvent, PrettierStore, ProjectEntryId, ProjectPath, ToolchainStore,
    WorktreeId,
};
use remote::ssh_session::ChannelClient;
use rpc::{
    proto::{self, SSH_PEER_ID, SSH_PROJECT_ID},
    AnyProtoClient, TypedEnvelope,
};

use settings::initial_server_settings_content;
use smol::stream::StreamExt;
use std::{
    path::{Path, PathBuf},
    sync::{atomic::AtomicUsize, Arc},
};
use util::ResultExt;
use worktree::Worktree;

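/// The server-side half of a remote project. It owns the worktree, buffer,
/// LSP, task, settings, and git stores on the remote machine and mirrors
/// their state to the client over the SSH channel.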
pub struct HeadlessProject {
    pub fs: Arc<dyn Fs>,
    pub session: AnyProtoClient,
    pub worktree_store: Entity<WorktreeStore>,
    pub buffer_store: Entity<BufferStore>,
    pub lsp_store: Entity<LspStore>,
    pub task_store: Entity<TaskStore>,
    pub settings_observer: Entity<SettingsObserver>,
    pub next_entry_id: Arc<AtomicUsize>,
    pub languages: Arc<LanguageRegistry>,
    pub extensions: Entity<HeadlessExtensionStore>,
    pub git_state: Entity<GitState>,
}

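/// Everything the remote server's startup code needs to provide in order to
/// construct a [`HeadlessProject`].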
pub struct HeadlessAppState {
    pub session: Arc<ChannelClient>,
    pub fs: Arc<dyn Fs>,
    pub http_client: Arc<dyn HttpClient>,
    pub node_runtime: NodeRuntime,
    pub languages: Arc<LanguageRegistry>,
    pub extension_host_proxy: Arc<ExtensionHostProxy>,
}

impl HeadlessProject {
    pub fn init(cx: &mut App) {
        settings::init(cx);
        language::init(cx);
        project::Project::init_settings(cx);
    }

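    /// Wires up all of the project stores, shares them over the SSH channel
    /// under [`SSH_PROJECT_ID`], and registers the RPC handlers defined below.
    ///
    /// A minimal construction sketch (a hypothetical example; it assumes an
    /// `app_state: HeadlessAppState` has already been assembled by the
    /// server's startup code):
    ///
    /// ```ignore
    /// let project = cx.new(|cx| HeadlessProject::new(app_state, cx));
    /// ```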
    pub fn new(
        HeadlessAppState {
            session,
            fs,
            http_client,
            node_runtime,
            languages,
            extension_host_proxy: proxy,
        }: HeadlessAppState,
        cx: &mut Context<Self>,
    ) -> Self {
        language_extension::init(proxy.clone(), languages.clone());
        languages::init(languages.clone(), node_runtime.clone(), cx);

        let worktree_store = cx.new(|cx| {
            let mut store = WorktreeStore::local(true, fs.clone());
            store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            store
        });

        let git_state =
            cx.new(|cx| GitState::new(&worktree_store, languages.clone(), None, None, cx));

        let buffer_store = cx.new(|cx| {
            let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
            buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            buffer_store
        });
        let prettier_store = cx.new(|cx| {
            PrettierStore::new(
                node_runtime.clone(),
                fs.clone(),
                languages.clone(),
                worktree_store.clone(),
                cx,
            )
        });
        let environment = project::ProjectEnvironment::new(&worktree_store, None, cx);
        let toolchain_store = cx.new(|cx| {
            ToolchainStore::local(
                languages.clone(),
                worktree_store.clone(),
                environment.clone(),
                cx,
            )
        });

        let task_store = cx.new(|cx| {
            let mut task_store = TaskStore::local(
                fs.clone(),
                buffer_store.downgrade(),
                worktree_store.clone(),
                toolchain_store.read(cx).as_language_toolchain_store(),
                environment.clone(),
                cx,
            );
            task_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            task_store
        });
        let settings_observer = cx.new(|cx| {
            let mut observer = SettingsObserver::new_local(
                fs.clone(),
                worktree_store.clone(),
                task_store.clone(),
                cx,
            );
            observer.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            observer
        });

        let lsp_store = cx.new(|cx| {
            let mut lsp_store = LspStore::new_local(
                buffer_store.clone(),
                worktree_store.clone(),
                prettier_store.clone(),
                toolchain_store.clone(),
                environment,
                languages.clone(),
                http_client.clone(),
                fs.clone(),
                cx,
            );
            lsp_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            lsp_store
        });

        cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();

        cx.subscribe(
            &buffer_store,
            |_this, _buffer_store, event, cx| match event {
                BufferStoreEvent::BufferAdded(buffer) => {
                    cx.subscribe(buffer, Self::on_buffer_event).detach();
                }
                _ => {}
            },
        )
        .detach();

        let extensions = HeadlessExtensionStore::new(
            fs.clone(),
            http_client.clone(),
            paths::remote_extensions_dir().to_path_buf(),
            proxy,
            node_runtime,
            cx,
        );

        let client: AnyProtoClient = session.clone().into();

        // local_machine -> ssh handlers
        session.subscribe_to_entity(SSH_PROJECT_ID, &worktree_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &buffer_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &cx.entity());
        session.subscribe_to_entity(SSH_PROJECT_ID, &lsp_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &task_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &toolchain_store);
        session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer);

        client.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
        client.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
        client.add_request_handler(cx.weak_entity(), Self::handle_shutdown_remote_server);
        client.add_request_handler(cx.weak_entity(), Self::handle_ping);

        client.add_model_request_handler(Self::handle_add_worktree);
        client.add_request_handler(cx.weak_entity(), Self::handle_remove_worktree);

        client.add_model_request_handler(Self::handle_open_buffer_by_path);
        client.add_model_request_handler(Self::handle_open_new_buffer);
        client.add_model_request_handler(Self::handle_find_search_candidates);
        client.add_model_request_handler(Self::handle_open_server_settings);

        client.add_model_request_handler(BufferStore::handle_update_buffer);
        client.add_model_message_handler(BufferStore::handle_close_buffer);

        client.add_model_request_handler(Self::handle_stage);
        client.add_model_request_handler(Self::handle_unstage);
        client.add_model_request_handler(Self::handle_commit);

        client.add_request_handler(
            extensions.clone().downgrade(),
            HeadlessExtensionStore::handle_sync_extensions,
        );
        client.add_request_handler(
            extensions.clone().downgrade(),
            HeadlessExtensionStore::handle_install_extension,
        );

        BufferStore::init(&client);
        WorktreeStore::init(&client);
        SettingsObserver::init(&client);
        LspStore::init(&client);
        TaskStore::init(Some(&client));
        ToolchainStore::init(&client);

        HeadlessProject {
            session: client,
            settings_observer,
            fs,
            worktree_store,
            buffer_store,
            lsp_store,
            task_store,
            next_entry_id: Default::default(),
            languages,
            extensions,
            git_state,
        }
    }

    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::Operation {
                operation,
                is_local: true,
            } => cx
                .background_executor()
                .spawn(self.session.request(proto::UpdateBuffer {
                    project_id: SSH_PROJECT_ID,
                    buffer_id: buffer.read(cx).remote_id().to_proto(),
                    operations: vec![serialize_operation(operation)],
                }))
                .detach(),
            _ => {}
        }
    }

    fn on_lsp_store_event(
        &mut self,
        _lsp_store: Entity<LspStore>,
        event: &LspStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            LspStoreEvent::LanguageServerUpdate {
                language_server_id,
                message,
            } => {
                self.session
                    .send(proto::UpdateLanguageServer {
                        project_id: SSH_PROJECT_ID,
                        language_server_id: language_server_id.to_proto(),
                        variant: Some(message.clone()),
                    })
                    .log_err();
            }
            LspStoreEvent::Notification(message) => {
                self.session
                    .send(proto::Toast {
                        project_id: SSH_PROJECT_ID,
                        notification_id: "lsp".to_string(),
                        message: message.clone(),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerLog(language_server_id, log_type, message) => {
                self.session
                    .send(proto::LanguageServerLog {
                        project_id: SSH_PROJECT_ID,
                        language_server_id: language_server_id.to_proto(),
                        message: message.clone(),
                        log_type: Some(log_type.to_proto()),
                    })
                    .log_err();
            }
            LspStoreEvent::LanguageServerPrompt(prompt) => {
                let request = self.session.request(proto::LanguageServerPromptRequest {
                    project_id: SSH_PROJECT_ID,
                    actions: prompt
                        .actions
                        .iter()
                        .map(|action| action.title.to_string())
                        .collect(),
                    level: Some(prompt_to_proto(&prompt)),
                    lsp_name: prompt.lsp_name.clone(),
                    message: prompt.message.clone(),
                });
                let prompt = prompt.clone();
                cx.background_executor()
                    .spawn(async move {
                        let response = request.await?;
                        if let Some(action_response) = response.action_response {
                            prompt.respond(action_response as usize).await;
                        }
                        anyhow::Ok(())
                    })
                    .detach();
            }
            _ => {}
        }
    }

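    /// Handles `proto::AddWorktree` by canonicalizing the requested path
    /// (falling back to its parent directory when the path itself does not
    /// exist yet), creating a local worktree for it, and registering the
    /// worktree with the store only after the response has been sent (see the
    /// comment below on why that ordering matters).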
    pub async fn handle_add_worktree(
        this: Entity<Self>,
        message: TypedEnvelope<proto::AddWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::AddWorktreeResponse> {
        use client::ErrorCodeExt;
        let path = shellexpand::tilde(&message.payload.path).to_string();

        let fs = this.read_with(&mut cx, |this, _| this.fs.clone())?;
        let path = PathBuf::from(path);

        let canonicalized = match fs.canonicalize(&path).await {
            Ok(path) => path,
            Err(e) => {
                let mut parent = path
                    .parent()
                    .ok_or(e)
                    .map_err(|_| anyhow!("{:?} does not exist", path))?;
                if parent == Path::new("") {
                    parent = util::paths::home_dir();
                }
                let parent = fs.canonicalize(parent).await.map_err(|_| {
                    anyhow!(proto::ErrorCode::DevServerProjectPathDoesNotExist
                        .with_tag("path", &path.to_string_lossy().as_ref()))
                })?;
                parent.join(path.file_name().unwrap())
            }
        };

        let worktree = this
            .update(&mut cx.clone(), |this, _| {
                Worktree::local(
                    Arc::from(canonicalized.as_path()),
                    message.payload.visible,
                    this.fs.clone(),
                    this.next_entry_id.clone(),
                    &mut cx,
                )
            })?
            .await?;

        let response = this.update(&mut cx, |_, cx| {
            worktree.update(cx, |worktree, _| proto::AddWorktreeResponse {
                worktree_id: worktree.id().to_proto(),
                canonicalized_path: canonicalized.to_string_lossy().to_string(),
            })
        })?;

        // We spawn this asynchronously so that we can send the response back
        // *before* `worktree_store.add()` sends out UpdateProject requests to
        // the client about the new worktree.
        //
        // That lets the client register the handle of the newly-added worktree
        // before it is interrupted by an UpdateProject request.
        //
        // This fixes the following race: the client sends AddWorktree, the
        // headless project sends out a project update, the client receives it
        // and immediately drops its reference to the new worktree (causing it
        // to be dropped on the headless project as well), and only then does
        // the client receive the response to AddWorktree.
        cx.spawn(|mut cx| async move {
            this.update(&mut cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.add(&worktree, cx);
                });
            })
            .log_err();
        })
        .detach();

        Ok(response)
    }

    pub async fn handle_remove_worktree(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::RemoveWorktree>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        this.update(&mut cx, |this, cx| {
            this.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.remove_worktree(worktree_id, cx);
            });
        })?;
        Ok(proto::Ack {})
    }

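    /// Opens the buffer at the given project path and replicates it to the
    /// client via `create_buffer_for_peer` before returning its remote id.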
    pub async fn handle_open_buffer_by_path(
        this: Entity<Self>,
        message: TypedEnvelope<proto::OpenBufferByPath>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let worktree_id = WorktreeId::from_proto(message.payload.worktree_id);
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id,
                        path: PathBuf::from(message.payload.path).into(),
                    },
                    cx,
                )
            });
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

    pub async fn handle_open_new_buffer(
        this: Entity<Self>,
        _message: TypedEnvelope<proto::OpenNewBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
            let buffer_store = this.buffer_store.clone();
            let buffer = this
                .buffer_store
                .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx));
            anyhow::Ok((buffer_store, buffer))
        })??;

        let buffer = buffer.await?;
        let buffer_id = buffer.read_with(&cx, |b, _| b.remote_id())?;
        buffer_store.update(&mut cx, |buffer_store, cx| {
            buffer_store
                .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                .detach_and_log_err(cx);
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

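    /// Opens the remote server's settings file in a buffer, seeding it with
    /// the initial server settings content if it is empty, and replicates the
    /// buffer to the client.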
    pub async fn handle_open_server_settings(
        this: Entity<Self>,
        _: TypedEnvelope<proto::OpenServerSettings>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let settings_path = paths::settings_file();
        let (worktree, path) = this
            .update(&mut cx, |this, cx| {
                this.worktree_store.update(cx, |worktree_store, cx| {
                    worktree_store.find_or_create_worktree(settings_path, false, cx)
                })
            })?
            .await?;

        let (buffer, buffer_store) = this.update(&mut cx, |this, cx| {
            let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.open_buffer(
                    ProjectPath {
                        worktree_id: worktree.read(cx).id(),
                        path: path.into(),
                    },
                    cx,
                )
            });

            (buffer, this.buffer_store.clone())
        })?;

        let buffer = buffer.await?;

        let buffer_id = cx.update(|cx| {
            if buffer.read(cx).is_empty() {
                buffer.update(cx, |buffer, cx| {
                    buffer.edit([(0..0, initial_server_settings_content())], None, cx)
                });
            }

            let buffer_id = buffer.read_with(cx, |b, _| b.remote_id());

            buffer_store.update(cx, |buffer_store, cx| {
                buffer_store
                    .create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                    .detach_and_log_err(cx);
            });

            buffer_id
        })?;

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }

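    /// Runs the given search query against the buffer store and replicates
    /// each candidate buffer to the client, returning the collected buffer
    /// ids.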
    pub async fn handle_find_search_candidates(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::FindSearchCandidates>,
        mut cx: AsyncApp,
    ) -> Result<proto::FindSearchCandidatesResponse> {
        let message = envelope.payload;
        let query = SearchQuery::from_proto(
            message
                .query
                .ok_or_else(|| anyhow!("missing query field"))?,
        )?;
        let results = this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.find_search_candidates(&query, message.limit as _, this.fs.clone(), cx)
            })
        })?;

        let mut response = proto::FindSearchCandidatesResponse {
            buffer_ids: Vec::new(),
        };

        let buffer_store = this.read_with(&cx, |this, _| this.buffer_store.clone())?;

        while let Ok(buffer) = results.recv().await {
            let buffer_id = buffer.update(&mut cx, |this, _| this.remote_id())?;
            response.buffer_ids.push(buffer_id.to_proto());
            buffer_store
                .update(&mut cx, |buffer_store, cx| {
                    buffer_store.create_buffer_for_peer(&buffer, SSH_PEER_ID, cx)
                })?
                .await?;
        }

        Ok(response)
    }

    pub async fn handle_list_remote_directory(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ListRemoteDirectory>,
        cx: AsyncApp,
    ) -> Result<proto::ListRemoteDirectoryResponse> {
        let expanded = shellexpand::tilde(&envelope.payload.path).to_string();
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;

        let mut entries = Vec::new();
        let mut response = fs.read_dir(Path::new(&expanded)).await?;
        while let Some(path) = response.next().await {
            if let Some(file_name) = path?.file_name() {
                entries.push(file_name.to_string_lossy().to_string());
            }
        }
        Ok(proto::ListRemoteDirectoryResponse { entries })
    }

    pub async fn handle_get_path_metadata(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPathMetadata>,
        cx: AsyncApp,
    ) -> Result<proto::GetPathMetadataResponse> {
        let fs = cx.read_entity(&this, |this, _| this.fs.clone())?;
        let expanded = shellexpand::tilde(&envelope.payload.path).to_string();

        let metadata = fs.metadata(&PathBuf::from(expanded.clone())).await?;
        let is_dir = metadata.map(|metadata| metadata.is_dir).unwrap_or(false);

        Ok(proto::GetPathMetadataResponse {
            exists: metadata.is_some(),
            is_dir,
            path: expanded,
        })
    }

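    /// Shuts down the headless app. The shutdown is spawned so that the `Ack`
    /// can be returned to the client before the app quits.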
    pub async fn handle_shutdown_remote_server(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::ShutdownRemoteServer>,
        cx: AsyncApp,
    ) -> Result<proto::Ack> {
        cx.spawn(|cx| async move {
            cx.update(|cx| {
                // TODO: This is a hack, because in a headless project, shutdown isn't executed
                // when calling quit, but it should be.
                cx.shutdown();
                cx.quit();
            })
        })
        .detach();

        Ok(proto::Ack {})
    }

    pub async fn handle_ping(
        _this: Entity<Self>,
        _envelope: TypedEnvelope<proto::Ping>,
        _cx: AsyncApp,
    ) -> Result<proto::Ack> {
        log::debug!("Received ping from client");
        Ok(proto::Ack {})
    }

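    // The three git handlers below (`handle_stage`, `handle_unstage`,
    // `handle_commit`) share the same shape: resolve the repository from the
    // worktree id and work-directory entry id, forward the operation, and
    // report the first error received on the result channel.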
    async fn handle_stage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Stage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle = this.update(&mut cx, |project, cx| {
            let repository_handle = project
                .git_state
                .read(cx)
                .all_repositories()
                .into_iter()
                .find(|repository_handle| {
                    repository_handle.worktree_id == worktree_id
                        && repository_handle.repository_entry.work_directory_id()
                            == work_directory_id
                })
                .context("missing repository handle")?;
            anyhow::Ok(repository_handle)
        })??;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();
        let (err_sender, mut err_receiver) = mpsc::channel(1);
        repository_handle
            .stage_entries(entries, err_sender)
            .context("staging entries")?;
        if let Some(error) = err_receiver.next().await {
            Err(error.context("error during staging"))
        } else {
            Ok(proto::Ack {})
        }
    }

    async fn handle_unstage(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Unstage>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle = this.update(&mut cx, |project, cx| {
            let repository_handle = project
                .git_state
                .read(cx)
                .all_repositories()
                .into_iter()
                .find(|repository_handle| {
                    repository_handle.worktree_id == worktree_id
                        && repository_handle.repository_entry.work_directory_id()
                            == work_directory_id
                })
                .context("missing repository handle")?;
            anyhow::Ok(repository_handle)
        })??;

        let entries = envelope
            .payload
            .paths
            .into_iter()
            .map(PathBuf::from)
            .map(RepoPath::new)
            .collect();
        let (err_sender, mut err_receiver) = mpsc::channel(1);
        repository_handle
            .unstage_entries(entries, err_sender)
            .context("unstaging entries")?;
        if let Some(error) = err_receiver.next().await {
            Err(error.context("error during unstaging"))
        } else {
            Ok(proto::Ack {})
        }
    }

    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
        let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
        let repository_handle = this.update(&mut cx, |project, cx| {
            let repository_handle = project
                .git_state
                .read(cx)
                .all_repositories()
                .into_iter()
                .find(|repository_handle| {
                    repository_handle.worktree_id == worktree_id
                        && repository_handle.repository_entry.work_directory_id()
                            == work_directory_id
                })
                .context("missing repository handle")?;
            anyhow::Ok(repository_handle)
        })??;

        let commit_message = envelope.payload.message;
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let (err_sender, mut err_receiver) = mpsc::channel(1);
        repository_handle
            .commit_with_message(commit_message, name.zip(email), err_sender)
            .context("committing")?;
        if let Some(error) = err_receiver.next().await {
            Err(error.context("error during commit"))
        } else {
            Ok(proto::Ack {})
        }
    }
}

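/// Maps a GPUI [`PromptLevel`] onto the corresponding proto prompt level.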
fn prompt_to_proto(
    prompt: &project::LanguageServerPromptRequest,
) -> proto::language_server_prompt_request::Level {
    match prompt.level {
        PromptLevel::Info => proto::language_server_prompt_request::Level::Info(
            proto::language_server_prompt_request::Info {},
        ),
        PromptLevel::Warning => proto::language_server_prompt_request::Level::Warning(
            proto::language_server_prompt_request::Warning {},
        ),
        PromptLevel::Critical => proto::language_server_prompt_request::Level::Critical(
            proto::language_server_prompt_request::Critical {},
        ),
    }
}