1use crate::{
2 lsp_store::OpenLspBufferHandle,
3 search::SearchQuery,
4 worktree_store::{WorktreeStore, WorktreeStoreEvent},
5 ProjectItem as _, ProjectPath,
6};
7use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry};
8use anyhow::{anyhow, bail, Context as _, Result};
9use buffer_diff::BufferDiff;
10use client::Client;
11use collections::{hash_map, HashMap, HashSet};
12use fs::Fs;
13use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt};
14use git::{blame::Blame, repository::RepoPath};
15use gpui::{
16 App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
17};
18use language::{
19 proto::{
20 deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version,
21 split_operations,
22 },
23 Buffer, BufferEvent, Capability, DiskState, File as _, Language, LanguageRegistry, Operation,
24};
25use rpc::{
26 proto::{self, ToProto},
27 AnyProtoClient, ErrorExt as _, TypedEnvelope,
28};
29use serde::Deserialize;
30use smol::channel::Receiver;
31use std::{
32 io,
33 ops::Range,
34 path::{Path, PathBuf},
35 pin::pin,
36 sync::Arc,
37 time::Instant,
38};
39use text::BufferId;
40use util::{debug_panic, maybe, ResultExt as _, TryFutureExt};
41use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId};
42
/// Which git base text a buffer diff is computed against.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Working copy compared against the index (staged) text.
    Unstaged,
    /// Working copy compared against the committed (HEAD) text.
    Uncommitted,
}
48
/// A set of open buffers.
pub struct BufferStore {
    /// Local or remote backing implementation for this store.
    state: BufferStoreState,
    /// In-flight buffer loads, keyed by project path so that concurrent opens
    /// of the same path share one task.
    #[allow(clippy::type_complexity)]
    loading_buffers: HashMap<ProjectPath, Shared<Task<Result<Entity<Buffer>, Arc<anyhow::Error>>>>>,
    /// In-flight diff loads, keyed by buffer id and diff kind so concurrent
    /// requests for the same diff share one task.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Store of worktrees used to resolve buffer files to paths on disk.
    worktree_store: Entity<WorktreeStore>,
    /// All buffers this store knows about, complete or still receiving ops.
    opened_buffers: HashMap<BufferId, OpenBuffer>,
    /// Client and project id used to forward updates to a downstream peer,
    /// when this project is shared.
    downstream_client: Option<(AnyProtoClient, u64)>,
    /// Per-peer record of which buffers (and related state) have been shared.
    shared_buffers: HashMap<proto::PeerId, HashMap<BufferId, SharedBuffer>>,
}
62
/// State retained for a buffer that has been shared with a remote peer.
#[derive(Hash, Eq, PartialEq, Clone)]
struct SharedBuffer {
    // The buffer itself; kept strongly so it outlives the share.
    buffer: Entity<Buffer>,
    // Diff shared alongside the buffer, if the peer requested one.
    diff: Option<Entity<BufferDiff>>,
    // Keeps the buffer registered with the language servers while shared.
    lsp_handle: Option<OpenLspBufferHandle>,
}
69
/// Git-diff bookkeeping for a single open buffer: the cached base texts and
/// weak handles to the diff entities computed from them.
#[derive(Default)]
struct BufferDiffState {
    // Weak: diffs are dropped when no view needs them anymore.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // The currently running recalculation; replaced (cancelling the old task)
    // whenever a new recalculation starts.
    recalculate_diff_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders resolved when the in-flight recalculation completes.
    diff_updated_futures: Vec<oneshot::Sender<()>>,

    // Cached base texts (HEAD and index). When the index matches HEAD these
    // hold the *same* Arc, which `recalculate_diffs` detects via pointer
    // equality.
    head_text: Option<Arc<String>>,
    index_text: Option<Arc<String>>,
    // Dirty flags consumed (and cleared) by the recalculation task.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
85
/// Describes which diff base texts changed and what their new contents are.
/// `None` payloads mean the corresponding base text no longer exists.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the committed (HEAD) text changed.
    SetHead(Option<String>),
    /// Both changed, to different contents.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Both changed and are identical (index matches HEAD).
    SetBoth(Option<String>),
}
96
impl BufferDiffState {
    /// Records the buffer's new language and starts a diff recalculation so
    /// the diff base texts are re-indexed with the new syntax.
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        // The receiver is intentionally dropped: this caller does not need to
        // wait for the recalculation to finish.
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Upgrades the weak handle to the unstaged diff, if it is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Upgrades the weak handle to the uncommitted diff, if it is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Applies an `UpdateDiffBases` message from the upstream peer by
    /// translating its mode + payload into a `DiffBasesChange`.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        // Silently ignore messages with an unrecognized mode value.
        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        // Completion receiver dropped: protocol updates are fire-and-forget.
        let _ = self.diff_bases_changed(buffer, diff_bases_change, cx);
    }

    /// Returns a receiver that resolves when the currently pending diff
    /// recalculation completes, or `None` if no recalculation is pending.
    pub fn wait_for_recalculation(&mut self) -> Option<oneshot::Receiver<()>> {
        if self.diff_updated_futures.is_empty() {
            return None;
        }
        let (tx, rx) = oneshot::channel();
        self.diff_updated_futures.push(tx);
        Some(rx)
    }

    /// Stores new base texts (normalizing line endings), marks the matching
    /// dirty flags, and kicks off a recalculation. The returned receiver
    /// resolves when the recalculation finishes.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: DiffBasesChange,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        match diff_bases_change {
            DiffBasesChange::SetIndex(index) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::new(index)
                });
                self.index_changed = true;
            }
            DiffBasesChange::SetHead(head) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::new(head)
                });
                self.head_changed = true;
            }
            DiffBasesChange::SetBoth(text) => {
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::new(text)
                });
                // Both fields share the same Arc so that `recalculate_diffs`
                // can detect "index matches head" via pointer equality.
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            DiffBasesChange::SetEach { index, head } => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::new(index)
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::new(head)
                });
                self.head_changed = true;
            }
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged and/or uncommitted diff snapshots against the
    /// given buffer snapshot on a background task, replacing (and thereby
    /// cancelling) any previous recalculation. Resolves all waiters and
    /// clears the dirty flags when done.
    fn recalculate_diffs(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        log::debug!("recalculate diffs");
        let (tx, rx) = oneshot::channel();
        self.diff_updated_futures.push(tx);

        // Capture everything the async task needs by value.
        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        // `SetBoth` stores the same Arc in both fields, so pointer equality
        // is what identifies "index matches head" here.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };
        self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move {
            // Recompute the unstaged diff (against the index), if tracked.
            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    BufferDiff::update_diff(
                        unstaged_diff.clone(),
                        buffer.clone(),
                        index,
                        index_changed,
                        language_changed,
                        language.clone(),
                        language_registry.clone(),
                        &mut cx,
                    )
                    .await?,
                );
            }

            // Recompute the uncommitted diff (against HEAD), if tracked.
            // When the index matches HEAD, reuse the unstaged result instead
            // of diffing the same base text twice.
            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        BufferDiff::update_diff(
                            uncommitted_diff.clone(),
                            buffer.clone(),
                            head,
                            head_changed,
                            language_changed,
                            language.clone(),
                            language_registry.clone(),
                            &mut cx,
                        )
                        .await?,
                    )
                }
            }

            // Install the new snapshots. The range changed by the unstaged
            // diff is forwarded so the uncommitted diff can limit its update.
            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                unstaged_diff.update(&mut cx, |diff, cx| {
                    diff.set_snapshot(&buffer, new_unstaged_diff, language_changed, None, cx)
                })?
            } else {
                None
            };

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff.update(&mut cx, |uncommitted_diff, cx| {
                    uncommitted_diff.set_snapshot(
                        &buffer,
                        new_uncommitted_diff,
                        language_changed,
                        unstaged_changed_range,
                        cx,
                    );
                })?;
            }

            // Clear the dirty flags and notify waiters, if the state entity
            // is still alive.
            if let Some(this) = this.upgrade() {
                this.update(&mut cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    for tx in this.diff_updated_futures.drain(..) {
                        tx.send(()).ok();
                    }
                })?;
            }

            Ok(())
        }));

        rx
    }
}
297
/// Backing behavior for a `BufferStore`: buffers either live on the local
/// filesystem or are mirrored from an upstream peer over RPC.
enum BufferStoreState {
    Local(LocalBufferStore),
    Remote(RemoteBufferStore),
}
302
/// Buffer-store state for a project mirrored from an upstream peer.
struct RemoteBufferStore {
    // Buffers sent by collab peers, retained here to avoid release races.
    shared_with_me: HashSet<Entity<Buffer>>,
    upstream_client: AnyProtoClient,
    project_id: u64,
    // Buffers whose initial state arrived but whose operation chunks are
    // still streaming in.
    loading_remote_buffers_by_id: HashMap<BufferId, Entity<Buffer>>,
    // Waiters to notify once a given remote buffer finishes loading (or
    // fails to).
    remote_buffer_listeners:
        HashMap<BufferId, Vec<oneshot::Sender<Result<Entity<Buffer>, anyhow::Error>>>>,
    worktree_store: Entity<WorktreeStore>,
}
312
/// Buffer-store state for a project backed by the local filesystem.
struct LocalBufferStore {
    // Reverse indices used to find an open buffer from worktree events.
    local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
    worktree_store: Entity<WorktreeStore>,
    // Keeps the WorktreeStore event subscription alive.
    _subscription: Subscription,
}
319
/// An entry in `BufferStore::opened_buffers`.
enum OpenBuffer {
    /// A fully-loaded buffer plus its git-diff state.
    Complete {
        buffer: WeakEntity<Buffer>,
        diff_state: Entity<BufferDiffState>,
    },
    /// Operations received for a buffer that hasn't finished loading yet;
    /// buffered so they can be applied once the buffer exists.
    Operations(Vec<Operation>),
}
327
/// Events emitted by the `BufferStore` for the rest of the project to react
/// to.
pub enum BufferStoreEvent {
    BufferAdded(Entity<Buffer>),
    BufferDiffAdded(Entity<BufferDiff>),
    BufferDropped(BufferId),
    /// A buffer's file moved or was renamed; `old_file` is the file it had
    /// before the change.
    BufferChangedFilePath {
        buffer: Entity<Buffer>,
        old_file: Option<Arc<dyn language::File>>,
    },
}
337
/// A group of buffer transactions produced by one project-wide operation
/// (e.g. a reload or a workspace edit), keyed by the buffer they apply to.
#[derive(Default, Debug)]
pub struct ProjectTransaction(pub HashMap<Entity<Buffer>, language::Transaction>);
340
// Marker impl allowing `BufferStore` to emit `BufferStoreEvent`s via gpui.
impl EventEmitter<BufferStoreEvent> for BufferStore {}
342
impl RemoteBufferStore {
    /// Requests the staged (index) text for a buffer from the upstream peer.
    /// `None` means the file has no entry in the index.
    fn open_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Task<Result<Option<String>>> {
        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.background_spawn(async move {
            let response = client
                .request(proto::OpenUnstagedDiff {
                    project_id,
                    buffer_id: buffer_id.to_proto(),
                })
                .await?;
            Ok(response.staged_text)
        })
    }

    /// Requests the committed (and possibly staged) text for a buffer from
    /// the upstream peer, expressed as a `DiffBasesChange`.
    fn open_uncommitted_diff(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        use proto::open_uncommitted_diff_response::Mode;

        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.background_spawn(async move {
            let response = client
                .request(proto::OpenUncommittedDiff {
                    project_id,
                    buffer_id: buffer_id.to_proto(),
                })
                .await?;
            let mode = Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
            let bases = match mode {
                // When index == HEAD the peer only sends the committed text.
                Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                Mode::IndexAndHead => DiffBasesChange::SetEach {
                    head: response.committed_text,
                    index: response.staged_text,
                },
            };
            Ok(bases)
        })
    }

    /// Resolves once the buffer with `id` has finished loading from the
    /// upstream peer (or immediately, if it is already open).
    pub fn wait_for_remote_buffer(
        &mut self,
        id: BufferId,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        // Register the listener *before* checking for the buffer, so a
        // buffer that completes in between cannot be missed.
        let (tx, rx) = oneshot::channel();
        self.remote_buffer_listeners.entry(id).or_default().push(tx);

        cx.spawn(|this, cx| async move {
            if let Some(buffer) = this
                .read_with(&cx, |buffer_store, _| buffer_store.get(id))
                .ok()
                .flatten()
            {
                return Ok(buffer);
            }

            cx.background_spawn(async move { rx.await? }).await
        })
    }

    /// Saves a buffer by asking the upstream peer to write it (optionally to
    /// a new path), then records the resulting version/mtime locally.
    fn save_remote_buffer(
        &self,
        buffer_handle: Entity<Buffer>,
        new_path: Option<proto::ProjectPath>,
        cx: &Context<BufferStore>,
    ) -> Task<Result<()>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id().into();
        let version = buffer.version();
        let rpc = self.upstream_client.clone();
        let project_id = self.project_id;
        cx.spawn(move |_, mut cx| async move {
            let response = rpc
                .request(proto::SaveBuffer {
                    project_id,
                    buffer_id,
                    new_path,
                    version: serialize_version(&version),
                })
                .await?;
            let version = deserialize_version(&response.version);
            let mtime = response.mtime.map(|mtime| mtime.into());

            buffer_handle.update(&mut cx, |buffer, cx| {
                buffer.did_save(version.clone(), mtime, cx);
            })?;

            Ok(())
        })
    }

    /// Handles one message of the buffer-streaming protocol: a `State`
    /// message creates the (still loading) buffer, and `Chunk` messages
    /// apply its operations. Returns the buffer once the last chunk has
    /// been applied; `None` while loading is still in progress.
    pub fn handle_create_buffer_for_peer(
        &mut self,
        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
        replica_id: u16,
        capability: Capability,
        cx: &mut Context<BufferStore>,
    ) -> Result<Option<Entity<Buffer>>> {
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("missing variant"))?
        {
            proto::create_buffer_for_peer::Variant::State(mut state) => {
                let buffer_id = BufferId::new(state.id)?;

                let buffer_result = maybe!({
                    // Resolve the buffer's file against our worktrees, if it
                    // has one.
                    let mut buffer_file = None;
                    if let Some(file) = state.file.take() {
                        let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id);
                        let worktree = self
                            .worktree_store
                            .read(cx)
                            .worktree_for_id(worktree_id, cx)
                            .ok_or_else(|| {
                                anyhow!("no worktree found for id {}", file.worktree_id)
                            })?;
                        buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
                            as Arc<dyn language::File>);
                    }
                    Buffer::from_proto(replica_id, capability, state, buffer_file)
                });

                match buffer_result {
                    Ok(buffer) => {
                        let buffer = cx.new(|_| buffer);
                        self.loading_remote_buffers_by_id.insert(buffer_id, buffer);
                    }
                    Err(error) => {
                        // Fail every waiter; each gets its own clone of the
                        // error.
                        if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
                            for listener in listeners {
                                listener.send(Err(anyhow!(error.cloned()))).ok();
                            }
                        }
                    }
                }
            }
            proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
                let buffer_id = BufferId::new(chunk.buffer_id)?;
                let buffer = self
                    .loading_remote_buffers_by_id
                    .get(&buffer_id)
                    .cloned()
                    .ok_or_else(|| {
                        anyhow!(
                            "received chunk for buffer {} without initial state",
                            chunk.buffer_id
                        )
                    })?;

                let result = maybe!({
                    let operations = chunk
                        .operations
                        .into_iter()
                        .map(language::proto::deserialize_operation)
                        .collect::<Result<Vec<_>>>()?;
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx));
                    anyhow::Ok(())
                });

                if let Err(error) = result {
                    // A bad chunk aborts the whole load and fails waiters.
                    self.loading_remote_buffers_by_id.remove(&buffer_id);
                    if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
                        for listener in listeners {
                            listener.send(Err(error.cloned())).ok();
                        }
                    }
                } else if chunk.is_last {
                    self.loading_remote_buffers_by_id.remove(&buffer_id);
                    if self.upstream_client.is_via_collab() {
                        // retain buffers sent by peers to avoid races.
                        self.shared_with_me.insert(buffer.clone());
                    }

                    if let Some(senders) = self.remote_buffer_listeners.remove(&buffer_id) {
                        for sender in senders {
                            sender.send(Ok(buffer.clone())).ok();
                        }
                    }
                    return Ok(Some(buffer));
                }
            }
        }
        return Ok(None);
    }

    /// Ids of buffers whose operation chunks are still streaming in.
    pub fn incomplete_buffer_ids(&self) -> Vec<BufferId> {
        self.loading_remote_buffers_by_id
            .keys()
            .copied()
            .collect::<Vec<_>>()
    }

    /// Reconstructs a `ProjectTransaction` from a protocol message, waiting
    /// for each referenced buffer (and its edits) to arrive first.
    pub fn deserialize_project_transaction(
        &self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
            {
                let buffer_id = BufferId::new(buffer_id)?;
                let buffer = this
                    .update(&mut cx, |this, cx| {
                        this.wait_for_remote_buffer(buffer_id, cx)
                    })?
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // Ensure the edits the transaction refers to have been
                // applied to the buffer before exposing the transaction.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })?
                    .await?;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    })?;
                }
            }

            Ok(project_transaction)
        })
    }

    /// Opens a buffer by path via the upstream peer, then waits for its
    /// contents to finish streaming in.
    fn open_buffer(
        &self,
        path: Arc<Path>,
        worktree: Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let worktree_id = worktree.read(cx).id().to_proto();
        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.spawn(move |this, mut cx| async move {
            let response = client
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id,
                    path: path.to_proto(),
                })
                .await?;
            let buffer_id = BufferId::new(response.buffer_id)?;

            let buffer = this
                .update(&mut cx, {
                    |this, cx| this.wait_for_remote_buffer(buffer_id, cx)
                })?
                .await?;

            Ok(buffer)
        })
    }

    /// Asks the upstream peer to create a new empty buffer and waits for it
    /// to arrive.
    fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
        let create = self.upstream_client.request(proto::OpenNewBuffer {
            project_id: self.project_id,
        });
        cx.spawn(|this, mut cx| async move {
            let response = create.await?;
            let buffer_id = BufferId::new(response.buffer_id)?;

            this.update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(buffer_id, cx)
            })?
            .await
        })
    }

    /// Asks the upstream peer to reload the given buffers from disk, and
    /// deserializes the resulting project transaction.
    fn reload_buffers(
        &self,
        buffers: HashSet<Entity<Buffer>>,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        let request = self.upstream_client.request(proto::ReloadBuffers {
            project_id: self.project_id,
            buffer_ids: buffers
                .iter()
                .map(|buffer| buffer.read(cx).remote_id().to_proto())
                .collect(),
        });

        cx.spawn(|this, mut cx| async move {
            let response = request
                .await?
                .transaction
                .ok_or_else(|| anyhow!("missing transaction"))?;
            this.update(&mut cx, |this, cx| {
                this.deserialize_project_transaction(response, push_to_history, cx)
            })?
            .await
        })
    }
}
648
649impl LocalBufferStore {
650 fn worktree_for_buffer(
651 &self,
652 buffer: &Entity<Buffer>,
653 cx: &App,
654 ) -> Option<(Entity<Worktree>, Arc<Path>)> {
655 let file = buffer.read(cx).file()?;
656 let worktree_id = file.worktree_id(cx);
657 let path = file.path().clone();
658 let worktree = self
659 .worktree_store
660 .read(cx)
661 .worktree_for_id(worktree_id, cx)?;
662 Some((worktree, path))
663 }
664
665 fn load_staged_text(&self, buffer: &Entity<Buffer>, cx: &App) -> Task<Result<Option<String>>> {
666 if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) {
667 worktree.read(cx).load_staged_file(path.as_ref(), cx)
668 } else {
669 return Task::ready(Err(anyhow!("no such worktree")));
670 }
671 }
672
673 fn load_committed_text(
674 &self,
675 buffer: &Entity<Buffer>,
676 cx: &App,
677 ) -> Task<Result<Option<String>>> {
678 if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) {
679 worktree.read(cx).load_committed_file(path.as_ref(), cx)
680 } else {
681 Task::ready(Err(anyhow!("no such worktree")))
682 }
683 }
684
    /// Writes a buffer's contents to `path` within `worktree`, then records
    /// the save on the buffer and notifies the downstream peer (if any).
    ///
    /// `has_changed_file` indicates the buffer is being saved under a new
    /// file; it is forced on for buffers whose file was never on disk.
    fn save_local_buffer(
        &self,
        buffer_handle: Entity<Buffer>,
        worktree: Entity<Worktree>,
        path: Arc<Path>,
        mut has_changed_file: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<()>> {
        let buffer = buffer_handle.read(cx);

        // Snapshot everything needed by the async portion up front.
        let text = buffer.as_rope().clone();
        let line_ending = buffer.line_ending();
        let version = buffer.version();
        let buffer_id = buffer.remote_id();
        // A brand-new file always counts as a file change.
        if buffer
            .file()
            .is_some_and(|file| file.disk_state() == DiskState::New)
        {
            has_changed_file = true;
        }

        let save = worktree.update(cx, |worktree, cx| {
            worktree.write_file(path.as_ref(), text, line_ending, cx)
        });

        cx.spawn(move |this, mut cx| async move {
            let new_file = save.await?;
            let mtime = new_file.disk_state().mtime();
            // Tell the downstream peer about the (possibly renamed) file and
            // the save itself. Failures are logged, not propagated.
            this.update(&mut cx, |this, cx| {
                if let Some((downstream_client, project_id)) = this.downstream_client.clone() {
                    if has_changed_file {
                        downstream_client
                            .send(proto::UpdateBufferFile {
                                project_id,
                                buffer_id: buffer_id.to_proto(),
                                file: Some(language::File::to_proto(&*new_file, cx)),
                            })
                            .log_err();
                    }
                    downstream_client
                        .send(proto::BufferSaved {
                            project_id,
                            buffer_id: buffer_id.to_proto(),
                            version: serialize_version(&version),
                            mtime: mtime.map(|time| time.into()),
                        })
                        .log_err();
                }
            })?;
            // Finally update the buffer itself.
            buffer_handle.update(&mut cx, |buffer, cx| {
                if has_changed_file {
                    buffer.file_updated(new_file, cx);
                }
                buffer.did_save(version.clone(), mtime, cx);
            })
        })
    }
742
743 fn subscribe_to_worktree(
744 &mut self,
745 worktree: &Entity<Worktree>,
746 cx: &mut Context<BufferStore>,
747 ) {
748 cx.subscribe(worktree, |this, worktree, event, cx| {
749 if worktree.read(cx).is_local() {
750 match event {
751 worktree::Event::UpdatedEntries(changes) => {
752 Self::local_worktree_entries_changed(this, &worktree, changes, cx);
753 }
754 worktree::Event::UpdatedGitRepositories(updated_repos) => {
755 Self::local_worktree_git_repos_changed(
756 this,
757 worktree.clone(),
758 updated_repos,
759 cx,
760 )
761 }
762 _ => {}
763 }
764 }
765 })
766 .detach();
767 }
768
769 fn local_worktree_entries_changed(
770 this: &mut BufferStore,
771 worktree_handle: &Entity<Worktree>,
772 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
773 cx: &mut Context<BufferStore>,
774 ) {
775 let snapshot = worktree_handle.read(cx).snapshot();
776 for (path, entry_id, _) in changes {
777 Self::local_worktree_entry_changed(
778 this,
779 *entry_id,
780 path,
781 worktree_handle,
782 &snapshot,
783 cx,
784 );
785 }
786 }
787
    /// Reacts to git repository changes in a local worktree: for every open
    /// buffer under a changed repository that has a live diff, reloads the
    /// relevant base texts on a background thread, then applies the changes
    /// (and forwards them downstream if the project is shared).
    fn local_worktree_git_repos_changed(
        this: &mut BufferStore,
        worktree_handle: Entity<Worktree>,
        changed_repos: &UpdatedGitRepositoriesSet,
        cx: &mut Context<BufferStore>,
    ) {
        debug_assert!(worktree_handle.read(cx).is_local());

        // Collect, per affected buffer: its path plus the current base texts
        // (wrapped in Some only for the diff kinds that are actually alive).
        let mut diff_state_updates = Vec::new();
        for buffer in this.opened_buffers.values() {
            let OpenBuffer::Complete { buffer, diff_state } = buffer else {
                continue;
            };
            let Some(buffer) = buffer.upgrade() else {
                continue;
            };
            let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
                continue;
            };
            if file.worktree != worktree_handle {
                continue;
            }
            let diff_state = diff_state.read(cx);
            // Only buffers inside one of the changed repositories matter.
            if changed_repos
                .iter()
                .any(|(work_dir, _)| file.path.starts_with(work_dir))
            {
                let has_unstaged_diff = diff_state
                    .unstaged_diff
                    .as_ref()
                    .is_some_and(|diff| diff.is_upgradable());
                let has_uncommitted_diff = diff_state
                    .uncommitted_diff
                    .as_ref()
                    .is_some_and(|set| set.is_upgradable());
                diff_state_updates.push((
                    buffer,
                    file.path.clone(),
                    has_unstaged_diff.then(|| diff_state.index_text.clone()),
                    has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                ));
            }
        }

        if diff_state_updates.is_empty() {
            return;
        }

        cx.spawn(move |this, mut cx| async move {
            let snapshot =
                worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
            // Load the new base texts from git off the main thread.
            let diff_bases_changes_by_buffer = cx
                .background_spawn(async move {
                    diff_state_updates
                        .into_iter()
                        .filter_map(|(buffer, path, current_index_text, current_head_text)| {
                            let local_repo = snapshot.local_repo_for_path(&path)?;
                            let relative_path = local_repo.relativize(&path).ok()?;
                            // Only load the texts for diff kinds that are
                            // actually alive (outer Option is Some).
                            let index_text = if current_index_text.is_some() {
                                local_repo.repo().load_index_text(&relative_path)
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                local_repo.repo().load_committed_text(&relative_path)
                            } else {
                                None
                            };

                            // Avoid triggering a diff update if the base text has not changed.
                            if let Some((current_index, current_head)) =
                                current_index_text.as_ref().zip(current_head_text.as_ref())
                            {
                                if current_index.as_deref() == index_text.as_ref()
                                    && current_head.as_deref() == head_text.as_ref()
                                {
                                    return None;
                                }
                            }

                            let diff_bases_change =
                                match (current_index_text.is_some(), current_head_text.is_some()) {
                                    (true, true) => Some(if index_text == head_text {
                                        DiffBasesChange::SetBoth(head_text)
                                    } else {
                                        DiffBasesChange::SetEach {
                                            index: index_text,
                                            head: head_text,
                                        }
                                    }),
                                    (true, false) => Some(DiffBasesChange::SetIndex(index_text)),
                                    (false, true) => Some(DiffBasesChange::SetHead(head_text)),
                                    (false, false) => None,
                                };

                            Some((buffer, diff_bases_change))
                        })
                        .collect::<Vec<_>>()
                })
                .await;

            // Back on the main thread: apply each change to its buffer's
            // diff state, mirroring it downstream first when shared.
            this.update(&mut cx, |this, cx| {
                for (buffer, diff_bases_change) in diff_bases_changes_by_buffer {
                    let Some(OpenBuffer::Complete { diff_state, .. }) =
                        this.opened_buffers.get_mut(&buffer.read(cx).remote_id())
                    else {
                        continue;
                    };
                    let Some(diff_bases_change) = diff_bases_change else {
                        continue;
                    };

                    diff_state.update(cx, |diff_state, cx| {
                        use proto::update_diff_bases::Mode;

                        let buffer = buffer.read(cx);
                        if let Some((client, project_id)) = this.downstream_client.as_ref() {
                            let buffer_id = buffer.remote_id().to_proto();
                            let (staged_text, committed_text, mode) = match diff_bases_change
                                .clone()
                            {
                                DiffBasesChange::SetIndex(index) => (index, None, Mode::IndexOnly),
                                DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                DiffBasesChange::SetEach { index, head } => {
                                    (index, head, Mode::IndexAndHead)
                                }
                                DiffBasesChange::SetBoth(text) => {
                                    (None, text, Mode::IndexMatchesHead)
                                }
                            };
                            let message = proto::UpdateDiffBases {
                                project_id: *project_id,
                                buffer_id,
                                staged_text,
                                committed_text,
                                mode: mode as i32,
                            };

                            client.send(message).log_err();
                        }

                        // Receiver dropped: nothing here awaits completion.
                        let _ = diff_state.diff_bases_changed(
                            buffer.text_snapshot(),
                            diff_bases_change,
                            cx,
                        );
                    });
                }
            })
        })
        .detach_and_log_err(cx);
    }
940
    /// Reconciles a single open buffer with a changed worktree entry:
    /// rebuilds the buffer's `File` from the snapshot (marking it deleted if
    /// the entry is gone), updates the path/entry-id indices, notifies the
    /// downstream peer, and emits `BufferChangedFilePath` when appropriate.
    ///
    /// Returns `Option<()>` purely so `?` can be used for early exits.
    fn local_worktree_entry_changed(
        this: &mut BufferStore,
        entry_id: ProjectEntryId,
        path: &Arc<Path>,
        worktree: &Entity<worktree::Worktree>,
        snapshot: &worktree::Snapshot,
        cx: &mut Context<BufferStore>,
    ) -> Option<()> {
        let project_path = ProjectPath {
            worktree_id: snapshot.id(),
            path: path.clone(),
        };

        // Find the affected buffer by entry id first, then by path.
        let buffer_id = {
            let local = this.as_local_mut()?;
            match local.local_buffer_ids_by_entry_id.get(&entry_id) {
                Some(&buffer_id) => buffer_id,
                None => local.local_buffer_ids_by_path.get(&project_path).copied()?,
            }
        };

        let buffer = if let Some(buffer) = this.get(buffer_id) {
            Some(buffer)
        } else {
            // The buffer entity was dropped; discard the stale entry.
            this.opened_buffers.remove(&buffer_id);
            None
        };

        let buffer = if let Some(buffer) = buffer {
            buffer
        } else {
            // Dead buffer: also remove it from both reverse indices.
            let this = this.as_local_mut()?;
            this.local_buffer_ids_by_path.remove(&project_path);
            this.local_buffer_ids_by_entry_id.remove(&entry_id);
            return None;
        };

        let events = buffer.update(cx, |buffer, cx| {
            let local = this.as_local_mut()?;
            let file = buffer.file()?;
            let old_file = File::from_dyn(Some(file))?;
            if old_file.worktree != *worktree {
                return None;
            }

            // Locate the entry in the new snapshot, preferring entry id over
            // path (the file may have been renamed).
            let snapshot_entry = old_file
                .entry_id
                .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                .or_else(|| snapshot.entry_for_path(old_file.path.as_ref()));

            let new_file = if let Some(entry) = snapshot_entry {
                File {
                    disk_state: match entry.mtime {
                        Some(mtime) => DiskState::Present { mtime },
                        // No mtime in the snapshot: keep the previous state.
                        None => old_file.disk_state,
                    },
                    is_local: true,
                    entry_id: Some(entry.id),
                    path: entry.path.clone(),
                    worktree: worktree.clone(),
                    is_private: entry.is_private,
                }
            } else {
                // Entry vanished: the file was deleted on disk.
                File {
                    disk_state: DiskState::Deleted,
                    is_local: true,
                    entry_id: old_file.entry_id,
                    path: old_file.path.clone(),
                    worktree: worktree.clone(),
                    is_private: old_file.is_private,
                }
            };

            if new_file == *old_file {
                return None;
            }

            let mut events = Vec::new();
            if new_file.path != old_file.path {
                // Re-key the path index and record a file-path-change event.
                local.local_buffer_ids_by_path.remove(&ProjectPath {
                    path: old_file.path.clone(),
                    worktree_id: old_file.worktree_id(cx),
                });
                local.local_buffer_ids_by_path.insert(
                    ProjectPath {
                        worktree_id: new_file.worktree_id(cx),
                        path: new_file.path.clone(),
                    },
                    buffer_id,
                );
                events.push(BufferStoreEvent::BufferChangedFilePath {
                    buffer: cx.entity(),
                    old_file: buffer.file().cloned(),
                });
            }

            if new_file.entry_id != old_file.entry_id {
                // Re-key the entry-id index.
                if let Some(entry_id) = old_file.entry_id {
                    local.local_buffer_ids_by_entry_id.remove(&entry_id);
                }
                if let Some(entry_id) = new_file.entry_id {
                    local
                        .local_buffer_ids_by_entry_id
                        .insert(entry_id, buffer_id);
                }
            }

            // Mirror the file change to the downstream peer, if shared.
            if let Some((client, project_id)) = &this.downstream_client {
                client
                    .send(proto::UpdateBufferFile {
                        project_id: *project_id,
                        buffer_id: buffer_id.to_proto(),
                        file: Some(new_file.to_proto(cx)),
                    })
                    .ok();
            }

            buffer.file_updated(Arc::new(new_file), cx);
            Some(events)
        })?;

        // Emit events only after the buffer update has fully committed.
        for event in events {
            cx.emit(event);
        }

        None
    }
1068
1069 fn buffer_changed_file(&mut self, buffer: Entity<Buffer>, cx: &mut App) -> Option<()> {
1070 let file = File::from_dyn(buffer.read(cx).file())?;
1071
1072 let remote_id = buffer.read(cx).remote_id();
1073 if let Some(entry_id) = file.entry_id {
1074 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
1075 Some(_) => {
1076 return None;
1077 }
1078 None => {
1079 self.local_buffer_ids_by_entry_id
1080 .insert(entry_id, remote_id);
1081 }
1082 }
1083 };
1084 self.local_buffer_ids_by_path.insert(
1085 ProjectPath {
1086 worktree_id: file.worktree_id(cx),
1087 path: file.path.clone(),
1088 },
1089 remote_id,
1090 );
1091
1092 Some(())
1093 }
1094
1095 fn save_buffer(
1096 &self,
1097 buffer: Entity<Buffer>,
1098 cx: &mut Context<BufferStore>,
1099 ) -> Task<Result<()>> {
1100 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1101 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1102 };
1103 let worktree = file.worktree.clone();
1104 self.save_local_buffer(buffer, worktree, file.path.clone(), false, cx)
1105 }
1106
1107 fn save_buffer_as(
1108 &self,
1109 buffer: Entity<Buffer>,
1110 path: ProjectPath,
1111 cx: &mut Context<BufferStore>,
1112 ) -> Task<Result<()>> {
1113 let Some(worktree) = self
1114 .worktree_store
1115 .read(cx)
1116 .worktree_for_id(path.worktree_id, cx)
1117 else {
1118 return Task::ready(Err(anyhow!("no such worktree")));
1119 };
1120 self.save_local_buffer(buffer, worktree, path.path.clone(), true, cx)
1121 }
1122
    /// Opens a buffer for a file in a local worktree, loading its contents
    /// from disk. A missing file yields an empty buffer whose file is marked
    /// `DiskState::New` rather than an error. The buffer is registered with
    /// the store and its path/entry-id indices before being returned.
    fn open_buffer(
        &self,
        path: Arc<Path>,
        worktree: Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let load_file = worktree.load_file(path.as_ref(), cx);
            // Reserve the entity slot up front so the buffer id (derived
            // from the entity id) is known before the file finishes loading.
            let reservation = cx.reserve_entity();
            let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
            cx.spawn(move |_, mut cx| async move {
                let loaded = load_file.await?;
                let text_buffer = cx
                    .background_spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) })
                    .await;
                cx.insert_entity(reservation, |_| {
                    Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
                })
            })
        });

        cx.spawn(move |this, mut cx| async move {
            let buffer = match load_buffer.await {
                Ok(buffer) => Ok(buffer),
                // File-not-found: create an empty in-memory buffer whose
                // file records that nothing exists on disk yet.
                Err(error) if is_not_found_error(&error) => cx.new(|cx| {
                    let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
                    let text_buffer = text::Buffer::new(0, buffer_id, "".into());
                    Buffer::build(
                        text_buffer,
                        Some(Arc::new(File {
                            worktree,
                            path,
                            disk_state: DiskState::New,
                            entry_id: None,
                            is_local: true,
                            is_private: false,
                        })),
                        Capability::ReadWrite,
                    )
                }),
                Err(e) => Err(e),
            }?;
            // Register the buffer and index it by path (and entry id, when
            // the file exists on disk).
            this.update(&mut cx, |this, cx| {
                this.add_buffer(buffer.clone(), cx)?;
                let buffer_id = buffer.read(cx).remote_id();
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    let this = this.as_local_mut().unwrap();
                    this.local_buffer_ids_by_path.insert(
                        ProjectPath {
                            worktree_id: file.worktree_id(cx),
                            path: file.path.clone(),
                        },
                        buffer_id,
                    );

                    if let Some(entry_id) = file.entry_id {
                        this.local_buffer_ids_by_entry_id
                            .insert(entry_id, buffer_id);
                    }
                }

                anyhow::Ok(())
            })??;

            Ok(buffer)
        })
    }
1190
1191 fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
1192 cx.spawn(|buffer_store, mut cx| async move {
1193 let buffer =
1194 cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx))?;
1195 buffer_store.update(&mut cx, |buffer_store, cx| {
1196 buffer_store.add_buffer(buffer.clone(), cx).log_err();
1197 })?;
1198 Ok(buffer)
1199 })
1200 }
1201
    /// Reloads each of the given buffers from disk, one at a time.
    ///
    /// Returns a `ProjectTransaction` that maps each buffer to the
    /// transaction produced by its reload. When `push_to_history` is false,
    /// each reload transaction is forgotten so it does not appear in the
    /// buffer's undo history.
    fn reload_buffers(
        &self,
        buffers: HashSet<Entity<Buffer>>,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(move |_, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            for buffer in buffers {
                // `reload` returns None when reloading produced no edits.
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))?
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.entity(), transaction);
                    }
                })?;
            }

            Ok(project_transaction)
        })
    }
1227}
1228
1229impl BufferStore {
    /// Registers the RPC handlers that route buffer-related protocol
    /// messages to `BufferStore` entities on this client.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_request_handler(Self::handle_save_buffer);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_request_handler(Self::handle_reload_buffers);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
    }
1242
1243 /// Creates a buffer store, optionally retaining its buffers.
1244 pub fn local(worktree_store: Entity<WorktreeStore>, cx: &mut Context<Self>) -> Self {
1245 Self {
1246 state: BufferStoreState::Local(LocalBufferStore {
1247 local_buffer_ids_by_path: Default::default(),
1248 local_buffer_ids_by_entry_id: Default::default(),
1249 worktree_store: worktree_store.clone(),
1250 _subscription: cx.subscribe(&worktree_store, |this, _, event, cx| {
1251 if let WorktreeStoreEvent::WorktreeAdded(worktree) = event {
1252 let this = this.as_local_mut().unwrap();
1253 this.subscribe_to_worktree(worktree, cx);
1254 }
1255 }),
1256 }),
1257 downstream_client: None,
1258 opened_buffers: Default::default(),
1259 shared_buffers: Default::default(),
1260 loading_buffers: Default::default(),
1261 loading_diffs: Default::default(),
1262 worktree_store,
1263 }
1264 }
1265
1266 pub fn remote(
1267 worktree_store: Entity<WorktreeStore>,
1268 upstream_client: AnyProtoClient,
1269 remote_id: u64,
1270 _cx: &mut Context<Self>,
1271 ) -> Self {
1272 Self {
1273 state: BufferStoreState::Remote(RemoteBufferStore {
1274 shared_with_me: Default::default(),
1275 loading_remote_buffers_by_id: Default::default(),
1276 remote_buffer_listeners: Default::default(),
1277 project_id: remote_id,
1278 upstream_client,
1279 worktree_store: worktree_store.clone(),
1280 }),
1281 downstream_client: None,
1282 opened_buffers: Default::default(),
1283 loading_buffers: Default::default(),
1284 loading_diffs: Default::default(),
1285 shared_buffers: Default::default(),
1286 worktree_store,
1287 }
1288 }
1289
1290 fn as_local_mut(&mut self) -> Option<&mut LocalBufferStore> {
1291 match &mut self.state {
1292 BufferStoreState::Local(state) => Some(state),
1293 _ => None,
1294 }
1295 }
1296
1297 fn as_remote_mut(&mut self) -> Option<&mut RemoteBufferStore> {
1298 match &mut self.state {
1299 BufferStoreState::Remote(state) => Some(state),
1300 _ => None,
1301 }
1302 }
1303
1304 fn as_remote(&self) -> Option<&RemoteBufferStore> {
1305 match &self.state {
1306 BufferStoreState::Remote(state) => Some(state),
1307 _ => None,
1308 }
1309 }
1310
    /// Opens the buffer at `project_path`, deduplicating concurrent opens.
    ///
    /// Returns the existing buffer if it is already open. Otherwise, starts
    /// (or joins) an in-flight load for that path: the shared task is
    /// stored in `loading_buffers` so multiple callers await the same load,
    /// and is removed again once the load finishes (success or failure).
    pub fn open_buffer(
        &mut self,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        if let Some(buffer) = self.get_by_path(&project_path, cx) {
            return Task::ready(Ok(buffer));
        }

        let task = match self.loading_buffers.entry(project_path.clone()) {
            // Another caller is already loading this path; share its task.
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                let path = project_path.path.clone();
                let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(project_path.worktree_id, cx)
                else {
                    return Task::ready(Err(anyhow!("no such worktree")));
                };
                let load_buffer = match &self.state {
                    BufferStoreState::Local(this) => this.open_buffer(path, worktree, cx),
                    BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, mut cx| async move {
                            let load_result = load_buffer.await;
                            this.update(&mut cx, |this, _cx| {
                                // Record the fact that the buffer is no longer loading.
                                this.loading_buffers.remove(&project_path);
                            })
                            .ok();
                            // The error is wrapped in `Arc` so the shared
                            // task's result can be cloned to every waiter.
                            load_result.map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1355
    /// Opens (or returns the already-open) unstaged diff for `buffer`,
    /// comparing the buffer's contents against the Git index.
    ///
    /// If a live unstaged diff already exists, it is returned once any
    /// in-progress recalculation completes. Otherwise a load is started (or
    /// joined, if one is already in flight for this buffer) and memoized in
    /// `loading_diffs`.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(OpenBuffer::Complete { diff_state, .. }) = self.opened_buffers.get(&buffer_id) {
            if let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                // A recalculation may be running; hand the diff back only
                // after it settles.
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await?;
                        Ok(unstaged_diff)
                    });
                }
                return Task::ready(Ok(unstaged_diff));
            }
        }

        let task = match self.loading_diffs.entry((buffer_id, DiffKind::Unstaged)) {
            // Join the load that is already in flight.
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                let staged_text = match &self.state {
                    BufferStoreState::Local(this) => this.load_staged_text(&buffer, cx),
                    BufferStoreState::Remote(this) => this.open_unstaged_diff(buffer_id, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, cx| async move {
                            Self::open_diff_internal(
                                this,
                                DiffKind::Unstaged,
                                staged_text.await.map(DiffBasesChange::SetIndex),
                                buffer,
                                cx,
                            )
                            .await
                            .map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1410
    /// Opens (or returns the already-open) uncommitted diff for `buffer`,
    /// comparing the buffer's contents against the repository HEAD.
    ///
    /// Locally this loads both the committed (HEAD) and staged (index)
    /// texts; when the two are identical a single shared base is used.
    /// Concurrent opens for the same buffer are deduplicated through
    /// `loading_diffs`.
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        if let Some(OpenBuffer::Complete { diff_state, .. }) = self.opened_buffers.get(&buffer_id) {
            if let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                // A recalculation may be running; hand the diff back only
                // after it settles.
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await?;
                        Ok(uncommitted_diff)
                    });
                }
                return Task::ready(Ok(uncommitted_diff));
            }
        }

        let task = match self.loading_diffs.entry((buffer_id, DiffKind::Uncommitted)) {
            // Join the load that is already in flight.
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                let changes = match &self.state {
                    BufferStoreState::Local(this) => {
                        let committed_text = this.load_committed_text(&buffer, cx);
                        let staged_text = this.load_staged_text(&buffer, cx);
                        cx.background_spawn(async move {
                            let committed_text = committed_text.await?;
                            let staged_text = staged_text.await?;
                            // When HEAD and the index agree, store one base
                            // for both diffs instead of two copies.
                            let diff_bases_change = if committed_text == staged_text {
                                DiffBasesChange::SetBoth(committed_text)
                            } else {
                                DiffBasesChange::SetEach {
                                    index: staged_text,
                                    head: committed_text,
                                }
                            };
                            Ok(diff_bases_change)
                        })
                    }
                    BufferStoreState::Remote(this) => this.open_uncommitted_diff(buffer_id, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, cx| async move {
                            Self::open_diff_internal(
                                this,
                                DiffKind::Uncommitted,
                                changes.await,
                                buffer,
                                cx,
                            )
                            .await
                            .map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1482
    /// Finishes opening a diff of the given `kind` once its base text(s)
    /// have been loaded.
    ///
    /// On error, the `loading_diffs` entry is cleared and the error is
    /// returned. On success, a new `BufferDiff` is created and wired into
    /// the buffer's diff state; for uncommitted diffs, an unstaged diff is
    /// created (if not already present) and attached as the secondary diff.
    /// Resolves after the initial diff recalculation completes.
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        mut cx: AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the memoized loading entry so a later open can retry.
                this.update(&mut cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(&mut cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            if let Some(OpenBuffer::Complete { diff_state, .. }) =
                this.opened_buffers.get_mut(&buffer_id)
            {
                let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                cx.emit(BufferStoreEvent::BufferDiffAdded(diff.clone()));
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.language = language;
                    diff_state.language_registry = language_registry;

                    // Only weak handles are stored, so diffs can be dropped
                    // when no longer used elsewhere.
                    match kind {
                        DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                        DiffKind::Uncommitted => {
                            let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                                diff
                            } else {
                                let unstaged_diff =
                                    cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                                diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                                unstaged_diff
                            };

                            diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                            diff_state.uncommitted_diff = Some(diff.downgrade())
                        }
                    };

                    let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, cx);

                    Ok(async move {
                        // Wait for the initial recalculation before handing
                        // the diff to the caller.
                        rx.await.ok();
                        Ok(diff)
                    })
                })
            } else {
                Err(anyhow!("buffer was closed"))
            }
        })??
        .await
    }
1549
    /// Creates a new empty, untitled buffer, either locally or by asking
    /// the remote host for one.
    pub fn create_buffer(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<Buffer>>> {
        match &self.state {
            BufferStoreState::Local(this) => this.create_buffer(cx),
            BufferStoreState::Remote(this) => this.create_buffer(cx),
        }
    }
1556
    /// Saves `buffer` to its current path, locally or via the remote host.
    pub fn save_buffer(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        match &mut self.state {
            BufferStoreState::Local(this) => this.save_buffer(buffer, cx),
            // `None` means "save to the buffer's existing path" upstream.
            BufferStoreState::Remote(this) => this.save_remote_buffer(buffer.clone(), None, cx),
        }
    }
1567
    /// Saves `buffer` under a new project path, then emits
    /// `BufferChangedFilePath` (with the pre-save file) so observers can
    /// react to the rename.
    pub fn save_buffer_as(
        &mut self,
        buffer: Entity<Buffer>,
        path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        // Capture the file before saving; it becomes the event's `old_file`.
        let old_file = buffer.read(cx).file().cloned();
        let task = match &self.state {
            BufferStoreState::Local(this) => this.save_buffer_as(buffer.clone(), path, cx),
            BufferStoreState::Remote(this) => {
                this.save_remote_buffer(buffer.clone(), Some(path.to_proto()), cx)
            }
        };
        cx.spawn(|this, mut cx| async move {
            task.await?;
            this.update(&mut cx, |_, cx| {
                cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
            })
        })
    }
1588
    /// Computes the Git blame for `buffer`, optionally at a specific
    /// `version` of its text.
    ///
    /// Returns `Ok(None)` when the buffer's file is not inside a Git
    /// repository. For remote worktrees, the blame request is forwarded to
    /// the host over RPC.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some(file) = File::from_dyn(buffer.file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        match file.worktree.clone().read(cx) {
            Worktree::Local(worktree) => {
                let worktree = worktree.snapshot();
                // Gather everything the background task needs on the main
                // thread first (repo handle, repo-relative path, text).
                let blame_params = maybe!({
                    let local_repo = match worktree.local_repo_for_path(&file.path) {
                        Some(repo_for_path) => repo_for_path,
                        // Not under a Git repository: blame is simply absent.
                        None => return Ok(None),
                    };

                    let relative_path = local_repo
                        .relativize(&file.path)
                        .context("failed to relativize buffer path")?;

                    let repo = local_repo.repo().clone();

                    let content = match version {
                        Some(version) => buffer.rope_for_version(&version).clone(),
                        None => buffer.as_rope().clone(),
                    };

                    anyhow::Ok(Some((repo, relative_path, content)))
                });

                cx.background_spawn(async move {
                    let Some((repo, relative_path, content)) = blame_params? else {
                        return Ok(None);
                    };
                    repo.blame(&relative_path, content)
                        .with_context(|| format!("Failed to blame {:?}", relative_path.0))
                        .map(Some)
                })
            }
            Worktree::Remote(worktree) => {
                let buffer_id = buffer.remote_id();
                let version = buffer.version();
                let project_id = worktree.project_id();
                let client = worktree.client();
                cx.spawn(|_| async move {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id,
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                })
            }
        }
    }
1650
    /// Builds a permalink URL to the given line `selection` of `buffer` on
    /// its Git hosting provider (e.g. GitHub).
    ///
    /// Locally this resolves the repository's `origin` remote and HEAD SHA;
    /// Rust files outside any repository fall back to Cargo-registry crate
    /// metadata. For remote worktrees the request is forwarded to the host.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &App,
    ) -> Task<Result<url::Url>> {
        let buffer = buffer.read(cx);
        let Some(file) = File::from_dyn(buffer.file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        match file.worktree.read(cx) {
            Worktree::Local(worktree) => {
                let worktree_path = worktree.abs_path().clone();
                let Some((repo_entry, repo)) =
                    worktree.repository_for_path(file.path()).and_then(|entry| {
                        let repo = worktree.get_local_repo(&entry)?.repo().clone();
                        Some((entry, repo))
                    })
                else {
                    // If we're not in a Git repo, check whether this is a Rust source
                    // file in the Cargo registry (presumably opened with go-to-definition
                    // from a normal Rust file). If so, we can put together a permalink
                    // using crate metadata.
                    if buffer
                        .language()
                        .is_none_or(|lang| lang.name() != "Rust".into())
                    {
                        return Task::ready(Err(anyhow!("no permalink available")));
                    }
                    let file_path = worktree_path.join(file.path());
                    return cx.spawn(|cx| async move {
                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;
                        get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                            .map_err(|_| anyhow!("no permalink available"))
                    });
                };

                // Convert the worktree-relative path to a repo-relative one.
                let path = match repo_entry.relativize(file.path()) {
                    Ok(RepoPath(path)) => path,
                    Err(e) => return Task::ready(Err(e)),
                };

                cx.spawn(|cx| async move {
                    const REMOTE_NAME: &str = "origin";
                    let origin_url = repo
                        .remote_url(REMOTE_NAME)
                        .ok_or_else(|| anyhow!("remote \"{REMOTE_NAME}\" not found"))?;

                    let sha = repo
                        .head_sha()
                        .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;

                    let provider_registry =
                        cx.update(GitHostingProviderRegistry::default_global)?;

                    let (provider, remote) =
                        parse_git_remote_url(provider_registry, &origin_url)
                            .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;

                    let path = path
                        .to_str()
                        .ok_or_else(|| anyhow!("failed to convert path to string"))?;

                    Ok(provider.build_permalink(
                        remote,
                        BuildPermalinkParams {
                            sha: &sha,
                            path,
                            selection: Some(selection),
                        },
                    ))
                })
            }
            Worktree::Remote(worktree) => {
                let buffer_id = buffer.remote_id();
                let project_id = worktree.project_id();
                let client = worktree.client();
                cx.spawn(|_| async move {
                    let response = client
                        .request(proto::GetPermalinkToLine {
                            project_id,
                            buffer_id: buffer_id.into(),
                            selection: Some(proto::Range {
                                start: selection.start as u64,
                                end: selection.end as u64,
                            }),
                        })
                        .await?;

                    url::Url::parse(&response.permalink).context("failed to parse permalink")
                })
            }
        }
    }
1747
1748 fn add_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) -> Result<()> {
1749 let buffer = buffer_entity.read(cx);
1750 let language = buffer.language().cloned();
1751 let language_registry = buffer.language_registry();
1752 let remote_id = buffer.remote_id();
1753 let is_remote = buffer.replica_id() != 0;
1754 let open_buffer = OpenBuffer::Complete {
1755 buffer: buffer_entity.downgrade(),
1756 diff_state: cx.new(|_| BufferDiffState {
1757 language,
1758 language_registry,
1759 ..Default::default()
1760 }),
1761 };
1762
1763 let handle = cx.entity().downgrade();
1764 buffer_entity.update(cx, move |_, cx| {
1765 cx.on_release(move |buffer, cx| {
1766 handle
1767 .update(cx, |_, cx| {
1768 cx.emit(BufferStoreEvent::BufferDropped(buffer.remote_id()))
1769 })
1770 .ok();
1771 })
1772 .detach()
1773 });
1774
1775 match self.opened_buffers.entry(remote_id) {
1776 hash_map::Entry::Vacant(entry) => {
1777 entry.insert(open_buffer);
1778 }
1779 hash_map::Entry::Occupied(mut entry) => {
1780 if let OpenBuffer::Operations(operations) = entry.get_mut() {
1781 buffer_entity.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx));
1782 } else if entry.get().upgrade().is_some() {
1783 if is_remote {
1784 return Ok(());
1785 } else {
1786 debug_panic!("buffer {} was already registered", remote_id);
1787 Err(anyhow!("buffer {} was already registered", remote_id))?;
1788 }
1789 }
1790 entry.insert(open_buffer);
1791 }
1792 }
1793
1794 cx.subscribe(&buffer_entity, Self::on_buffer_event).detach();
1795 cx.emit(BufferStoreEvent::BufferAdded(buffer_entity));
1796 Ok(())
1797 }
1798
1799 pub fn buffers(&self) -> impl '_ + Iterator<Item = Entity<Buffer>> {
1800 self.opened_buffers
1801 .values()
1802 .filter_map(|buffer| buffer.upgrade())
1803 }
1804
1805 pub fn loading_buffers(
1806 &self,
1807 ) -> impl Iterator<Item = (&ProjectPath, impl Future<Output = Result<Entity<Buffer>>>)> {
1808 self.loading_buffers.iter().map(|(path, task)| {
1809 let task = task.clone();
1810 (path, async move { task.await.map_err(|e| anyhow!("{e}")) })
1811 })
1812 }
1813
1814 pub fn get_by_path(&self, path: &ProjectPath, cx: &App) -> Option<Entity<Buffer>> {
1815 self.buffers().find_map(|buffer| {
1816 let file = File::from_dyn(buffer.read(cx).file())?;
1817 if file.worktree_id(cx) == path.worktree_id && file.path == path.path {
1818 Some(buffer)
1819 } else {
1820 None
1821 }
1822 })
1823 }
1824
1825 pub fn get(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1826 self.opened_buffers.get(&buffer_id)?.upgrade()
1827 }
1828
1829 pub fn get_existing(&self, buffer_id: BufferId) -> Result<Entity<Buffer>> {
1830 self.get(buffer_id)
1831 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
1832 }
1833
1834 pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1835 self.get(buffer_id).or_else(|| {
1836 self.as_remote()
1837 .and_then(|remote| remote.loading_remote_buffers_by_id.get(&buffer_id).cloned())
1838 })
1839 }
1840
1841 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1842 if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
1843 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1844 } else {
1845 None
1846 }
1847 }
1848
1849 pub fn get_uncommitted_diff(
1850 &self,
1851 buffer_id: BufferId,
1852 cx: &App,
1853 ) -> Option<Entity<BufferDiff>> {
1854 if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
1855 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1856 } else {
1857 None
1858 }
1859 }
1860
1861 pub fn buffer_version_info(&self, cx: &App) -> (Vec<proto::BufferVersion>, Vec<BufferId>) {
1862 let buffers = self
1863 .buffers()
1864 .map(|buffer| {
1865 let buffer = buffer.read(cx);
1866 proto::BufferVersion {
1867 id: buffer.remote_id().into(),
1868 version: language::proto::serialize_version(&buffer.version),
1869 }
1870 })
1871 .collect();
1872 let incomplete_buffer_ids = self
1873 .as_remote()
1874 .map(|remote| remote.incomplete_buffer_ids())
1875 .unwrap_or_default();
1876 (buffers, incomplete_buffer_ids)
1877 }
1878
    /// Transitions all buffers into a disconnected state after the
    /// connection to the host is lost: pending waits are abandoned and
    /// every buffer becomes read-only.
    pub fn disconnected_from_host(&mut self, cx: &mut App) {
        for open_buffer in self.opened_buffers.values_mut() {
            if let Some(buffer) = open_buffer.upgrade() {
                buffer.update(cx, |buffer, _| buffer.give_up_waiting());
            }
        }

        for buffer in self.buffers() {
            buffer.update(cx, |buffer, cx| {
                buffer.set_capability(Capability::ReadOnly, cx)
            });
        }

        if let Some(remote) = self.as_remote_mut() {
            // Wake up all futures currently waiting on a buffer to get opened,
            // to give them a chance to fail now that we've disconnected.
            remote.remote_buffer_listeners.clear()
        }
    }
1898
    /// Marks this store as shared with a downstream client under the given
    /// remote project id; buffer events (e.g. reloads) are subsequently
    /// forwarded to that client.
    pub fn shared(&mut self, remote_id: u64, downstream_client: AnyProtoClient, _cx: &mut App) {
        self.downstream_client = Some((downstream_client, remote_id));
    }
1902
    /// Stops sharing this store: drops the downstream client and clears the
    /// record of which buffers were shared with peers.
    pub fn unshared(&mut self, _cx: &mut Context<Self>) {
        self.downstream_client.take();
        self.forget_shared_buffers();
    }
1907
1908 pub fn discard_incomplete(&mut self) {
1909 self.opened_buffers
1910 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
1911 }
1912
    /// Streams buffers that may match `query`, up to `limit` results.
    ///
    /// Buffers without a file entry ("unnamed" buffers) are sent first and
    /// counted against the limit. Candidate paths are then obtained from
    /// the worktree store and opened in batches; each successfully opened
    /// buffer is sent on the returned channel. The channel closes when the
    /// search is exhausted or the receiver is dropped.
    pub fn find_search_candidates(
        &mut self,
        query: &SearchQuery,
        mut limit: usize,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Receiver<Entity<Buffer>> {
        let (tx, rx) = smol::channel::unbounded();
        let mut open_buffers = HashSet::default();
        let mut unnamed_buffers = Vec::new();
        for handle in self.buffers() {
            let buffer = handle.read(cx);
            if let Some(entry_id) = buffer.entry_id(cx) {
                // Already-open buffers are searched via their entry ids.
                open_buffers.insert(entry_id);
            } else {
                limit = limit.saturating_sub(1);
                unnamed_buffers.push(handle)
            };
        }

        // Bound how many buffers are opened concurrently per batch.
        const MAX_CONCURRENT_BUFFER_OPENS: usize = 64;
        let project_paths_rx = self
            .worktree_store
            .update(cx, |worktree_store, cx| {
                worktree_store.find_search_candidates(query.clone(), limit, open_buffers, fs, cx)
            })
            .chunks(MAX_CONCURRENT_BUFFER_OPENS);

        cx.spawn(|this, mut cx| async move {
            for buffer in unnamed_buffers {
                tx.send(buffer).await.ok();
            }

            let mut project_paths_rx = pin!(project_paths_rx);
            while let Some(project_paths) = project_paths_rx.next().await {
                let buffers = this.update(&mut cx, |this, cx| {
                    project_paths
                        .into_iter()
                        .map(|project_path| this.open_buffer(project_path, cx))
                        .collect::<Vec<_>>()
                })?;
                for buffer_task in buffers {
                    if let Some(buffer) = buffer_task.await.log_err() {
                        // A closed receiver means the consumer stopped
                        // listening; end the search early.
                        if tx.send(buffer).await.is_err() {
                            return anyhow::Ok(());
                        }
                    }
                }
            }
            anyhow::Ok(())
        })
        .detach();
        rx
    }
1967
1968 pub fn recalculate_buffer_diffs(
1969 &mut self,
1970 buffers: Vec<Entity<Buffer>>,
1971 cx: &mut Context<Self>,
1972 ) -> impl Future<Output = ()> {
1973 let mut futures = Vec::new();
1974 for buffer in buffers {
1975 if let Some(OpenBuffer::Complete { diff_state, .. }) =
1976 self.opened_buffers.get_mut(&buffer.read(cx).remote_id())
1977 {
1978 let buffer = buffer.read(cx).text_snapshot();
1979 futures.push(diff_state.update(cx, |diff_state, cx| {
1980 diff_state.recalculate_diffs(buffer, cx)
1981 }));
1982 }
1983 }
1984 async move {
1985 futures::future::join_all(futures).await;
1986 }
1987 }
1988
    /// Reacts to events emitted by an individual buffer.
    ///
    /// File handle changes update local bookkeeping; reloads are forwarded
    /// to the downstream client (if the project is shared); language
    /// changes are propagated to the buffer's diff state.
    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::FileHandleChanged => {
                if let Some(local) = self.as_local_mut() {
                    local.buffer_changed_file(buffer, cx);
                }
            }
            BufferEvent::Reloaded => {
                // Only relevant while the project is shared downstream.
                let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else {
                    return;
                };
                let buffer = buffer.read(cx);
                downstream_client
                    .send(proto::BufferReloaded {
                        project_id: *project_id,
                        buffer_id: buffer.remote_id().to_proto(),
                        version: serialize_version(&buffer.version()),
                        mtime: buffer.saved_mtime().map(|t| t.into()),
                        line_ending: serialize_line_ending(buffer.line_ending()) as i32,
                    })
                    .log_err();
            }
            BufferEvent::LanguageChanged => {
                let buffer_id = buffer.read(cx).remote_id();
                if let Some(OpenBuffer::Complete { diff_state, .. }) =
                    self.opened_buffers.get(&buffer_id)
                {
                    diff_state.update(cx, |diff_state, cx| {
                        diff_state.buffer_language_changed(buffer, cx);
                    });
                }
            }
            _ => {}
        }
    }
2029
2030 pub async fn handle_update_buffer(
2031 this: Entity<Self>,
2032 envelope: TypedEnvelope<proto::UpdateBuffer>,
2033 mut cx: AsyncApp,
2034 ) -> Result<proto::Ack> {
2035 let payload = envelope.payload.clone();
2036 let buffer_id = BufferId::new(payload.buffer_id)?;
2037 let ops = payload
2038 .operations
2039 .into_iter()
2040 .map(language::proto::deserialize_operation)
2041 .collect::<Result<Vec<_>, _>>()?;
2042 this.update(&mut cx, |this, cx| {
2043 match this.opened_buffers.entry(buffer_id) {
2044 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2045 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
2046 OpenBuffer::Complete { buffer, .. } => {
2047 if let Some(buffer) = buffer.upgrade() {
2048 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx));
2049 }
2050 }
2051 },
2052 hash_map::Entry::Vacant(e) => {
2053 e.insert(OpenBuffer::Operations(ops));
2054 }
2055 }
2056 Ok(proto::Ack {})
2057 })?
2058 }
2059
2060 pub fn register_shared_lsp_handle(
2061 &mut self,
2062 peer_id: proto::PeerId,
2063 buffer_id: BufferId,
2064 handle: OpenLspBufferHandle,
2065 ) {
2066 if let Some(shared_buffers) = self.shared_buffers.get_mut(&peer_id) {
2067 if let Some(buffer) = shared_buffers.get_mut(&buffer_id) {
2068 buffer.lsp_handle = Some(handle);
2069 return;
2070 }
2071 }
2072 debug_panic!("tried to register shared lsp handle, but buffer was not shared")
2073 }
2074
    /// Handles a guest's `SynchronizeBuffers` request after a reconnect.
    ///
    /// Resets the set of buffers shared with that guest, then for each
    /// buffer the guest reports: re-registers it as shared, records its
    /// current version in the response, resends its file and saved state,
    /// and streams any operations the guest is missing (relative to the
    /// version it reported) in the background.
    pub fn handle_synchronize_buffers(
        &mut self,
        envelope: TypedEnvelope<proto::SynchronizeBuffers>,
        cx: &mut Context<Self>,
        client: Arc<Client>,
    ) -> Result<proto::SynchronizeBuffersResponse> {
        let project_id = envelope.payload.project_id;
        let mut response = proto::SynchronizeBuffersResponse {
            buffers: Default::default(),
        };
        let Some(guest_id) = envelope.original_sender_id else {
            anyhow::bail!("missing original_sender_id on SynchronizeBuffers request");
        };

        // Start from a clean slate for this guest; entries are re-added below.
        self.shared_buffers.entry(guest_id).or_default().clear();
        for buffer in envelope.payload.buffers {
            let buffer_id = BufferId::new(buffer.id)?;
            let remote_version = language::proto::deserialize_version(&buffer.version);
            if let Some(buffer) = self.get(buffer_id) {
                self.shared_buffers
                    .entry(guest_id)
                    .or_default()
                    .entry(buffer_id)
                    .or_insert_with(|| SharedBuffer {
                        buffer: buffer.clone(),
                        diff: None,
                        lsp_handle: None,
                    });

                let buffer = buffer.read(cx);
                response.buffers.push(proto::BufferVersion {
                    id: buffer_id.into(),
                    version: language::proto::serialize_version(&buffer.version),
                });

                // Serialize only the operations the guest hasn't seen yet.
                let operations = buffer.serialize_ops(Some(remote_version), cx);
                let client = client.clone();
                if let Some(file) = buffer.file() {
                    client
                        .send(proto::UpdateBufferFile {
                            project_id,
                            buffer_id: buffer_id.into(),
                            file: Some(file.to_proto(cx)),
                        })
                        .log_err();
                }

                // TODO(max): do something
                // client
                //     .send(proto::UpdateStagedText {
                //         project_id,
                //         buffer_id: buffer_id.into(),
                //         diff_base: buffer.diff_base().map(ToString::to_string),
                //     })
                //     .log_err();

                client
                    .send(proto::BufferReloaded {
                        project_id,
                        buffer_id: buffer_id.into(),
                        version: language::proto::serialize_version(buffer.saved_version()),
                        mtime: buffer.saved_mtime().map(|time| time.into()),
                        line_ending: language::proto::serialize_line_ending(buffer.line_ending())
                            as i32,
                    })
                    .log_err();

                // Ship the missing operations in chunks off the main thread.
                cx.background_spawn(
                    async move {
                        let operations = operations.await;
                        for chunk in split_operations(operations) {
                            client
                                .request(proto::UpdateBuffer {
                                    project_id,
                                    buffer_id: buffer_id.into(),
                                    operations: chunk,
                                })
                                .await?;
                        }
                        anyhow::Ok(())
                    }
                    .log_err(),
                )
                .detach();
            }
        }
        Ok(response)
    }
2163
2164 pub fn handle_create_buffer_for_peer(
2165 &mut self,
2166 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
2167 replica_id: u16,
2168 capability: Capability,
2169 cx: &mut Context<Self>,
2170 ) -> Result<()> {
2171 let Some(remote) = self.as_remote_mut() else {
2172 return Err(anyhow!("buffer store is not a remote"));
2173 };
2174
2175 if let Some(buffer) =
2176 remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx)?
2177 {
2178 self.add_buffer(buffer, cx)?;
2179 }
2180
2181 Ok(())
2182 }
2183
    /// Handles an `UpdateBufferFile` message: swaps in the buffer's new
    /// file, emits `BufferChangedFilePath` if the path changed, and relays
    /// the message to the downstream client when the project is shared.
    pub async fn handle_update_buffer_file(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateBufferFile>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = envelope.payload.buffer_id;
        let buffer_id = BufferId::new(buffer_id)?;

        this.update(&mut cx, |this, cx| {
            // Clone the payload: `payload.file` is consumed here, while
            // `envelope.payload.file` is forwarded downstream below.
            let payload = envelope.payload.clone();
            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
                let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
                let worktree = this
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
                    .ok_or_else(|| anyhow!("no such worktree"))?;
                let file = File::from_proto(file, worktree, cx)?;
                let old_file = buffer.update(cx, |buffer, cx| {
                    let old_file = buffer.file().cloned();
                    let new_path = file.path.clone();
                    buffer.file_updated(Arc::new(file), cx);
                    // Only report the old file when the path actually changed.
                    if old_file
                        .as_ref()
                        .map_or(true, |old| *old.path() != new_path)
                    {
                        Some(old_file)
                    } else {
                        None
                    }
                });
                if let Some(old_file) = old_file {
                    cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
                }
            }
            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
                downstream_client
                    .send(proto::UpdateBufferFile {
                        project_id: *project_id,
                        buffer_id: buffer_id.into(),
                        file: envelope.payload.file,
                    })
                    .log_err();
            }
            Ok(())
        })?
    }
2231
    /// Message handler for `proto::SaveBuffer`: saves the buffer (optionally
    /// to a new path) on behalf of a peer and replies with the saved version
    /// and mtime. Fails if the project is not shared.
    pub async fn handle_save_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let (buffer, project_id) = this.update(&mut cx, |this, _| {
            anyhow::Ok((
                this.get_existing(buffer_id)?,
                this.downstream_client
                    .as_ref()
                    .map(|(_, project_id)| *project_id)
                    .context("project is not shared")?,
            ))
        })??;
        // Wait until this replica has observed all edits contained in the
        // requester's version before saving, so the save reflects them.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&envelope.payload.version))
            })?
            .await?;
        // Re-read the canonical remote id from the buffer itself.
        let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;

        if let Some(new_path) = envelope.payload.new_path {
            // Save-as: the peer requested the buffer be written to a new path.
            let new_path = ProjectPath::from_proto(new_path);
            this.update(&mut cx, |this, cx| {
                this.save_buffer_as(buffer.clone(), new_path, cx)
            })?
            .await?;
        } else {
            this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
                .await?;
        }

        // Reply with the state recorded by the save.
        buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
            project_id,
            buffer_id: buffer_id.into(),
            version: serialize_version(buffer.saved_version()),
            mtime: buffer.saved_mtime().map(|time| time.into()),
        })
    }
2272
2273 pub async fn handle_close_buffer(
2274 this: Entity<Self>,
2275 envelope: TypedEnvelope<proto::CloseBuffer>,
2276 mut cx: AsyncApp,
2277 ) -> Result<()> {
2278 let peer_id = envelope.sender_id;
2279 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2280 this.update(&mut cx, |this, _| {
2281 if let Some(shared) = this.shared_buffers.get_mut(&peer_id) {
2282 if shared.remove(&buffer_id).is_some() {
2283 if shared.is_empty() {
2284 this.shared_buffers.remove(&peer_id);
2285 }
2286 return;
2287 }
2288 }
2289 debug_panic!(
2290 "peer_id {} closed buffer_id {} which was either not open or already closed",
2291 peer_id,
2292 buffer_id
2293 )
2294 })
2295 }
2296
    /// Message handler for `proto::BufferSaved`: records the saved version and
    /// mtime on the local buffer, then relays the notification downstream.
    pub async fn handle_buffer_saved(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BufferSaved>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let mtime = envelope.payload.mtime.clone().map(|time| time.into());
        this.update(&mut cx, move |this, cx| {
            // The buffer may still be loading; apply the save state if we have
            // any handle to it at all.
            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
                buffer.update(cx, |buffer, cx| {
                    buffer.did_save(version, mtime, cx);
                });
            }

            // Relay to the next client in the chain, if any.
            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
                downstream_client
                    .send(proto::BufferSaved {
                        project_id: *project_id,
                        buffer_id: buffer_id.into(),
                        mtime: envelope.payload.mtime,
                        version: envelope.payload.version,
                    })
                    .log_err();
            }
        })
    }
2324
    /// Message handler for `proto::BufferReloaded`: records the reloaded
    /// version, mtime, and line ending on the local buffer, then relays the
    /// notification downstream.
    pub async fn handle_buffer_reloaded(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BufferReloaded>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let mtime = envelope.payload.mtime.clone().map(|time| time.into());
        // An out-of-range line-ending enum value is treated as an error rather
        // than silently defaulted.
        let line_ending = deserialize_line_ending(
            proto::LineEnding::from_i32(envelope.payload.line_ending)
                .ok_or_else(|| anyhow!("missing line ending"))?,
        );
        this.update(&mut cx, |this, cx| {
            // The buffer may still be loading; apply the reload state if we
            // have any handle to it at all.
            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
                buffer.update(cx, |buffer, cx| {
                    buffer.did_reload(version, line_ending, mtime, cx);
                });
            }

            // Relay to the next client in the chain, if any.
            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
                downstream_client
                    .send(proto::BufferReloaded {
                        project_id: *project_id,
                        buffer_id: buffer_id.into(),
                        mtime: envelope.payload.mtime,
                        version: envelope.payload.version,
                        line_ending: envelope.payload.line_ending,
                    })
                    .log_err();
            }
        })
    }
2357
    /// Message handler for `proto::BlameBuffer`: waits until this replica has
    /// observed the requester's version, blames the buffer at that version,
    /// and returns the serialized result.
    pub async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
        // Ensure all edits up to `version` are present before blaming.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })?
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })?
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
2378
2379 pub async fn handle_get_permalink_to_line(
2380 this: Entity<Self>,
2381 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2382 mut cx: AsyncApp,
2383 ) -> Result<proto::GetPermalinkToLineResponse> {
2384 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2385 // let version = deserialize_version(&envelope.payload.version);
2386 let selection = {
2387 let proto_selection = envelope
2388 .payload
2389 .selection
2390 .context("no selection to get permalink for defined")?;
2391 proto_selection.start as u32..proto_selection.end as u32
2392 };
2393 let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
2394 let permalink = this
2395 .update(&mut cx, |this, cx| {
2396 this.get_permalink_to_line(&buffer, selection, cx)
2397 })?
2398 .await?;
2399 Ok(proto::GetPermalinkToLineResponse {
2400 permalink: permalink.to_string(),
2401 })
2402 }
2403
    /// Message handler for `proto::OpenUnstagedDiff`: opens (or reuses) the
    /// unstaged diff for a buffer on behalf of a peer and returns the staged
    /// base text needed to reconstruct the diff remotely.
    pub async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })?
            .ok_or_else(|| anyhow!("no such buffer"))?
            .await?;
        // Retain the diff alongside the shared buffer so it stays alive for
        // as long as the peer holds the buffer open.
        this.update(&mut cx, |this, _| {
            let shared_buffers = this
                .shared_buffers
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            // The peer should already have opened the buffer itself.
            debug_assert!(shared_buffers.contains_key(&buffer_id));
            if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
                shared.diff = Some(diff.clone());
            }
        })?;
        let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2430
    /// Message handler for `proto::OpenUncommittedDiff`: opens (or reuses)
    /// the uncommitted diff for a buffer on behalf of a peer and returns the
    /// committed (HEAD) and staged (index) base texts needed to reconstruct
    /// the diff remotely.
    pub async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })?
            .ok_or_else(|| anyhow!("no such buffer"))?
            .await?;
        // Retain the diff alongside the shared buffer so it stays alive for
        // as long as the peer holds the buffer open.
        this.update(&mut cx, |this, _| {
            let shared_buffers = this
                .shared_buffers
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            // The peer should already have opened the buffer itself.
            debug_assert!(shared_buffers.contains_key(&buffer_id));
            if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
                shared.diff = Some(diff.clone());
            }
        })?;
        diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary (unstaged) diff's base text is the index content,
            // when it exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text())
            });

            // Pick how much base text to send:
            // - IndexMatchesHead: index and HEAD share the same base snapshot
            //   (same remote id), so only the committed text is sent.
            // - IndexAndHead: the two differ, or one of them is missing, so
            //   each is sent explicitly when available.
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text();
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        })
    }
2494
    /// Message handler for `proto::UpdateDiffBases`: forwards new diff base
    /// texts to the buffer's diff state. Ignored if the buffer isn't fully
    /// open or has been dropped.
    pub async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            // Only fully-open buffers carry a diff state; operation-only
            // placeholders are skipped.
            if let Some(OpenBuffer::Complete { diff_state, buffer }) =
                this.opened_buffers.get_mut(&buffer_id)
            {
                if let Some(buffer) = buffer.upgrade() {
                    let buffer = buffer.read(cx).text_snapshot();
                    diff_state.update(cx, |diff_state, cx| {
                        diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                    })
                }
            }
        })
    }
2514
2515 pub fn reload_buffers(
2516 &self,
2517 buffers: HashSet<Entity<Buffer>>,
2518 push_to_history: bool,
2519 cx: &mut Context<Self>,
2520 ) -> Task<Result<ProjectTransaction>> {
2521 if buffers.is_empty() {
2522 return Task::ready(Ok(ProjectTransaction::default()));
2523 }
2524 match &self.state {
2525 BufferStoreState::Local(this) => this.reload_buffers(buffers, push_to_history, cx),
2526 BufferStoreState::Remote(this) => this.reload_buffers(buffers, push_to_history, cx),
2527 }
2528 }
2529
    /// Message handler for `proto::ReloadBuffers`: reloads the requested
    /// buffers from disk and replies with the resulting transaction,
    /// serialized for the requesting peer.
    async fn handle_reload_buffers(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ReloadBuffers>,
        mut cx: AsyncApp,
    ) -> Result<proto::ReloadBuffersResponse> {
        let sender_id = envelope.original_sender_id().unwrap_or_default();
        let reload = this.update(&mut cx, |this, cx| {
            // Resolve every requested buffer id; fail the whole request if
            // any of them is unknown.
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                let buffer_id = BufferId::new(*buffer_id)?;
                buffers.insert(this.get_existing(buffer_id)?);
            }
            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
        })??;

        let project_transaction = reload.await?;
        // Ensure the peer has each affected buffer before it receives the
        // transaction referencing it.
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        })?;
        Ok(proto::ReloadBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
2553
    /// Sends the given buffer to `peer_id` unless it has already been shared
    /// with that peer. The buffer's full state is sent first, followed by its
    /// serialized operations, split into chunks.
    pub fn create_buffer_for_peer(
        &mut self,
        buffer: &Entity<Buffer>,
        peer_id: proto::PeerId,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let buffer_id = buffer.read(cx).remote_id();
        let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
        // Already shared with this peer; nothing to do.
        if shared_buffers.contains_key(&buffer_id) {
            return Task::ready(Ok(()));
        }
        shared_buffers.insert(
            buffer_id,
            SharedBuffer {
                buffer: buffer.clone(),
                diff: None,
                lsp_handle: None,
            },
        );

        // Without a downstream client there is nobody to send the buffer to.
        let Some((client, project_id)) = self.downstream_client.clone() else {
            return Task::ready(Ok(()));
        };

        cx.spawn(|this, mut cx| async move {
            // The buffer may have been dropped before this task ran.
            let Some(buffer) = this.update(&mut cx, |this, _| this.get(buffer_id))? else {
                return anyhow::Ok(());
            };

            let operations = buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
            let operations = operations.await;
            let state = buffer.update(&mut cx, |buffer, cx| buffer.to_proto(cx))?;

            let initial_state = proto::CreateBufferForPeer {
                project_id,
                peer_id: Some(peer_id),
                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
            };

            // Only stream the operation history if the initial state message
            // was sent successfully.
            if client.send(initial_state).log_err().is_some() {
                let client = client.clone();
                cx.background_spawn(async move {
                    // Send operations in chunks, flagging the final chunk so
                    // the receiver knows when the buffer is complete.
                    let mut chunks = split_operations(operations).peekable();
                    while let Some(chunk) = chunks.next() {
                        let is_last = chunks.peek().is_none();
                        client.send(proto::CreateBufferForPeer {
                            project_id,
                            peer_id: Some(peer_id),
                            variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
                                proto::BufferChunk {
                                    buffer_id: buffer_id.into(),
                                    operations: chunk,
                                    is_last,
                                },
                            )),
                        })?;
                    }
                    anyhow::Ok(())
                })
                .await
                .log_err();
            }
            Ok(())
        })
    }
2619
    /// Clears all per-peer shared-buffer bookkeeping.
    pub fn forget_shared_buffers(&mut self) {
        self.shared_buffers.clear();
    }
2623
    /// Drops shared-buffer bookkeeping for a single peer.
    pub fn forget_shared_buffers_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_buffers.remove(peer_id);
    }
2627
2628 pub fn update_peer_id(&mut self, old_peer_id: &proto::PeerId, new_peer_id: proto::PeerId) {
2629 if let Some(buffers) = self.shared_buffers.remove(old_peer_id) {
2630 self.shared_buffers.insert(new_peer_id, buffers);
2631 }
2632 }
2633
    /// Returns whether any buffer is currently shared with any peer.
    pub fn has_shared_buffers(&self) -> bool {
        !self.shared_buffers.is_empty()
    }
2637
    /// Creates a new buffer with the given text and optional language (plain
    /// text by default), registers it with the store, and records its
    /// path/entry-id mappings when it is backed by a file.
    ///
    /// Panics if called on a non-local buffer store.
    pub fn create_local_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut Context<Self>,
    ) -> Entity<Buffer> {
        let buffer = cx.new(|cx| {
            Buffer::local(text, cx)
                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
        });

        self.add_buffer(buffer.clone(), cx).log_err();
        let buffer_id = buffer.read(cx).remote_id();

        let this = self
            .as_local_mut()
            .expect("local-only method called in a non-local context");
        // If the buffer has a backing file, index it by project path (and by
        // worktree entry id when one exists) so later lookups can find it.
        if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
            this.local_buffer_ids_by_path.insert(
                ProjectPath {
                    worktree_id: file.worktree_id(cx),
                    path: file.path.clone(),
                },
                buffer_id,
            );

            if let Some(entry_id) = file.entry_id {
                this.local_buffer_ids_by_entry_id
                    .insert(entry_id, buffer_id);
            }
        }
        buffer
    }
2671
2672 pub fn deserialize_project_transaction(
2673 &mut self,
2674 message: proto::ProjectTransaction,
2675 push_to_history: bool,
2676 cx: &mut Context<Self>,
2677 ) -> Task<Result<ProjectTransaction>> {
2678 if let Some(this) = self.as_remote_mut() {
2679 this.deserialize_project_transaction(message, push_to_history, cx)
2680 } else {
2681 debug_panic!("not a remote buffer store");
2682 Task::ready(Err(anyhow!("not a remote buffer store")))
2683 }
2684 }
2685
2686 pub fn wait_for_remote_buffer(
2687 &mut self,
2688 id: BufferId,
2689 cx: &mut Context<BufferStore>,
2690 ) -> Task<Result<Entity<Buffer>>> {
2691 if let Some(this) = self.as_remote_mut() {
2692 this.wait_for_remote_buffer(id, cx)
2693 } else {
2694 debug_panic!("not a remote buffer store");
2695 Task::ready(Err(anyhow!("not a remote buffer store")))
2696 }
2697 }
2698
2699 pub fn serialize_project_transaction_for_peer(
2700 &mut self,
2701 project_transaction: ProjectTransaction,
2702 peer_id: proto::PeerId,
2703 cx: &mut Context<Self>,
2704 ) -> proto::ProjectTransaction {
2705 let mut serialized_transaction = proto::ProjectTransaction {
2706 buffer_ids: Default::default(),
2707 transactions: Default::default(),
2708 };
2709 for (buffer, transaction) in project_transaction.0 {
2710 self.create_buffer_for_peer(&buffer, peer_id, cx)
2711 .detach_and_log_err(cx);
2712 serialized_transaction
2713 .buffer_ids
2714 .push(buffer.read(cx).remote_id().into());
2715 serialized_transaction
2716 .transactions
2717 .push(language::proto::serialize_transaction(&transaction));
2718 }
2719 serialized_transaction
2720 }
2721}
2722
2723impl OpenBuffer {
2724 fn upgrade(&self) -> Option<Entity<Buffer>> {
2725 match self {
2726 OpenBuffer::Complete { buffer, .. } => buffer.upgrade(),
2727 OpenBuffer::Operations(_) => None,
2728 }
2729 }
2730}
2731
2732fn is_not_found_error(error: &anyhow::Error) -> bool {
2733 error
2734 .root_cause()
2735 .downcast_ref::<io::Error>()
2736 .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
2737}
2738
2739fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
2740 let Some(blame) = blame else {
2741 return proto::BlameBufferResponse {
2742 blame_response: None,
2743 };
2744 };
2745
2746 let entries = blame
2747 .entries
2748 .into_iter()
2749 .map(|entry| proto::BlameEntry {
2750 sha: entry.sha.as_bytes().into(),
2751 start_line: entry.range.start,
2752 end_line: entry.range.end,
2753 original_line_number: entry.original_line_number,
2754 author: entry.author.clone(),
2755 author_mail: entry.author_mail.clone(),
2756 author_time: entry.author_time,
2757 author_tz: entry.author_tz.clone(),
2758 committer: entry.committer_name.clone(),
2759 committer_mail: entry.committer_email.clone(),
2760 committer_time: entry.committer_time,
2761 committer_tz: entry.committer_tz.clone(),
2762 summary: entry.summary.clone(),
2763 previous: entry.previous.clone(),
2764 filename: entry.filename.clone(),
2765 })
2766 .collect::<Vec<_>>();
2767
2768 let messages = blame
2769 .messages
2770 .into_iter()
2771 .map(|(oid, message)| proto::CommitMessage {
2772 oid: oid.as_bytes().into(),
2773 message,
2774 })
2775 .collect::<Vec<_>>();
2776
2777 proto::BlameBufferResponse {
2778 blame_response: Some(proto::blame_buffer_response::BlameResponse {
2779 entries,
2780 messages,
2781 remote_url: blame.remote_url,
2782 }),
2783 }
2784}
2785
2786fn deserialize_blame_buffer_response(
2787 response: proto::BlameBufferResponse,
2788) -> Option<git::blame::Blame> {
2789 let response = response.blame_response?;
2790 let entries = response
2791 .entries
2792 .into_iter()
2793 .filter_map(|entry| {
2794 Some(git::blame::BlameEntry {
2795 sha: git::Oid::from_bytes(&entry.sha).ok()?,
2796 range: entry.start_line..entry.end_line,
2797 original_line_number: entry.original_line_number,
2798 committer_name: entry.committer,
2799 committer_time: entry.committer_time,
2800 committer_tz: entry.committer_tz,
2801 committer_email: entry.committer_mail,
2802 author: entry.author,
2803 author_mail: entry.author_mail,
2804 author_time: entry.author_time,
2805 author_tz: entry.author_tz,
2806 summary: entry.summary,
2807 previous: entry.previous,
2808 filename: entry.filename,
2809 })
2810 })
2811 .collect::<Vec<_>>();
2812
2813 let messages = response
2814 .messages
2815 .into_iter()
2816 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
2817 .collect::<HashMap<_, _>>();
2818
2819 Some(Blame {
2820 entries,
2821 messages,
2822 remote_url: response.remote_url,
2823 })
2824}
2825
/// Builds a permalink for a file belonging to a crate vendored from a
/// registry source checkout.
///
/// Walks up from `path` looking for the `.cargo_vcs_info.json` metadata file,
/// reads the commit SHA and in-repo path prefix from it, and combines those
/// with the `package.repository` URL from the adjacent `Cargo.toml` to build
/// a permalink covering `selection`.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal deserialization targets: only the metadata fields we need.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // `skip(1)` skips `path` itself, so the search starts at its parent.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
    // `strip_prefix` cannot fail: `dir` is an ancestor of `path` by
    // construction of the search above.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams {
            sha: &cargo_vcs_info.git.sha1,
            path: &path.to_string_lossy(),
            selection: Some(selection),
        },
    );
    Ok(permalink)
}