1use crate::{
2 lsp_store::OpenLspBufferHandle,
3 search::SearchQuery,
4 worktree_store::{WorktreeStore, WorktreeStoreEvent},
5 ProjectItem as _, ProjectPath,
6};
7use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry};
8use anyhow::{anyhow, bail, Context as _, Result};
9use buffer_diff::BufferDiff;
10use client::Client;
11use collections::{hash_map, HashMap, HashSet};
12use fs::Fs;
13use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt};
14use git::{blame::Blame, repository::RepoPath};
15use gpui::{
16 App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
17};
18use language::{
19 proto::{
20 deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version,
21 split_operations,
22 },
23 Buffer, BufferEvent, Capability, DiskState, File as _, Language, LanguageRegistry, Operation,
24};
25use rpc::{
26 proto::{self, ToProto},
27 AnyProtoClient, ErrorExt as _, TypedEnvelope,
28};
29use serde::Deserialize;
30use smol::channel::Receiver;
31use std::{
32 io,
33 ops::Range,
34 path::{Path, PathBuf},
35 pin::pin,
36 sync::Arc,
37 time::Instant,
38};
39use text::BufferId;
40use util::{debug_panic, maybe, ResultExt as _, TryFutureExt};
41use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId};
42
/// Identifies which git diff a buffer-diff entity tracks: working copy vs.
/// the index (unstaged) or working copy vs. HEAD (uncommitted).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Diff between the buffer contents and the git index (staged text).
    Unstaged,
    /// Diff between the buffer contents and the HEAD commit (committed text).
    Uncommitted,
}
48
/// A set of open buffers.
pub struct BufferStore {
    // Local- or remote-backed implementation of the store.
    state: BufferStoreState,
    // In-flight buffer loads keyed by project path; the `Shared` future lets
    // concurrent opens of the same path await a single load.
    #[allow(clippy::type_complexity)]
    loading_buffers: HashMap<ProjectPath, Shared<Task<Result<Entity<Buffer>, Arc<anyhow::Error>>>>>,
    // In-flight diff loads keyed by buffer id and diff kind, deduplicated the
    // same way as `loading_buffers`.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    worktree_store: Entity<WorktreeStore>,
    // All buffers known to this store, whether fully opened or still
    // accumulating operations (see `OpenBuffer`).
    opened_buffers: HashMap<BufferId, OpenBuffer>,
    // Client and project id used to forward updates when the project is shared.
    downstream_client: Option<(AnyProtoClient, u64)>,
    // Buffers that have been sent to each remote peer.
    shared_buffers: HashMap<proto::PeerId, HashMap<BufferId, SharedBuffer>>,
}
62
/// State retained for a buffer that has been shared with a remote peer.
#[derive(Hash, Eq, PartialEq, Clone)]
struct SharedBuffer {
    buffer: Entity<Buffer>,
    // Diff shared alongside the buffer, if one was opened.
    diff: Option<Entity<BufferDiff>>,
    // NOTE(review): presumably keeps the buffer registered with the LSP store
    // while shared — confirm against lsp_store.
    lsp_handle: Option<OpenLspBufferHandle>,
}
69
/// Per-buffer git diff state: the diff entities, cached base texts, and
/// bookkeeping for in-flight recalculations.
#[derive(Default)]
struct BufferDiffState {
    // Weak so dropping all observers lets the diff entity go away.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // The currently-running recalculation; replaced on each new run.
    recalculate_diff_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders resolved when the in-flight recalculation completes.
    diff_updated_futures: Vec<oneshot::Sender<()>>,

    // Cached HEAD (committed) text; `None` when the file is absent from HEAD.
    head_text: Option<Arc<String>>,
    // Cached index (staged) text; `None` when the file is absent from the index.
    index_text: Option<Arc<String>>,
    // Dirty flags consumed (and cleared) by `recalculate_diffs`.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
85
/// Describes which git base texts changed and their new contents.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD both changed, to different contents.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD both changed and are identical.
    SetBoth(Option<String>),
}
96
impl BufferDiffState {
    /// Records the buffer's new language, flags it as changed, and kicks off a
    /// diff recalculation so the diff bases get re-highlighted.
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        // The receiver is intentionally dropped: callers of this method don't
        // need to wait for the recalculation to finish.
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Returns the unstaged diff entity if it is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Returns the uncommitted diff entity if it is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Handles a remote `UpdateDiffBases` message by translating its protobuf
    /// mode into a `DiffBasesChange` and applying it. Unknown modes are
    /// silently ignored.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        let _ = self.diff_bases_changed(buffer, diff_bases_change, cx);
    }

    /// Returns a receiver resolved when the in-flight recalculation finishes,
    /// or `None` when no recalculation is currently pending.
    pub fn wait_for_recalculation(&mut self) -> Option<oneshot::Receiver<()>> {
        if self.diff_updated_futures.is_empty() {
            return None;
        }
        let (tx, rx) = oneshot::channel();
        self.diff_updated_futures.push(tx);
        Some(rx)
    }

    /// Stores the new base text(s) — normalizing line endings — sets the
    /// corresponding dirty flags, and schedules a diff recalculation.
    ///
    /// The returned receiver resolves once that recalculation completes.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: DiffBasesChange,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        match diff_bases_change {
            DiffBasesChange::SetIndex(index) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::new(index)
                });
                self.index_changed = true;
            }
            DiffBasesChange::SetHead(head) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::new(head)
                });
                self.head_changed = true;
            }
            DiffBasesChange::SetBoth(text) => {
                // A single shared Arc is stored in both fields;
                // `recalculate_diffs` relies on this pointer equality to
                // detect that index == head.
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::new(text)
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            DiffBasesChange::SetEach { index, head } => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::new(index)
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::new(head)
                });
                self.head_changed = true;
            }
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged and/or uncommitted diffs against `buffer` on a
    /// spawned task, publishes the new snapshots, then clears the dirty flags
    /// and resolves every pending `diff_updated_futures` sender.
    fn recalculate_diffs(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        log::debug!("recalculate diffs");
        let (tx, rx) = oneshot::channel();
        self.diff_updated_futures.push(tx);

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        // Pointer equality suffices because `SetBoth` stores one shared Arc in
        // both fields when index and head are identical.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };
        self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move {
            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    BufferDiff::update_diff(
                        unstaged_diff.clone(),
                        buffer.clone(),
                        index,
                        index_changed,
                        language_changed,
                        language.clone(),
                        language_registry.clone(),
                        &mut cx,
                    )
                    .await?,
                );
            }

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When the index matches HEAD, reuse the unstaged diff rather
                // than computing the same diff a second time.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        BufferDiff::update_diff(
                            uncommitted_diff.clone(),
                            buffer.clone(),
                            head,
                            head_changed,
                            language_changed,
                            language.clone(),
                            language_registry.clone(),
                            &mut cx,
                        )
                        .await?,
                    )
                }
            }

            // Publish the unstaged snapshot first so its changed range can be
            // forwarded when updating the uncommitted diff below.
            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                unstaged_diff.update(&mut cx, |diff, cx| {
                    diff.set_snapshot(&buffer, new_unstaged_diff, language_changed, None, cx)
                })?
            } else {
                None
            };

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff.update(&mut cx, |uncommitted_diff, cx| {
                    uncommitted_diff.set_snapshot(
                        &buffer,
                        new_uncommitted_diff,
                        language_changed,
                        unstaged_changed_range,
                        cx,
                    );
                })?;
            }

            if let Some(this) = this.upgrade() {
                this.update(&mut cx, |this, _| {
                    // Clear the dirty flags and wake every waiter.
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    for tx in this.diff_updated_futures.drain(..) {
                        tx.send(()).ok();
                    }
                })?;
            }

            Ok(())
        }));

        rx
    }
}
297
/// Backing implementation of the store: local filesystem or remote via RPC.
enum BufferStoreState {
    Local(LocalBufferStore),
    Remote(RemoteBufferStore),
}
302
/// Buffer-store backend for remote projects, which loads and saves buffers
/// through the upstream RPC client.
struct RemoteBufferStore {
    // Buffers kept alive because a collab peer shared them with us
    // (see `handle_create_buffer_for_peer`).
    shared_with_me: HashSet<Entity<Buffer>>,
    upstream_client: AnyProtoClient,
    project_id: u64,
    // Buffers whose initial state has arrived but whose operation chunks are
    // still streaming in.
    loading_remote_buffers_by_id: HashMap<BufferId, Entity<Buffer>>,
    // Channels waiting for a given remote buffer to finish loading.
    remote_buffer_listeners:
        HashMap<BufferId, Vec<oneshot::Sender<Result<Entity<Buffer>, anyhow::Error>>>>,
    worktree_store: Entity<WorktreeStore>,
}
312
/// Buffer-store backend for local projects, which reads and writes buffers
/// directly through worktrees.
struct LocalBufferStore {
    // Reverse lookups from project path / worktree entry id to buffer id.
    local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
    worktree_store: Entity<WorktreeStore>,
    // Subscription to worktree-store events; dropped together with the store.
    _subscription: Subscription,
}
319
/// An entry in `BufferStore::opened_buffers`.
enum OpenBuffer {
    /// A fully-opened buffer along with its git diff state.
    Complete {
        buffer: WeakEntity<Buffer>,
        diff_state: Entity<BufferDiffState>,
    },
    /// Operations received for a buffer that hasn't finished opening yet.
    Operations(Vec<Operation>),
}
327
/// Events emitted by `BufferStore` to its observers.
pub enum BufferStoreEvent {
    /// A buffer was added to the store.
    BufferAdded(Entity<Buffer>),
    /// A git diff was created for a buffer.
    BufferDiffAdded(Entity<BufferDiff>),
    /// A buffer was dropped and removed from the store.
    BufferDropped(BufferId),
    /// A buffer's underlying file changed (e.g. rename or move).
    BufferChangedFilePath {
        buffer: Entity<Buffer>,
        /// The file the buffer pointed at before the change.
        old_file: Option<Arc<dyn language::File>>,
    },
}
337
/// A set of edits grouped per buffer, used to report operations that span
/// multiple buffers (e.g. reloads — see `reload_buffers`).
#[derive(Default, Debug)]
pub struct ProjectTransaction(pub HashMap<Entity<Buffer>, language::Transaction>);
340
341impl EventEmitter<BufferStoreEvent> for BufferStore {}
342
impl RemoteBufferStore {
    /// Fetches the staged (index) text for `buffer_id` from the upstream host.
    /// Resolves to `None` when the file has no entry in the index.
    fn open_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Task<Result<Option<String>>> {
        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.background_spawn(async move {
            let response = client
                .request(proto::OpenUnstagedDiff {
                    project_id,
                    buffer_id: buffer_id.to_proto(),
                })
                .await?;
            Ok(response.staged_text)
        })
    }

    /// Fetches the committed (and possibly staged) text for `buffer_id` from
    /// the upstream host, translating the response mode into a
    /// `DiffBasesChange`.
    fn open_uncommitted_diff(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        use proto::open_uncommitted_diff_response::Mode;

        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.background_spawn(async move {
            let response = client
                .request(proto::OpenUncommittedDiff {
                    project_id,
                    buffer_id: buffer_id.to_proto(),
                })
                .await?;
            let mode = Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
            let bases = match mode {
                Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                Mode::IndexAndHead => DiffBasesChange::SetEach {
                    head: response.committed_text,
                    index: response.staged_text,
                },
            };
            Ok(bases)
        })
    }

    /// Returns a task that resolves once the buffer with `id` has been fully
    /// received from the host, or immediately if it is already open.
    pub fn wait_for_remote_buffer(
        &mut self,
        id: BufferId,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let (tx, rx) = oneshot::channel();
        self.remote_buffer_listeners.entry(id).or_default().push(tx);

        cx.spawn(|this, cx| async move {
            // Fast path: the buffer may already be open in the store.
            if let Some(buffer) = this
                .read_with(&cx, |buffer_store, _| buffer_store.get(id))
                .ok()
                .flatten()
            {
                return Ok(buffer);
            }

            cx.background_spawn(async move { rx.await? }).await
        })
    }

    /// Asks the host to save `buffer_handle` (optionally to a new path), then
    /// marks the local replica saved with the returned version and mtime.
    fn save_remote_buffer(
        &self,
        buffer_handle: Entity<Buffer>,
        new_path: Option<proto::ProjectPath>,
        cx: &Context<BufferStore>,
    ) -> Task<Result<()>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id().into();
        let version = buffer.version();
        let rpc = self.upstream_client.clone();
        let project_id = self.project_id;
        cx.spawn(move |_, mut cx| async move {
            let response = rpc
                .request(proto::SaveBuffer {
                    project_id,
                    buffer_id,
                    new_path,
                    version: serialize_version(&version),
                })
                .await?;
            let version = deserialize_version(&response.version);
            let mtime = response.mtime.map(|mtime| mtime.into());

            buffer_handle.update(&mut cx, |buffer, cx| {
                buffer.did_save(version.clone(), mtime, cx);
            })?;

            Ok(())
        })
    }

    /// Handles one message of the host's buffer-streaming protocol: either a
    /// buffer's initial state or a chunk of operations (possibly the last).
    ///
    /// Returns `Ok(Some(buffer))` once the final chunk has been applied and
    /// `Ok(None)` while more chunks are expected. Failures are also delivered
    /// to any tasks waiting in `remote_buffer_listeners`.
    pub fn handle_create_buffer_for_peer(
        &mut self,
        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
        replica_id: u16,
        capability: Capability,
        cx: &mut Context<BufferStore>,
    ) -> Result<Option<Entity<Buffer>>> {
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("missing variant"))?
        {
            proto::create_buffer_for_peer::Variant::State(mut state) => {
                let buffer_id = BufferId::new(state.id)?;

                let buffer_result = maybe!({
                    let mut buffer_file = None;
                    if let Some(file) = state.file.take() {
                        let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id);
                        let worktree = self
                            .worktree_store
                            .read(cx)
                            .worktree_for_id(worktree_id, cx)
                            .ok_or_else(|| {
                                anyhow!("no worktree found for id {}", file.worktree_id)
                            })?;
                        buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
                            as Arc<dyn language::File>);
                    }
                    Buffer::from_proto(replica_id, capability, state, buffer_file)
                });

                match buffer_result {
                    Ok(buffer) => {
                        let buffer = cx.new(|_| buffer);
                        self.loading_remote_buffers_by_id.insert(buffer_id, buffer);
                    }
                    Err(error) => {
                        // Propagate the failure to everyone waiting on this buffer.
                        if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
                            for listener in listeners {
                                listener.send(Err(anyhow!(error.cloned()))).ok();
                            }
                        }
                    }
                }
            }
            proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
                let buffer_id = BufferId::new(chunk.buffer_id)?;
                let buffer = self
                    .loading_remote_buffers_by_id
                    .get(&buffer_id)
                    .cloned()
                    .ok_or_else(|| {
                        anyhow!(
                            "received chunk for buffer {} without initial state",
                            chunk.buffer_id
                        )
                    })?;

                let result = maybe!({
                    let operations = chunk
                        .operations
                        .into_iter()
                        .map(language::proto::deserialize_operation)
                        .collect::<Result<Vec<_>>>()?;
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx));
                    anyhow::Ok(())
                });

                if let Err(error) = result {
                    // A bad chunk aborts the load; notify waiters and forget it.
                    self.loading_remote_buffers_by_id.remove(&buffer_id);
                    if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
                        for listener in listeners {
                            listener.send(Err(error.cloned())).ok();
                        }
                    }
                } else if chunk.is_last {
                    self.loading_remote_buffers_by_id.remove(&buffer_id);
                    if self.upstream_client.is_via_collab() {
                        // retain buffers sent by peers to avoid races.
                        self.shared_with_me.insert(buffer.clone());
                    }

                    if let Some(senders) = self.remote_buffer_listeners.remove(&buffer_id) {
                        for sender in senders {
                            sender.send(Ok(buffer.clone())).ok();
                        }
                    }
                    return Ok(Some(buffer));
                }
            }
        }
        return Ok(None);
    }

    /// Ids of buffers whose initial state arrived but that are still streaming
    /// operation chunks.
    pub fn incomplete_buffer_ids(&self) -> Vec<BufferId> {
        self.loading_remote_buffers_by_id
            .keys()
            .copied()
            .collect::<Vec<_>>()
    }

    /// Reconstructs a `ProjectTransaction` from its protobuf form, waiting for
    /// each referenced buffer (and its edits) to arrive before returning.
    ///
    /// When `push_to_history` is set, each transaction is also recorded in its
    /// buffer's undo history.
    pub fn deserialize_project_transaction(
        &self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
            {
                let buffer_id = BufferId::new(buffer_id)?;
                let buffer = this
                    .update(&mut cx, |this, cx| {
                        this.wait_for_remote_buffer(buffer_id, cx)
                    })?
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // Ensure the edits the transaction refers to have been applied
                // locally before the caller acts on it.
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })?
                    .await?;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    })?;
                }
            }

            Ok(project_transaction)
        })
    }

    /// Opens the buffer at `path` within `worktree` by asking the host, then
    /// waits until the buffer has been fully streamed to this client.
    fn open_buffer(
        &self,
        path: Arc<Path>,
        worktree: Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let worktree_id = worktree.read(cx).id().to_proto();
        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.spawn(move |this, mut cx| async move {
            let response = client
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id,
                    path: path.to_proto(),
                })
                .await?;
            let buffer_id = BufferId::new(response.buffer_id)?;

            let buffer = this
                .update(&mut cx, {
                    |this, cx| this.wait_for_remote_buffer(buffer_id, cx)
                })?
                .await?;

            Ok(buffer)
        })
    }

    /// Asks the host to create a new, empty buffer and waits for it to arrive.
    fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
        let create = self.upstream_client.request(proto::OpenNewBuffer {
            project_id: self.project_id,
        });
        cx.spawn(|this, mut cx| async move {
            let response = create.await?;
            let buffer_id = BufferId::new(response.buffer_id)?;

            this.update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(buffer_id, cx)
            })?
            .await
        })
    }

    /// Asks the host to reload the given buffers from disk and applies the
    /// resulting project transaction locally.
    fn reload_buffers(
        &self,
        buffers: HashSet<Entity<Buffer>>,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        let request = self.upstream_client.request(proto::ReloadBuffers {
            project_id: self.project_id,
            buffer_ids: buffers
                .iter()
                .map(|buffer| buffer.read(cx).remote_id().to_proto())
                .collect(),
        });

        cx.spawn(|this, mut cx| async move {
            let response = request
                .await?
                .transaction
                .ok_or_else(|| anyhow!("missing transaction"))?;
            this.update(&mut cx, |this, cx| {
                this.deserialize_project_transaction(response, push_to_history, cx)
            })?
            .await
        })
    }
}
648
649impl LocalBufferStore {
650 fn worktree_for_buffer(
651 &self,
652 buffer: &Entity<Buffer>,
653 cx: &App,
654 ) -> Option<(Entity<Worktree>, Arc<Path>)> {
655 let file = buffer.read(cx).file()?;
656 let worktree_id = file.worktree_id(cx);
657 let path = file.path().clone();
658 let worktree = self
659 .worktree_store
660 .read(cx)
661 .worktree_for_id(worktree_id, cx)?;
662 Some((worktree, path))
663 }
664
665 fn load_staged_text(&self, buffer: &Entity<Buffer>, cx: &App) -> Task<Result<Option<String>>> {
666 if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) {
667 worktree.read(cx).load_staged_file(path.as_ref(), cx)
668 } else {
669 return Task::ready(Err(anyhow!("no such worktree")));
670 }
671 }
672
673 fn load_committed_text(
674 &self,
675 buffer: &Entity<Buffer>,
676 cx: &App,
677 ) -> Task<Result<Option<String>>> {
678 if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) {
679 worktree.read(cx).load_committed_file(path.as_ref(), cx)
680 } else {
681 Task::ready(Err(anyhow!("no such worktree")))
682 }
683 }
684
    /// Writes `buffer_handle`'s contents to `path` within `worktree`, then
    /// notifies the downstream client (when sharing) and the buffer itself.
    ///
    /// `has_changed_file` is forced to `true` for buffers whose file is brand
    /// new on disk, so the file metadata is broadcast along with the save.
    fn save_local_buffer(
        &self,
        buffer_handle: Entity<Buffer>,
        worktree: Entity<Worktree>,
        path: Arc<Path>,
        mut has_changed_file: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<()>> {
        let buffer = buffer_handle.read(cx);

        // Capture everything needed by the async write up front.
        let text = buffer.as_rope().clone();
        let line_ending = buffer.line_ending();
        let version = buffer.version();
        let buffer_id = buffer.remote_id();
        if buffer
            .file()
            .is_some_and(|file| file.disk_state() == DiskState::New)
        {
            has_changed_file = true;
        }

        let save = worktree.update(cx, |worktree, cx| {
            worktree.write_file(path.as_ref(), text, line_ending, cx)
        });

        cx.spawn(move |this, mut cx| async move {
            let new_file = save.await?;
            let mtime = new_file.disk_state().mtime();
            this.update(&mut cx, |this, cx| {
                // When the project is shared, forward the new file metadata
                // (only if it changed) followed by the save notification.
                if let Some((downstream_client, project_id)) = this.downstream_client.clone() {
                    if has_changed_file {
                        downstream_client
                            .send(proto::UpdateBufferFile {
                                project_id,
                                buffer_id: buffer_id.to_proto(),
                                file: Some(language::File::to_proto(&*new_file, cx)),
                            })
                            .log_err();
                    }
                    downstream_client
                        .send(proto::BufferSaved {
                            project_id,
                            buffer_id: buffer_id.to_proto(),
                            version: serialize_version(&version),
                            mtime: mtime.map(|time| time.into()),
                        })
                        .log_err();
                }
            })?;
            buffer_handle.update(&mut cx, |buffer, cx| {
                if has_changed_file {
                    buffer.file_updated(new_file, cx);
                }
                buffer.did_save(version.clone(), mtime, cx);
            })
        })
    }
742
743 fn subscribe_to_worktree(
744 &mut self,
745 worktree: &Entity<Worktree>,
746 cx: &mut Context<BufferStore>,
747 ) {
748 cx.subscribe(worktree, |this, worktree, event, cx| {
749 if worktree.read(cx).is_local() {
750 match event {
751 worktree::Event::UpdatedEntries(changes) => {
752 Self::local_worktree_entries_changed(this, &worktree, changes, cx);
753 }
754 worktree::Event::UpdatedGitRepositories(updated_repos) => {
755 Self::local_worktree_git_repos_changed(
756 this,
757 worktree.clone(),
758 updated_repos,
759 cx,
760 )
761 }
762 _ => {}
763 }
764 }
765 })
766 .detach();
767 }
768
769 fn local_worktree_entries_changed(
770 this: &mut BufferStore,
771 worktree_handle: &Entity<Worktree>,
772 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
773 cx: &mut Context<BufferStore>,
774 ) {
775 let snapshot = worktree_handle.read(cx).snapshot();
776 for (path, entry_id, _) in changes {
777 Self::local_worktree_entry_changed(
778 this,
779 *entry_id,
780 path,
781 worktree_handle,
782 &snapshot,
783 cx,
784 );
785 }
786 }
787
    /// Responds to git repository changes in a local worktree by reloading the
    /// index/HEAD texts for every affected open buffer and rebroadcasting the
    /// new diff bases — both locally and to any downstream client.
    fn local_worktree_git_repos_changed(
        this: &mut BufferStore,
        worktree_handle: Entity<Worktree>,
        changed_repos: &UpdatedGitRepositoriesSet,
        cx: &mut Context<BufferStore>,
    ) {
        debug_assert!(worktree_handle.read(cx).is_local());

        // Collect buffers living under a changed repository, remembering which
        // of their diffs (unstaged / uncommitted) are actually in use.
        let mut diff_state_updates = Vec::new();
        for buffer in this.opened_buffers.values() {
            let OpenBuffer::Complete { buffer, diff_state } = buffer else {
                continue;
            };
            let Some(buffer) = buffer.upgrade() else {
                continue;
            };
            let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
                continue;
            };
            if file.worktree != worktree_handle {
                continue;
            }
            let diff_state = diff_state.read(cx);
            if changed_repos
                .iter()
                .any(|(work_dir, _)| file.path.starts_with(work_dir))
            {
                let has_unstaged_diff = diff_state
                    .unstaged_diff
                    .as_ref()
                    .is_some_and(|diff| diff.is_upgradable());
                let has_uncommitted_diff = diff_state
                    .uncommitted_diff
                    .as_ref()
                    .is_some_and(|set| set.is_upgradable());
                diff_state_updates.push((
                    buffer,
                    file.path.clone(),
                    has_unstaged_diff.then(|| diff_state.index_text.clone()),
                    has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                ));
            }
        }

        if diff_state_updates.is_empty() {
            return;
        }

        cx.spawn(move |this, mut cx| async move {
            let snapshot =
                worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
            // Load the new base texts from git on a background task.
            let diff_bases_changes_by_buffer = cx
                .background_spawn(async move {
                    diff_state_updates
                        .into_iter()
                        .filter_map(|(buffer, path, current_index_text, current_head_text)| {
                            let local_repo = snapshot.local_repo_for_path(&path)?;
                            let relative_path = local_repo.relativize(&path).ok()?;
                            // Only reload the texts for the diffs in use.
                            let index_text = if current_index_text.is_some() {
                                local_repo.repo().load_index_text(&relative_path)
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                local_repo.repo().load_committed_text(&relative_path)
                            } else {
                                None
                            };

                            // Avoid triggering a diff update if the base text has not changed.
                            if let Some((current_index, current_head)) =
                                current_index_text.as_ref().zip(current_head_text.as_ref())
                            {
                                if current_index.as_deref() == index_text.as_ref()
                                    && current_head.as_deref() == head_text.as_ref()
                                {
                                    return None;
                                }
                            }

                            let diff_bases_change =
                                match (current_index_text.is_some(), current_head_text.is_some()) {
                                    (true, true) => Some(if index_text == head_text {
                                        DiffBasesChange::SetBoth(head_text)
                                    } else {
                                        DiffBasesChange::SetEach {
                                            index: index_text,
                                            head: head_text,
                                        }
                                    }),
                                    (true, false) => Some(DiffBasesChange::SetIndex(index_text)),
                                    (false, true) => Some(DiffBasesChange::SetHead(head_text)),
                                    (false, false) => None,
                                };

                            Some((buffer, diff_bases_change))
                        })
                        .collect::<Vec<_>>()
                })
                .await;

            // Back on the main thread: apply each change and forward it
            // downstream when the project is shared.
            this.update(&mut cx, |this, cx| {
                for (buffer, diff_bases_change) in diff_bases_changes_by_buffer {
                    let Some(OpenBuffer::Complete { diff_state, .. }) =
                        this.opened_buffers.get_mut(&buffer.read(cx).remote_id())
                    else {
                        continue;
                    };
                    let Some(diff_bases_change) = diff_bases_change else {
                        continue;
                    };

                    diff_state.update(cx, |diff_state, cx| {
                        use proto::update_diff_bases::Mode;

                        let buffer = buffer.read(cx);
                        if let Some((client, project_id)) = this.downstream_client.as_ref() {
                            let buffer_id = buffer.remote_id().to_proto();
                            let (staged_text, committed_text, mode) = match diff_bases_change
                                .clone()
                            {
                                DiffBasesChange::SetIndex(index) => (index, None, Mode::IndexOnly),
                                DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                DiffBasesChange::SetEach { index, head } => {
                                    (index, head, Mode::IndexAndHead)
                                }
                                DiffBasesChange::SetBoth(text) => {
                                    (None, text, Mode::IndexMatchesHead)
                                }
                            };
                            let message = proto::UpdateDiffBases {
                                project_id: *project_id,
                                buffer_id,
                                staged_text,
                                committed_text,
                                mode: mode as i32,
                            };

                            client.send(message).log_err();
                        }

                        // Receiver dropped: nothing here waits on completion.
                        let _ = diff_state.diff_bases_changed(
                            buffer.text_snapshot(),
                            diff_bases_change,
                            cx,
                        );
                    });
                }
            })
        })
        .detach_and_log_err(cx);
    }
940
    /// Reconciles one worktree entry change with the buffer (if any) open for
    /// that entry: rebuilds the buffer's `File`, keeps the path/entry-id
    /// lookup maps in sync, notifies the downstream client, and emits
    /// `BufferChangedFilePath` when the path changed.
    ///
    /// Always returns `None`; the `Option` return is used only for early exits
    /// via `?`.
    fn local_worktree_entry_changed(
        this: &mut BufferStore,
        entry_id: ProjectEntryId,
        path: &Arc<Path>,
        worktree: &Entity<worktree::Worktree>,
        snapshot: &worktree::Snapshot,
        cx: &mut Context<BufferStore>,
    ) -> Option<()> {
        let project_path = ProjectPath {
            worktree_id: snapshot.id(),
            path: path.clone(),
        };

        // Find the buffer for this entry, falling back to a path lookup when
        // the entry id is unknown.
        let buffer_id = {
            let local = this.as_local_mut()?;
            match local.local_buffer_ids_by_entry_id.get(&entry_id) {
                Some(&buffer_id) => buffer_id,
                None => local.local_buffer_ids_by_path.get(&project_path).copied()?,
            }
        };

        let buffer = if let Some(buffer) = this.get(buffer_id) {
            Some(buffer)
        } else {
            this.opened_buffers.remove(&buffer_id);
            None
        };

        // If the buffer was dropped, clear its stale lookup-map entries.
        let buffer = if let Some(buffer) = buffer {
            buffer
        } else {
            let this = this.as_local_mut()?;
            this.local_buffer_ids_by_path.remove(&project_path);
            this.local_buffer_ids_by_entry_id.remove(&entry_id);
            return None;
        };

        let events = buffer.update(cx, |buffer, cx| {
            let local = this.as_local_mut()?;
            let file = buffer.file()?;
            let old_file = File::from_dyn(Some(file))?;
            if old_file.worktree != *worktree {
                return None;
            }

            // Locate the entry in the new snapshot: by id first, then by path.
            let snapshot_entry = old_file
                .entry_id
                .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                .or_else(|| snapshot.entry_for_path(old_file.path.as_ref()));

            let new_file = if let Some(entry) = snapshot_entry {
                File {
                    disk_state: match entry.mtime {
                        Some(mtime) => DiskState::Present { mtime },
                        None => old_file.disk_state,
                    },
                    is_local: true,
                    entry_id: Some(entry.id),
                    path: entry.path.clone(),
                    worktree: worktree.clone(),
                    is_private: entry.is_private,
                }
            } else {
                // The entry vanished from the snapshot: the file was deleted.
                File {
                    disk_state: DiskState::Deleted,
                    is_local: true,
                    entry_id: old_file.entry_id,
                    path: old_file.path.clone(),
                    worktree: worktree.clone(),
                    is_private: old_file.is_private,
                }
            };

            if new_file == *old_file {
                return None;
            }

            let mut events = Vec::new();
            if new_file.path != old_file.path {
                // Keep the path-keyed map in sync with the rename.
                local.local_buffer_ids_by_path.remove(&ProjectPath {
                    path: old_file.path.clone(),
                    worktree_id: old_file.worktree_id(cx),
                });
                local.local_buffer_ids_by_path.insert(
                    ProjectPath {
                        worktree_id: new_file.worktree_id(cx),
                        path: new_file.path.clone(),
                    },
                    buffer_id,
                );
                events.push(BufferStoreEvent::BufferChangedFilePath {
                    buffer: cx.entity(),
                    old_file: buffer.file().cloned(),
                });
            }

            if new_file.entry_id != old_file.entry_id {
                // Keep the entry-id-keyed map in sync as well.
                if let Some(entry_id) = old_file.entry_id {
                    local.local_buffer_ids_by_entry_id.remove(&entry_id);
                }
                if let Some(entry_id) = new_file.entry_id {
                    local
                        .local_buffer_ids_by_entry_id
                        .insert(entry_id, buffer_id);
                }
            }

            if let Some((client, project_id)) = &this.downstream_client {
                client
                    .send(proto::UpdateBufferFile {
                        project_id: *project_id,
                        buffer_id: buffer_id.to_proto(),
                        file: Some(new_file.to_proto(cx)),
                    })
                    .ok();
            }

            buffer.file_updated(Arc::new(new_file), cx);
            Some(events)
        })?;

        // Emit after the buffer update so observers see consistent state.
        for event in events {
            cx.emit(event);
        }

        None
    }
1068
1069 fn buffer_changed_file(&mut self, buffer: Entity<Buffer>, cx: &mut App) -> Option<()> {
1070 let file = File::from_dyn(buffer.read(cx).file())?;
1071
1072 let remote_id = buffer.read(cx).remote_id();
1073 if let Some(entry_id) = file.entry_id {
1074 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
1075 Some(_) => {
1076 return None;
1077 }
1078 None => {
1079 self.local_buffer_ids_by_entry_id
1080 .insert(entry_id, remote_id);
1081 }
1082 }
1083 };
1084 self.local_buffer_ids_by_path.insert(
1085 ProjectPath {
1086 worktree_id: file.worktree_id(cx),
1087 path: file.path.clone(),
1088 },
1089 remote_id,
1090 );
1091
1092 Some(())
1093 }
1094
1095 fn save_buffer(
1096 &self,
1097 buffer: Entity<Buffer>,
1098 cx: &mut Context<BufferStore>,
1099 ) -> Task<Result<()>> {
1100 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1101 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1102 };
1103 let worktree = file.worktree.clone();
1104 self.save_local_buffer(buffer, worktree, file.path.clone(), false, cx)
1105 }
1106
1107 fn save_buffer_as(
1108 &self,
1109 buffer: Entity<Buffer>,
1110 path: ProjectPath,
1111 cx: &mut Context<BufferStore>,
1112 ) -> Task<Result<()>> {
1113 let Some(worktree) = self
1114 .worktree_store
1115 .read(cx)
1116 .worktree_for_id(path.worktree_id, cx)
1117 else {
1118 return Task::ready(Err(anyhow!("no such worktree")));
1119 };
1120 self.save_local_buffer(buffer, worktree, path.path.clone(), true, cx)
1121 }
1122
    /// Opens the buffer at `path` in `worktree`, loading its contents from
    /// disk. A file that doesn't exist yet yields an empty buffer whose file
    /// is `DiskState::New` (it will be created on first save). The buffer is
    /// registered with the store and the local lookup maps before returning.
    fn open_buffer(
        &self,
        path: Arc<Path>,
        worktree: Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let load_file = worktree.load_file(path.as_ref(), cx);
            // Reserve the entity up front so the buffer id (derived from the
            // entity id) is known before the file finishes loading.
            let reservation = cx.reserve_entity();
            let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
            cx.spawn(move |_, mut cx| async move {
                let loaded = load_file.await?;
                let text_buffer = cx
                    .background_spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) })
                    .await;
                cx.insert_entity(reservation, |_| {
                    Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
                })
            })
        });

        cx.spawn(move |this, mut cx| async move {
            let buffer = match load_buffer.await {
                Ok(buffer) => Ok(buffer),
                // A missing file is not an error: open an empty buffer backed
                // by a not-yet-created file.
                Err(error) if is_not_found_error(&error) => cx.new(|cx| {
                    let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
                    let text_buffer = text::Buffer::new(0, buffer_id, "".into());
                    Buffer::build(
                        text_buffer,
                        Some(Arc::new(File {
                            worktree,
                            path,
                            disk_state: DiskState::New,
                            entry_id: None,
                            is_local: true,
                            is_private: false,
                        })),
                        Capability::ReadWrite,
                    )
                }),
                Err(e) => Err(e),
            }?;
            this.update(&mut cx, |this, cx| {
                this.add_buffer(buffer.clone(), cx)?;
                let buffer_id = buffer.read(cx).remote_id();
                // Record the new buffer in the local lookup maps.
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    let this = this.as_local_mut().unwrap();
                    this.local_buffer_ids_by_path.insert(
                        ProjectPath {
                            worktree_id: file.worktree_id(cx),
                            path: file.path.clone(),
                        },
                        buffer_id,
                    );

                    if let Some(entry_id) = file.entry_id {
                        this.local_buffer_ids_by_entry_id
                            .insert(entry_id, buffer_id);
                    }
                }

                anyhow::Ok(())
            })??;

            Ok(buffer)
        })
    }
1190
1191 fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
1192 cx.spawn(|buffer_store, mut cx| async move {
1193 let buffer =
1194 cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx))?;
1195 buffer_store.update(&mut cx, |buffer_store, cx| {
1196 buffer_store.add_buffer(buffer.clone(), cx).log_err();
1197 })?;
1198 Ok(buffer)
1199 })
1200 }
1201
1202 fn reload_buffers(
1203 &self,
1204 buffers: HashSet<Entity<Buffer>>,
1205 push_to_history: bool,
1206 cx: &mut Context<BufferStore>,
1207 ) -> Task<Result<ProjectTransaction>> {
1208 cx.spawn(move |_, mut cx| async move {
1209 let mut project_transaction = ProjectTransaction::default();
1210 for buffer in buffers {
1211 let transaction = buffer
1212 .update(&mut cx, |buffer, cx| buffer.reload(cx))?
1213 .await?;
1214 buffer.update(&mut cx, |buffer, cx| {
1215 if let Some(transaction) = transaction {
1216 if !push_to_history {
1217 buffer.forget_transaction(transaction.id);
1218 }
1219 project_transaction.0.insert(cx.entity(), transaction);
1220 }
1221 })?;
1222 }
1223
1224 Ok(project_transaction)
1225 })
1226 }
1227}
1228
1229impl BufferStore {
    /// Registers this store's RPC handlers on `client`. Must be called once
    /// before any of the corresponding messages can be received.
    pub fn init(client: &AnyProtoClient) {
        // One-way notifications.
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        // Request/response handlers.
        client.add_entity_request_handler(Self::handle_save_buffer);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_request_handler(Self::handle_reload_buffers);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
    }
1242
1243 /// Creates a buffer store, optionally retaining its buffers.
1244 pub fn local(worktree_store: Entity<WorktreeStore>, cx: &mut Context<Self>) -> Self {
1245 Self {
1246 state: BufferStoreState::Local(LocalBufferStore {
1247 local_buffer_ids_by_path: Default::default(),
1248 local_buffer_ids_by_entry_id: Default::default(),
1249 worktree_store: worktree_store.clone(),
1250 _subscription: cx.subscribe(&worktree_store, |this, _, event, cx| {
1251 if let WorktreeStoreEvent::WorktreeAdded(worktree) = event {
1252 let this = this.as_local_mut().unwrap();
1253 this.subscribe_to_worktree(worktree, cx);
1254 }
1255 }),
1256 }),
1257 downstream_client: None,
1258 opened_buffers: Default::default(),
1259 shared_buffers: Default::default(),
1260 loading_buffers: Default::default(),
1261 loading_diffs: Default::default(),
1262 worktree_store,
1263 }
1264 }
1265
1266 pub fn remote(
1267 worktree_store: Entity<WorktreeStore>,
1268 upstream_client: AnyProtoClient,
1269 remote_id: u64,
1270 _cx: &mut Context<Self>,
1271 ) -> Self {
1272 Self {
1273 state: BufferStoreState::Remote(RemoteBufferStore {
1274 shared_with_me: Default::default(),
1275 loading_remote_buffers_by_id: Default::default(),
1276 remote_buffer_listeners: Default::default(),
1277 project_id: remote_id,
1278 upstream_client,
1279 worktree_store: worktree_store.clone(),
1280 }),
1281 downstream_client: None,
1282 opened_buffers: Default::default(),
1283 loading_buffers: Default::default(),
1284 loading_diffs: Default::default(),
1285 shared_buffers: Default::default(),
1286 worktree_store,
1287 }
1288 }
1289
1290 fn as_local_mut(&mut self) -> Option<&mut LocalBufferStore> {
1291 match &mut self.state {
1292 BufferStoreState::Local(state) => Some(state),
1293 _ => None,
1294 }
1295 }
1296
1297 fn as_remote_mut(&mut self) -> Option<&mut RemoteBufferStore> {
1298 match &mut self.state {
1299 BufferStoreState::Remote(state) => Some(state),
1300 _ => None,
1301 }
1302 }
1303
1304 fn as_remote(&self) -> Option<&RemoteBufferStore> {
1305 match &self.state {
1306 BufferStoreState::Remote(state) => Some(state),
1307 _ => None,
1308 }
1309 }
1310
    /// Returns the buffer at `project_path`, loading it if necessary.
    /// Concurrent calls for the same path share a single in-flight load.
    pub fn open_buffer(
        &mut self,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        // Fast path: the buffer is already open.
        if let Some(buffer) = self.get_by_path(&project_path, cx) {
            return Task::ready(Ok(buffer));
        }

        let task = match self.loading_buffers.entry(project_path.clone()) {
            // Another caller is already loading this path; await its task.
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                let path = project_path.path.clone();
                let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(project_path.worktree_id, cx)
                else {
                    return Task::ready(Err(anyhow!("no such worktree")));
                };
                // Dispatch to the local or remote implementation.
                let load_buffer = match &self.state {
                    BufferStoreState::Local(this) => this.open_buffer(path, worktree, cx),
                    BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, mut cx| async move {
                            let load_result = load_buffer.await;
                            this.update(&mut cx, |this, _cx| {
                                // Record the fact that the buffer is no longer loading.
                                this.loading_buffers.remove(&project_path);
                            })
                            .ok();
                            // Shared futures need a cloneable error type.
                            load_result.map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1355
    /// Returns the unstaged diff for `buffer`, creating it if it isn't
    /// already open. Concurrent calls for the same buffer share one
    /// in-flight load.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: a live unstaged diff already exists.
        if let Some(OpenBuffer::Complete { diff_state, .. }) = self.opened_buffers.get(&buffer_id) {
            if let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                // If a recalculation is in flight, resolve only after it
                // finishes so the caller never observes a stale diff.
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await?;
                        Ok(unstaged_diff)
                    });
                }
                return Task::ready(Ok(unstaged_diff));
            }
        }

        let task = match self.loading_diffs.entry((buffer_id, DiffKind::Unstaged)) {
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                // Locally, load the staged text directly; remotely, request
                // it from the host.
                let staged_text = match &self.state {
                    BufferStoreState::Local(this) => this.load_staged_text(&buffer, cx),
                    BufferStoreState::Remote(this) => this.open_unstaged_diff(buffer_id, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, cx| async move {
                            Self::open_diff_internal(
                                this,
                                DiffKind::Unstaged,
                                staged_text.await.map(DiffBasesChange::SetIndex),
                                buffer,
                                cx,
                            )
                            .await
                            .map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1410
    /// Returns the uncommitted diff for `buffer`, creating it if it isn't
    /// already open. Concurrent calls for the same buffer share one
    /// in-flight load.
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live uncommitted diff already exists.
        if let Some(OpenBuffer::Complete { diff_state, .. }) = self.opened_buffers.get(&buffer_id) {
            if let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                // If a recalculation is in flight, resolve only after it
                // finishes so the caller never observes a stale diff.
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await?;
                        Ok(uncommitted_diff)
                    });
                }
                return Task::ready(Ok(uncommitted_diff));
            }
        }

        let task = match self.loading_diffs.entry((buffer_id, DiffKind::Uncommitted)) {
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                let changes = match &self.state {
                    BufferStoreState::Local(this) => {
                        let committed_text = this.load_committed_text(&buffer, cx);
                        let staged_text = this.load_staged_text(&buffer, cx);
                        cx.background_spawn(async move {
                            let committed_text = committed_text.await?;
                            let staged_text = staged_text.await?;
                            // When the committed and staged texts agree, one
                            // shared base serves both diffs.
                            let diff_bases_change = if committed_text == staged_text {
                                DiffBasesChange::SetBoth(committed_text)
                            } else {
                                DiffBasesChange::SetEach {
                                    index: staged_text,
                                    head: committed_text,
                                }
                            };
                            Ok(diff_bases_change)
                        })
                    }
                    BufferStoreState::Remote(this) => this.open_uncommitted_diff(buffer_id, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, cx| async move {
                            Self::open_diff_internal(
                                this,
                                DiffKind::Uncommitted,
                                changes.await,
                                buffer,
                                cx,
                            )
                            .await
                            .map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1482
    /// Shared tail of `open_unstaged_diff` / `open_uncommitted_diff`:
    /// installs the loaded base text(s) on the buffer's diff state and
    /// returns the resulting diff entity once the initial computation
    /// has completed.
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        mut cx: AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the in-flight marker so a later call can retry.
                this.update(&mut cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(&mut cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            if let Some(OpenBuffer::Complete { diff_state, .. }) =
                this.opened_buffers.get_mut(&buffer_id)
            {
                let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                cx.emit(BufferStoreEvent::BufferDiffAdded(diff.clone()));
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.language = language;
                    diff_state.language_registry = language_registry;

                    // The store only holds diffs weakly; callers keep them alive.
                    match kind {
                        DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                        DiffKind::Uncommitted => {
                            // An uncommitted diff carries an unstaged diff as
                            // its secondary; create one if none exists yet.
                            let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                                diff
                            } else {
                                let unstaged_diff =
                                    cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                                diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                                unstaged_diff
                            };

                            diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                            diff_state.uncommitted_diff = Some(diff.downgrade())
                        }
                    };

                    let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, cx);

                    Ok(async move {
                        // Wait for the initial recalculation before handing
                        // the diff to the caller.
                        rx.await.ok();
                        Ok(diff)
                    })
                })
            } else {
                Err(anyhow!("buffer was closed"))
            }
        })??
        .await
    }
1549
1550 pub fn create_buffer(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<Buffer>>> {
1551 match &self.state {
1552 BufferStoreState::Local(this) => this.create_buffer(cx),
1553 BufferStoreState::Remote(this) => this.create_buffer(cx),
1554 }
1555 }
1556
1557 pub fn save_buffer(
1558 &mut self,
1559 buffer: Entity<Buffer>,
1560 cx: &mut Context<Self>,
1561 ) -> Task<Result<()>> {
1562 match &mut self.state {
1563 BufferStoreState::Local(this) => this.save_buffer(buffer, cx),
1564 BufferStoreState::Remote(this) => this.save_remote_buffer(buffer.clone(), None, cx),
1565 }
1566 }
1567
1568 pub fn save_buffer_as(
1569 &mut self,
1570 buffer: Entity<Buffer>,
1571 path: ProjectPath,
1572 cx: &mut Context<Self>,
1573 ) -> Task<Result<()>> {
1574 let old_file = buffer.read(cx).file().cloned();
1575 let task = match &self.state {
1576 BufferStoreState::Local(this) => this.save_buffer_as(buffer.clone(), path, cx),
1577 BufferStoreState::Remote(this) => {
1578 this.save_remote_buffer(buffer.clone(), Some(path.to_proto()), cx)
1579 }
1580 };
1581 cx.spawn(|this, mut cx| async move {
1582 task.await?;
1583 this.update(&mut cx, |_, cx| {
1584 cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
1585 })
1586 })
1587 }
1588
    /// Computes git blame for `buffer` — at `version` if given, otherwise at
    /// its current contents. Returns `Ok(None)` when the file is not inside a
    /// git repository. Remote worktrees forward the request to the host.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some(file) = File::from_dyn(buffer.file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        match file.worktree.clone().read(cx) {
            Worktree::Local(worktree) => {
                let worktree = worktree.snapshot();
                // Gather everything blame needs while we still have app
                // access...
                let blame_params = maybe!({
                    let local_repo = match worktree.local_repo_for_path(&file.path) {
                        Some(repo_for_path) => repo_for_path,
                        // Not in a repository: blame is simply unavailable.
                        None => return Ok(None),
                    };

                    let relative_path = local_repo
                        .relativize(&file.path)
                        .context("failed to relativize buffer path")?;

                    let repo = local_repo.repo().clone();

                    let content = match version {
                        Some(version) => buffer.rope_for_version(&version).clone(),
                        None => buffer.as_rope().clone(),
                    };

                    anyhow::Ok(Some((repo, relative_path, content)))
                });

                // ...then run the blame itself on a background thread.
                cx.background_spawn(async move {
                    let Some((repo, relative_path, content)) = blame_params? else {
                        return Ok(None);
                    };
                    repo.blame(&relative_path, content)
                        .with_context(|| format!("Failed to blame {:?}", relative_path.0))
                        .map(Some)
                })
            }
            Worktree::Remote(worktree) => {
                let buffer_id = buffer.remote_id();
                let version = buffer.version();
                let project_id = worktree.project_id();
                let client = worktree.client();
                cx.spawn(|_| async move {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id,
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                })
            }
        }
    }
1650
    /// Builds a web permalink to `selection` (a row range) in `buffer`, using
    /// the buffer's git remote and current HEAD commit. Remote worktrees
    /// forward the request to the host.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &App,
    ) -> Task<Result<url::Url>> {
        let buffer = buffer.read(cx);
        let Some(file) = File::from_dyn(buffer.file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        match file.worktree.read(cx) {
            Worktree::Local(worktree) => {
                let worktree_path = worktree.abs_path().clone();
                let Some((repo_entry, repo)) =
                    worktree.repository_for_path(file.path()).and_then(|entry| {
                        let repo = worktree.get_local_repo(&entry)?.repo().clone();
                        Some((entry, repo))
                    })
                else {
                    // If we're not in a Git repo, check whether this is a Rust source
                    // file in the Cargo registry (presumably opened with go-to-definition
                    // from a normal Rust file). If so, we can put together a permalink
                    // using crate metadata.
                    if buffer
                        .language()
                        .is_none_or(|lang| lang.name() != "Rust".into())
                    {
                        return Task::ready(Err(anyhow!("no permalink available")));
                    }
                    let file_path = worktree_path.join(file.path());
                    return cx.spawn(|cx| async move {
                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;
                        get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                            .map_err(|_| anyhow!("no permalink available"))
                    });
                };

                // Path of the file relative to the repository root.
                let path = match repo_entry.relativize(file.path()) {
                    Ok(RepoPath(path)) => path,
                    Err(e) => return Task::ready(Err(e)),
                };

                // Prefer the current branch's upstream remote, falling back
                // to "origin".
                let remote = repo_entry
                    .branch()
                    .and_then(|b| b.upstream.as_ref())
                    .and_then(|b| b.remote_name())
                    .unwrap_or("origin")
                    .to_string();

                cx.spawn(|cx| async move {
                    let origin_url = repo
                        .remote_url(&remote)
                        .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;

                    let sha = repo
                        .head_sha()
                        .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;

                    let provider_registry =
                        cx.update(GitHostingProviderRegistry::default_global)?;

                    // Identify the hosting provider (GitHub, GitLab, ...)
                    // from the remote URL so the right URL scheme is used.
                    let (provider, remote) =
                        parse_git_remote_url(provider_registry, &origin_url)
                            .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;

                    let path = path
                        .to_str()
                        .ok_or_else(|| anyhow!("failed to convert path to string"))?;

                    Ok(provider.build_permalink(
                        remote,
                        BuildPermalinkParams {
                            sha: &sha,
                            path,
                            selection: Some(selection),
                        },
                    ))
                })
            }
            Worktree::Remote(worktree) => {
                let buffer_id = buffer.remote_id();
                let project_id = worktree.project_id();
                let client = worktree.client();
                cx.spawn(|_| async move {
                    let response = client
                        .request(proto::GetPermalinkToLine {
                            project_id,
                            buffer_id: buffer_id.into(),
                            selection: Some(proto::Range {
                                start: selection.start as u64,
                                end: selection.end as u64,
                            }),
                        })
                        .await?;

                    url::Url::parse(&response.permalink).context("failed to parse permalink")
                })
            }
        }
    }
1753
    /// Registers an opened buffer with the store: creates its diff state,
    /// applies any operations that arrived before the buffer did, wires up
    /// event forwarding, and emits `BufferAdded`.
    fn add_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) -> Result<()> {
        let buffer = buffer_entity.read(cx);
        let language = buffer.language().cloned();
        let language_registry = buffer.language_registry();
        let remote_id = buffer.remote_id();
        // Replica id 0 is the authoritative copy; any other replica mirrors a
        // buffer owned elsewhere.
        let is_remote = buffer.replica_id() != 0;
        let open_buffer = OpenBuffer::Complete {
            buffer: buffer_entity.downgrade(),
            diff_state: cx.new(|_| BufferDiffState {
                language,
                language_registry,
                ..Default::default()
            }),
        };

        // Emit `BufferDropped` when the buffer entity is released so
        // listeners can clean up per-buffer state.
        let handle = cx.entity().downgrade();
        buffer_entity.update(cx, move |_, cx| {
            cx.on_release(move |buffer, cx| {
                handle
                    .update(cx, |_, cx| {
                        cx.emit(BufferStoreEvent::BufferDropped(buffer.remote_id()))
                    })
                    .ok();
            })
            .detach()
        });

        match self.opened_buffers.entry(remote_id) {
            hash_map::Entry::Vacant(entry) => {
                entry.insert(open_buffer);
            }
            hash_map::Entry::Occupied(mut entry) => {
                // Operations can arrive before the buffer itself; apply the
                // backlog now that the buffer exists.
                if let OpenBuffer::Operations(operations) = entry.get_mut() {
                    buffer_entity.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx));
                } else if entry.get().upgrade().is_some() {
                    if is_remote {
                        // The host may re-send a buffer we already have.
                        return Ok(());
                    } else {
                        // A duplicate local registration is a bug.
                        debug_panic!("buffer {} was already registered", remote_id);
                        Err(anyhow!("buffer {} was already registered", remote_id))?;
                    }
                }
                entry.insert(open_buffer);
            }
        }

        cx.subscribe(&buffer_entity, Self::on_buffer_event).detach();
        cx.emit(BufferStoreEvent::BufferAdded(buffer_entity));
        Ok(())
    }
1804
1805 pub fn buffers(&self) -> impl '_ + Iterator<Item = Entity<Buffer>> {
1806 self.opened_buffers
1807 .values()
1808 .filter_map(|buffer| buffer.upgrade())
1809 }
1810
1811 pub fn loading_buffers(
1812 &self,
1813 ) -> impl Iterator<Item = (&ProjectPath, impl Future<Output = Result<Entity<Buffer>>>)> {
1814 self.loading_buffers.iter().map(|(path, task)| {
1815 let task = task.clone();
1816 (path, async move { task.await.map_err(|e| anyhow!("{e}")) })
1817 })
1818 }
1819
1820 pub fn get_by_path(&self, path: &ProjectPath, cx: &App) -> Option<Entity<Buffer>> {
1821 self.buffers().find_map(|buffer| {
1822 let file = File::from_dyn(buffer.read(cx).file())?;
1823 if file.worktree_id(cx) == path.worktree_id && file.path == path.path {
1824 Some(buffer)
1825 } else {
1826 None
1827 }
1828 })
1829 }
1830
1831 pub fn get(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1832 self.opened_buffers.get(&buffer_id)?.upgrade()
1833 }
1834
1835 pub fn get_existing(&self, buffer_id: BufferId) -> Result<Entity<Buffer>> {
1836 self.get(buffer_id)
1837 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
1838 }
1839
1840 pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1841 self.get(buffer_id).or_else(|| {
1842 self.as_remote()
1843 .and_then(|remote| remote.loading_remote_buffers_by_id.get(&buffer_id).cloned())
1844 })
1845 }
1846
1847 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1848 if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
1849 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1850 } else {
1851 None
1852 }
1853 }
1854
1855 pub fn get_uncommitted_diff(
1856 &self,
1857 buffer_id: BufferId,
1858 cx: &App,
1859 ) -> Option<Entity<BufferDiff>> {
1860 if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
1861 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1862 } else {
1863 None
1864 }
1865 }
1866
1867 pub fn buffer_version_info(&self, cx: &App) -> (Vec<proto::BufferVersion>, Vec<BufferId>) {
1868 let buffers = self
1869 .buffers()
1870 .map(|buffer| {
1871 let buffer = buffer.read(cx);
1872 proto::BufferVersion {
1873 id: buffer.remote_id().into(),
1874 version: language::proto::serialize_version(&buffer.version),
1875 }
1876 })
1877 .collect();
1878 let incomplete_buffer_ids = self
1879 .as_remote()
1880 .map(|remote| remote.incomplete_buffer_ids())
1881 .unwrap_or_default();
1882 (buffers, incomplete_buffer_ids)
1883 }
1884
1885 pub fn disconnected_from_host(&mut self, cx: &mut App) {
1886 for open_buffer in self.opened_buffers.values_mut() {
1887 if let Some(buffer) = open_buffer.upgrade() {
1888 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1889 }
1890 }
1891
1892 for buffer in self.buffers() {
1893 buffer.update(cx, |buffer, cx| {
1894 buffer.set_capability(Capability::ReadOnly, cx)
1895 });
1896 }
1897
1898 if let Some(remote) = self.as_remote_mut() {
1899 // Wake up all futures currently waiting on a buffer to get opened,
1900 // to give them a chance to fail now that we've disconnected.
1901 remote.remote_buffer_listeners.clear()
1902 }
1903 }
1904
    /// Marks this store as shared with a downstream peer under `remote_id`,
    /// enabling outgoing buffer updates.
    pub fn shared(&mut self, remote_id: u64, downstream_client: AnyProtoClient, _cx: &mut App) {
        self.downstream_client = Some((downstream_client, remote_id));
    }
1908
    /// Stops sharing: drops the downstream client and forgets which buffers
    /// have been sent to which peers.
    pub fn unshared(&mut self, _cx: &mut Context<Self>) {
        self.downstream_client.take();
        self.forget_shared_buffers();
    }
1913
1914 pub fn discard_incomplete(&mut self) {
1915 self.opened_buffers
1916 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
1917 }
1918
    /// Streams buffers that may contain matches for `query`: already-open
    /// unnamed buffers first, then buffers opened on demand for candidate
    /// paths reported by the worktree store, up to `limit` total.
    pub fn find_search_candidates(
        &mut self,
        query: &SearchQuery,
        mut limit: usize,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Receiver<Entity<Buffer>> {
        let (tx, rx) = smol::channel::unbounded();
        let mut open_buffers = HashSet::default();
        let mut unnamed_buffers = Vec::new();
        for handle in self.buffers() {
            let buffer = handle.read(cx);
            if let Some(entry_id) = buffer.entry_id(cx) {
                open_buffers.insert(entry_id);
            } else {
                // Unnamed buffers can't be found via the worktree scan, so
                // include them directly and count them against the limit.
                limit = limit.saturating_sub(1);
                unnamed_buffers.push(handle)
            };
        }

        // Bound how many buffer loads run at once.
        const MAX_CONCURRENT_BUFFER_OPENS: usize = 64;
        let project_paths_rx = self
            .worktree_store
            .update(cx, |worktree_store, cx| {
                worktree_store.find_search_candidates(query.clone(), limit, open_buffers, fs, cx)
            })
            .chunks(MAX_CONCURRENT_BUFFER_OPENS);

        cx.spawn(|this, mut cx| async move {
            for buffer in unnamed_buffers {
                tx.send(buffer).await.ok();
            }

            let mut project_paths_rx = pin!(project_paths_rx);
            while let Some(project_paths) = project_paths_rx.next().await {
                // Open one chunk of candidate paths concurrently.
                let buffers = this.update(&mut cx, |this, cx| {
                    project_paths
                        .into_iter()
                        .map(|project_path| this.open_buffer(project_path, cx))
                        .collect::<Vec<_>>()
                })?;
                for buffer_task in buffers {
                    if let Some(buffer) = buffer_task.await.log_err() {
                        // A closed receiver means the search was abandoned.
                        if tx.send(buffer).await.is_err() {
                            return anyhow::Ok(());
                        }
                    }
                }
            }
            anyhow::Ok(())
        })
        .detach();
        rx
    }
1973
1974 pub fn recalculate_buffer_diffs(
1975 &mut self,
1976 buffers: Vec<Entity<Buffer>>,
1977 cx: &mut Context<Self>,
1978 ) -> impl Future<Output = ()> {
1979 let mut futures = Vec::new();
1980 for buffer in buffers {
1981 if let Some(OpenBuffer::Complete { diff_state, .. }) =
1982 self.opened_buffers.get_mut(&buffer.read(cx).remote_id())
1983 {
1984 let buffer = buffer.read(cx).text_snapshot();
1985 futures.push(diff_state.update(cx, |diff_state, cx| {
1986 diff_state.recalculate_diffs(buffer, cx)
1987 }));
1988 }
1989 }
1990 async move {
1991 futures::future::join_all(futures).await;
1992 }
1993 }
1994
    /// Reacts to events emitted by an individual buffer: updates local path
    /// indices, relays reloads downstream, and refreshes diff language info.
    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::FileHandleChanged => {
                // Only local stores track buffers by path/entry id.
                if let Some(local) = self.as_local_mut() {
                    local.buffer_changed_file(buffer, cx);
                }
            }
            BufferEvent::Reloaded => {
                // Relay the reload to the downstream peer, if shared.
                let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else {
                    return;
                };
                let buffer = buffer.read(cx);
                downstream_client
                    .send(proto::BufferReloaded {
                        project_id: *project_id,
                        buffer_id: buffer.remote_id().to_proto(),
                        version: serialize_version(&buffer.version()),
                        mtime: buffer.saved_mtime().map(|t| t.into()),
                        line_ending: serialize_line_ending(buffer.line_ending()) as i32,
                    })
                    .log_err();
            }
            BufferEvent::LanguageChanged => {
                // Keep the diff state's syntax highlighting in sync with the
                // buffer's new language.
                let buffer_id = buffer.read(cx).remote_id();
                if let Some(OpenBuffer::Complete { diff_state, .. }) =
                    self.opened_buffers.get(&buffer_id)
                {
                    diff_state.update(cx, |diff_state, cx| {
                        diff_state.buffer_language_changed(buffer, cx);
                    });
                }
            }
            _ => {}
        }
    }
2035
    /// Handles an `UpdateBuffer` message carrying CRDT operations. If the
    /// buffer hasn't been created yet, the operations are stored and applied
    /// later by `add_buffer`.
    pub async fn handle_update_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let payload = envelope.payload.clone();
        let buffer_id = BufferId::new(payload.buffer_id)?;
        let ops = payload
            .operations
            .into_iter()
            .map(language::proto::deserialize_operation)
            .collect::<Result<Vec<_>, _>>()?;
        this.update(&mut cx, |this, cx| {
            match this.opened_buffers.entry(buffer_id) {
                hash_map::Entry::Occupied(mut e) => match e.get_mut() {
                    // Buffer not materialized yet: queue the operations.
                    OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
                    OpenBuffer::Complete { buffer, .. } => {
                        // A dead weak handle means the buffer was dropped;
                        // the operations are no longer needed.
                        if let Some(buffer) = buffer.upgrade() {
                            buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx));
                        }
                    }
                },
                hash_map::Entry::Vacant(e) => {
                    // First message for this buffer: start an operations
                    // backlog.
                    e.insert(OpenBuffer::Operations(ops));
                }
            }
            Ok(proto::Ack {})
        })?
    }
2065
2066 pub fn register_shared_lsp_handle(
2067 &mut self,
2068 peer_id: proto::PeerId,
2069 buffer_id: BufferId,
2070 handle: OpenLspBufferHandle,
2071 ) {
2072 if let Some(shared_buffers) = self.shared_buffers.get_mut(&peer_id) {
2073 if let Some(buffer) = shared_buffers.get_mut(&buffer_id) {
2074 buffer.lsp_handle = Some(handle);
2075 return;
2076 }
2077 }
2078 debug_panic!("tried to register shared lsp handle, but buffer was not shared")
2079 }
2080
    /// Handles a `SynchronizeBuffers` request from a reconnecting guest:
    /// re-registers each of the guest's buffers as shared, reports our
    /// current versions, and pushes file metadata, saved state, and any
    /// operations the guest is missing.
    pub fn handle_synchronize_buffers(
        &mut self,
        envelope: TypedEnvelope<proto::SynchronizeBuffers>,
        cx: &mut Context<Self>,
        client: Arc<Client>,
    ) -> Result<proto::SynchronizeBuffersResponse> {
        let project_id = envelope.payload.project_id;
        let mut response = proto::SynchronizeBuffersResponse {
            buffers: Default::default(),
        };
        let Some(guest_id) = envelope.original_sender_id else {
            anyhow::bail!("missing original_sender_id on SynchronizeBuffers request");
        };

        // Start the guest's shared set from scratch; only the buffers it
        // reports below remain shared.
        self.shared_buffers.entry(guest_id).or_default().clear();
        for buffer in envelope.payload.buffers {
            let buffer_id = BufferId::new(buffer.id)?;
            let remote_version = language::proto::deserialize_version(&buffer.version);
            if let Some(buffer) = self.get(buffer_id) {
                self.shared_buffers
                    .entry(guest_id)
                    .or_default()
                    .entry(buffer_id)
                    .or_insert_with(|| SharedBuffer {
                        buffer: buffer.clone(),
                        diff: None,
                        lsp_handle: None,
                    });

                let buffer = buffer.read(cx);
                // Tell the guest our version so it can request what we miss.
                response.buffers.push(proto::BufferVersion {
                    id: buffer_id.into(),
                    version: language::proto::serialize_version(&buffer.version),
                });

                // Operations the guest hasn't seen, relative to its version.
                let operations = buffer.serialize_ops(Some(remote_version), cx);
                let client = client.clone();
                if let Some(file) = buffer.file() {
                    client
                        .send(proto::UpdateBufferFile {
                            project_id,
                            buffer_id: buffer_id.into(),
                            file: Some(file.to_proto(cx)),
                        })
                        .log_err();
                }

                // TODO(max): do something
                // client
                //     .send(proto::UpdateStagedText {
                //         project_id,
                //         buffer_id: buffer_id.into(),
                //         diff_base: buffer.diff_base().map(ToString::to_string),
                //     })
                //     .log_err();

                client
                    .send(proto::BufferReloaded {
                        project_id,
                        buffer_id: buffer_id.into(),
                        version: language::proto::serialize_version(buffer.saved_version()),
                        mtime: buffer.saved_mtime().map(|time| time.into()),
                        line_ending: language::proto::serialize_line_ending(buffer.line_ending())
                            as i32,
                    })
                    .log_err();

                // Stream the missing operations in protocol-sized chunks off
                // the main thread.
                cx.background_spawn(
                    async move {
                        let operations = operations.await;
                        for chunk in split_operations(operations) {
                            client
                                .request(proto::UpdateBuffer {
                                    project_id,
                                    buffer_id: buffer_id.into(),
                                    operations: chunk,
                                })
                                .await?;
                        }
                        anyhow::Ok(())
                    }
                    .log_err(),
                )
                .detach();
            }
        }
        Ok(response)
    }
2169
2170 pub fn handle_create_buffer_for_peer(
2171 &mut self,
2172 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
2173 replica_id: u16,
2174 capability: Capability,
2175 cx: &mut Context<Self>,
2176 ) -> Result<()> {
2177 let Some(remote) = self.as_remote_mut() else {
2178 return Err(anyhow!("buffer store is not a remote"));
2179 };
2180
2181 if let Some(buffer) =
2182 remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx)?
2183 {
2184 self.add_buffer(buffer, cx)?;
2185 }
2186
2187 Ok(())
2188 }
2189
2190 pub async fn handle_update_buffer_file(
2191 this: Entity<Self>,
2192 envelope: TypedEnvelope<proto::UpdateBufferFile>,
2193 mut cx: AsyncApp,
2194 ) -> Result<()> {
2195 let buffer_id = envelope.payload.buffer_id;
2196 let buffer_id = BufferId::new(buffer_id)?;
2197
2198 this.update(&mut cx, |this, cx| {
2199 let payload = envelope.payload.clone();
2200 if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
2201 let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
2202 let worktree = this
2203 .worktree_store
2204 .read(cx)
2205 .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
2206 .ok_or_else(|| anyhow!("no such worktree"))?;
2207 let file = File::from_proto(file, worktree, cx)?;
2208 let old_file = buffer.update(cx, |buffer, cx| {
2209 let old_file = buffer.file().cloned();
2210 let new_path = file.path.clone();
2211 buffer.file_updated(Arc::new(file), cx);
2212 if old_file
2213 .as_ref()
2214 .map_or(true, |old| *old.path() != new_path)
2215 {
2216 Some(old_file)
2217 } else {
2218 None
2219 }
2220 });
2221 if let Some(old_file) = old_file {
2222 cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
2223 }
2224 }
2225 if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
2226 downstream_client
2227 .send(proto::UpdateBufferFile {
2228 project_id: *project_id,
2229 buffer_id: buffer_id.into(),
2230 file: envelope.payload.file,
2231 })
2232 .log_err();
2233 }
2234 Ok(())
2235 })?
2236 }
2237
2238 pub async fn handle_save_buffer(
2239 this: Entity<Self>,
2240 envelope: TypedEnvelope<proto::SaveBuffer>,
2241 mut cx: AsyncApp,
2242 ) -> Result<proto::BufferSaved> {
2243 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2244 let (buffer, project_id) = this.update(&mut cx, |this, _| {
2245 anyhow::Ok((
2246 this.get_existing(buffer_id)?,
2247 this.downstream_client
2248 .as_ref()
2249 .map(|(_, project_id)| *project_id)
2250 .context("project is not shared")?,
2251 ))
2252 })??;
2253 buffer
2254 .update(&mut cx, |buffer, _| {
2255 buffer.wait_for_version(deserialize_version(&envelope.payload.version))
2256 })?
2257 .await?;
2258 let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;
2259
2260 if let Some(new_path) = envelope.payload.new_path {
2261 let new_path = ProjectPath::from_proto(new_path);
2262 this.update(&mut cx, |this, cx| {
2263 this.save_buffer_as(buffer.clone(), new_path, cx)
2264 })?
2265 .await?;
2266 } else {
2267 this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
2268 .await?;
2269 }
2270
2271 buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
2272 project_id,
2273 buffer_id: buffer_id.into(),
2274 version: serialize_version(buffer.saved_version()),
2275 mtime: buffer.saved_mtime().map(|time| time.into()),
2276 })
2277 }
2278
2279 pub async fn handle_close_buffer(
2280 this: Entity<Self>,
2281 envelope: TypedEnvelope<proto::CloseBuffer>,
2282 mut cx: AsyncApp,
2283 ) -> Result<()> {
2284 let peer_id = envelope.sender_id;
2285 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2286 this.update(&mut cx, |this, _| {
2287 if let Some(shared) = this.shared_buffers.get_mut(&peer_id) {
2288 if shared.remove(&buffer_id).is_some() {
2289 if shared.is_empty() {
2290 this.shared_buffers.remove(&peer_id);
2291 }
2292 return;
2293 }
2294 }
2295 debug_panic!(
2296 "peer_id {} closed buffer_id {} which was either not open or already closed",
2297 peer_id,
2298 buffer_id
2299 )
2300 })
2301 }
2302
2303 pub async fn handle_buffer_saved(
2304 this: Entity<Self>,
2305 envelope: TypedEnvelope<proto::BufferSaved>,
2306 mut cx: AsyncApp,
2307 ) -> Result<()> {
2308 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2309 let version = deserialize_version(&envelope.payload.version);
2310 let mtime = envelope.payload.mtime.clone().map(|time| time.into());
2311 this.update(&mut cx, move |this, cx| {
2312 if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
2313 buffer.update(cx, |buffer, cx| {
2314 buffer.did_save(version, mtime, cx);
2315 });
2316 }
2317
2318 if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
2319 downstream_client
2320 .send(proto::BufferSaved {
2321 project_id: *project_id,
2322 buffer_id: buffer_id.into(),
2323 mtime: envelope.payload.mtime,
2324 version: envelope.payload.version,
2325 })
2326 .log_err();
2327 }
2328 })
2329 }
2330
2331 pub async fn handle_buffer_reloaded(
2332 this: Entity<Self>,
2333 envelope: TypedEnvelope<proto::BufferReloaded>,
2334 mut cx: AsyncApp,
2335 ) -> Result<()> {
2336 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2337 let version = deserialize_version(&envelope.payload.version);
2338 let mtime = envelope.payload.mtime.clone().map(|time| time.into());
2339 let line_ending = deserialize_line_ending(
2340 proto::LineEnding::from_i32(envelope.payload.line_ending)
2341 .ok_or_else(|| anyhow!("missing line ending"))?,
2342 );
2343 this.update(&mut cx, |this, cx| {
2344 if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
2345 buffer.update(cx, |buffer, cx| {
2346 buffer.did_reload(version, line_ending, mtime, cx);
2347 });
2348 }
2349
2350 if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
2351 downstream_client
2352 .send(proto::BufferReloaded {
2353 project_id: *project_id,
2354 buffer_id: buffer_id.into(),
2355 mtime: envelope.payload.mtime,
2356 version: envelope.payload.version,
2357 line_ending: envelope.payload.line_ending,
2358 })
2359 .log_err();
2360 }
2361 })
2362 }
2363
2364 pub async fn handle_blame_buffer(
2365 this: Entity<Self>,
2366 envelope: TypedEnvelope<proto::BlameBuffer>,
2367 mut cx: AsyncApp,
2368 ) -> Result<proto::BlameBufferResponse> {
2369 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2370 let version = deserialize_version(&envelope.payload.version);
2371 let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
2372 buffer
2373 .update(&mut cx, |buffer, _| {
2374 buffer.wait_for_version(version.clone())
2375 })?
2376 .await?;
2377 let blame = this
2378 .update(&mut cx, |this, cx| {
2379 this.blame_buffer(&buffer, Some(version), cx)
2380 })?
2381 .await?;
2382 Ok(serialize_blame_buffer_response(blame))
2383 }
2384
2385 pub async fn handle_get_permalink_to_line(
2386 this: Entity<Self>,
2387 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2388 mut cx: AsyncApp,
2389 ) -> Result<proto::GetPermalinkToLineResponse> {
2390 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2391 // let version = deserialize_version(&envelope.payload.version);
2392 let selection = {
2393 let proto_selection = envelope
2394 .payload
2395 .selection
2396 .context("no selection to get permalink for defined")?;
2397 proto_selection.start as u32..proto_selection.end as u32
2398 };
2399 let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
2400 let permalink = this
2401 .update(&mut cx, |this, cx| {
2402 this.get_permalink_to_line(&buffer, selection, cx)
2403 })?
2404 .await?;
2405 Ok(proto::GetPermalinkToLineResponse {
2406 permalink: permalink.to_string(),
2407 })
2408 }
2409
2410 pub async fn handle_open_unstaged_diff(
2411 this: Entity<Self>,
2412 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2413 mut cx: AsyncApp,
2414 ) -> Result<proto::OpenUnstagedDiffResponse> {
2415 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2416 let diff = this
2417 .update(&mut cx, |this, cx| {
2418 let buffer = this.get(buffer_id)?;
2419 Some(this.open_unstaged_diff(buffer, cx))
2420 })?
2421 .ok_or_else(|| anyhow!("no such buffer"))?
2422 .await?;
2423 this.update(&mut cx, |this, _| {
2424 let shared_buffers = this
2425 .shared_buffers
2426 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2427 .or_default();
2428 debug_assert!(shared_buffers.contains_key(&buffer_id));
2429 if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
2430 shared.diff = Some(diff.clone());
2431 }
2432 })?;
2433 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2434 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2435 }
2436
    /// Opens (or retrieves) the uncommitted diff for a shared buffer, records
    /// it on the peer's shared-buffer entry, and replies with the committed
    /// (HEAD) and staged (index) base texts.
    ///
    /// The response `mode` tells the peer whether the staged text matches the
    /// committed text, so identical base texts are not sent twice.
    pub async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })?
            .ok_or_else(|| anyhow!("no such buffer"))?
            .await?;
        // Keep the diff alive for as long as the peer has the buffer open by
        // storing it on the peer's shared-buffer entry.
        this.update(&mut cx, |this, _| {
            let shared_buffers = this
                .shared_buffers
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            debug_assert!(shared_buffers.contains_key(&buffer_id));
            if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
                shared.diff = Some(diff.clone());
            }
        })?;
        diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff, when present, tracks the staged (index)
            // base text.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text())
            });

            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text();
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    // Matching snapshot ids mean the index and HEAD share the
                    // same base text, so only the committed text is sent.
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No committed base text (e.g. a newly created file): send
                // whatever is staged, if anything.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        })
    }
2500
2501 pub async fn handle_update_diff_bases(
2502 this: Entity<Self>,
2503 request: TypedEnvelope<proto::UpdateDiffBases>,
2504 mut cx: AsyncApp,
2505 ) -> Result<()> {
2506 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2507 this.update(&mut cx, |this, cx| {
2508 if let Some(OpenBuffer::Complete { diff_state, buffer }) =
2509 this.opened_buffers.get_mut(&buffer_id)
2510 {
2511 if let Some(buffer) = buffer.upgrade() {
2512 let buffer = buffer.read(cx).text_snapshot();
2513 diff_state.update(cx, |diff_state, cx| {
2514 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2515 })
2516 }
2517 }
2518 })
2519 }
2520
2521 pub fn reload_buffers(
2522 &self,
2523 buffers: HashSet<Entity<Buffer>>,
2524 push_to_history: bool,
2525 cx: &mut Context<Self>,
2526 ) -> Task<Result<ProjectTransaction>> {
2527 if buffers.is_empty() {
2528 return Task::ready(Ok(ProjectTransaction::default()));
2529 }
2530 match &self.state {
2531 BufferStoreState::Local(this) => this.reload_buffers(buffers, push_to_history, cx),
2532 BufferStoreState::Remote(this) => this.reload_buffers(buffers, push_to_history, cx),
2533 }
2534 }
2535
2536 async fn handle_reload_buffers(
2537 this: Entity<Self>,
2538 envelope: TypedEnvelope<proto::ReloadBuffers>,
2539 mut cx: AsyncApp,
2540 ) -> Result<proto::ReloadBuffersResponse> {
2541 let sender_id = envelope.original_sender_id().unwrap_or_default();
2542 let reload = this.update(&mut cx, |this, cx| {
2543 let mut buffers = HashSet::default();
2544 for buffer_id in &envelope.payload.buffer_ids {
2545 let buffer_id = BufferId::new(*buffer_id)?;
2546 buffers.insert(this.get_existing(buffer_id)?);
2547 }
2548 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
2549 })??;
2550
2551 let project_transaction = reload.await?;
2552 let project_transaction = this.update(&mut cx, |this, cx| {
2553 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2554 })?;
2555 Ok(proto::ReloadBuffersResponse {
2556 transaction: Some(project_transaction),
2557 })
2558 }
2559
2560 pub fn create_buffer_for_peer(
2561 &mut self,
2562 buffer: &Entity<Buffer>,
2563 peer_id: proto::PeerId,
2564 cx: &mut Context<Self>,
2565 ) -> Task<Result<()>> {
2566 let buffer_id = buffer.read(cx).remote_id();
2567 let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
2568 if shared_buffers.contains_key(&buffer_id) {
2569 return Task::ready(Ok(()));
2570 }
2571 shared_buffers.insert(
2572 buffer_id,
2573 SharedBuffer {
2574 buffer: buffer.clone(),
2575 diff: None,
2576 lsp_handle: None,
2577 },
2578 );
2579
2580 let Some((client, project_id)) = self.downstream_client.clone() else {
2581 return Task::ready(Ok(()));
2582 };
2583
2584 cx.spawn(|this, mut cx| async move {
2585 let Some(buffer) = this.update(&mut cx, |this, _| this.get(buffer_id))? else {
2586 return anyhow::Ok(());
2587 };
2588
2589 let operations = buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
2590 let operations = operations.await;
2591 let state = buffer.update(&mut cx, |buffer, cx| buffer.to_proto(cx))?;
2592
2593 let initial_state = proto::CreateBufferForPeer {
2594 project_id,
2595 peer_id: Some(peer_id),
2596 variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
2597 };
2598
2599 if client.send(initial_state).log_err().is_some() {
2600 let client = client.clone();
2601 cx.background_spawn(async move {
2602 let mut chunks = split_operations(operations).peekable();
2603 while let Some(chunk) = chunks.next() {
2604 let is_last = chunks.peek().is_none();
2605 client.send(proto::CreateBufferForPeer {
2606 project_id,
2607 peer_id: Some(peer_id),
2608 variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
2609 proto::BufferChunk {
2610 buffer_id: buffer_id.into(),
2611 operations: chunk,
2612 is_last,
2613 },
2614 )),
2615 })?;
2616 }
2617 anyhow::Ok(())
2618 })
2619 .await
2620 .log_err();
2621 }
2622 Ok(())
2623 })
2624 }
2625
    /// Stops tracking every buffer previously shared with any peer.
    pub fn forget_shared_buffers(&mut self) {
        self.shared_buffers.clear();
    }
2629
    /// Stops tracking every buffer previously shared with the given peer.
    pub fn forget_shared_buffers_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_buffers.remove(peer_id);
    }
2633
    /// Re-keys the shared buffers of a peer whose id changed, preserving the
    /// set of buffers shared with it. No-op when the old id is unknown.
    pub fn update_peer_id(&mut self, old_peer_id: &proto::PeerId, new_peer_id: proto::PeerId) {
        if let Some(buffers) = self.shared_buffers.remove(old_peer_id) {
            self.shared_buffers.insert(new_peer_id, buffers);
        }
    }
2639
    /// Whether any buffer is currently shared with any peer.
    pub fn has_shared_buffers(&self) -> bool {
        !self.shared_buffers.is_empty()
    }
2643
2644 pub fn create_local_buffer(
2645 &mut self,
2646 text: &str,
2647 language: Option<Arc<Language>>,
2648 cx: &mut Context<Self>,
2649 ) -> Entity<Buffer> {
2650 let buffer = cx.new(|cx| {
2651 Buffer::local(text, cx)
2652 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
2653 });
2654
2655 self.add_buffer(buffer.clone(), cx).log_err();
2656 let buffer_id = buffer.read(cx).remote_id();
2657
2658 let this = self
2659 .as_local_mut()
2660 .expect("local-only method called in a non-local context");
2661 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2662 this.local_buffer_ids_by_path.insert(
2663 ProjectPath {
2664 worktree_id: file.worktree_id(cx),
2665 path: file.path.clone(),
2666 },
2667 buffer_id,
2668 );
2669
2670 if let Some(entry_id) = file.entry_id {
2671 this.local_buffer_ids_by_entry_id
2672 .insert(entry_id, buffer_id);
2673 }
2674 }
2675 buffer
2676 }
2677
2678 pub fn deserialize_project_transaction(
2679 &mut self,
2680 message: proto::ProjectTransaction,
2681 push_to_history: bool,
2682 cx: &mut Context<Self>,
2683 ) -> Task<Result<ProjectTransaction>> {
2684 if let Some(this) = self.as_remote_mut() {
2685 this.deserialize_project_transaction(message, push_to_history, cx)
2686 } else {
2687 debug_panic!("not a remote buffer store");
2688 Task::ready(Err(anyhow!("not a remote buffer store")))
2689 }
2690 }
2691
2692 pub fn wait_for_remote_buffer(
2693 &mut self,
2694 id: BufferId,
2695 cx: &mut Context<BufferStore>,
2696 ) -> Task<Result<Entity<Buffer>>> {
2697 if let Some(this) = self.as_remote_mut() {
2698 this.wait_for_remote_buffer(id, cx)
2699 } else {
2700 debug_panic!("not a remote buffer store");
2701 Task::ready(Err(anyhow!("not a remote buffer store")))
2702 }
2703 }
2704
2705 pub fn serialize_project_transaction_for_peer(
2706 &mut self,
2707 project_transaction: ProjectTransaction,
2708 peer_id: proto::PeerId,
2709 cx: &mut Context<Self>,
2710 ) -> proto::ProjectTransaction {
2711 let mut serialized_transaction = proto::ProjectTransaction {
2712 buffer_ids: Default::default(),
2713 transactions: Default::default(),
2714 };
2715 for (buffer, transaction) in project_transaction.0 {
2716 self.create_buffer_for_peer(&buffer, peer_id, cx)
2717 .detach_and_log_err(cx);
2718 serialized_transaction
2719 .buffer_ids
2720 .push(buffer.read(cx).remote_id().into());
2721 serialized_transaction
2722 .transactions
2723 .push(language::proto::serialize_transaction(&transaction));
2724 }
2725 serialized_transaction
2726 }
2727}
2728
2729impl OpenBuffer {
2730 fn upgrade(&self) -> Option<Entity<Buffer>> {
2731 match self {
2732 OpenBuffer::Complete { buffer, .. } => buffer.upgrade(),
2733 OpenBuffer::Operations(_) => None,
2734 }
2735 }
2736}
2737
2738fn is_not_found_error(error: &anyhow::Error) -> bool {
2739 error
2740 .root_cause()
2741 .downcast_ref::<io::Error>()
2742 .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
2743}
2744
2745fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
2746 let Some(blame) = blame else {
2747 return proto::BlameBufferResponse {
2748 blame_response: None,
2749 };
2750 };
2751
2752 let entries = blame
2753 .entries
2754 .into_iter()
2755 .map(|entry| proto::BlameEntry {
2756 sha: entry.sha.as_bytes().into(),
2757 start_line: entry.range.start,
2758 end_line: entry.range.end,
2759 original_line_number: entry.original_line_number,
2760 author: entry.author.clone(),
2761 author_mail: entry.author_mail.clone(),
2762 author_time: entry.author_time,
2763 author_tz: entry.author_tz.clone(),
2764 committer: entry.committer_name.clone(),
2765 committer_mail: entry.committer_email.clone(),
2766 committer_time: entry.committer_time,
2767 committer_tz: entry.committer_tz.clone(),
2768 summary: entry.summary.clone(),
2769 previous: entry.previous.clone(),
2770 filename: entry.filename.clone(),
2771 })
2772 .collect::<Vec<_>>();
2773
2774 let messages = blame
2775 .messages
2776 .into_iter()
2777 .map(|(oid, message)| proto::CommitMessage {
2778 oid: oid.as_bytes().into(),
2779 message,
2780 })
2781 .collect::<Vec<_>>();
2782
2783 proto::BlameBufferResponse {
2784 blame_response: Some(proto::blame_buffer_response::BlameResponse {
2785 entries,
2786 messages,
2787 remote_url: blame.remote_url,
2788 }),
2789 }
2790}
2791
2792fn deserialize_blame_buffer_response(
2793 response: proto::BlameBufferResponse,
2794) -> Option<git::blame::Blame> {
2795 let response = response.blame_response?;
2796 let entries = response
2797 .entries
2798 .into_iter()
2799 .filter_map(|entry| {
2800 Some(git::blame::BlameEntry {
2801 sha: git::Oid::from_bytes(&entry.sha).ok()?,
2802 range: entry.start_line..entry.end_line,
2803 original_line_number: entry.original_line_number,
2804 committer_name: entry.committer,
2805 committer_time: entry.committer_time,
2806 committer_tz: entry.committer_tz,
2807 committer_email: entry.committer_mail,
2808 author: entry.author,
2809 author_mail: entry.author_mail,
2810 author_time: entry.author_time,
2811 author_tz: entry.author_tz,
2812 summary: entry.summary,
2813 previous: entry.previous,
2814 filename: entry.filename,
2815 })
2816 })
2817 .collect::<Vec<_>>();
2818
2819 let messages = response
2820 .messages
2821 .into_iter()
2822 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
2823 .collect::<HashMap<_, _>>();
2824
2825 Some(Blame {
2826 entries,
2827 messages,
2828 remote_url: response.remote_url,
2829 })
2830}
2831
2832fn get_permalink_in_rust_registry_src(
2833 provider_registry: Arc<GitHostingProviderRegistry>,
2834 path: PathBuf,
2835 selection: Range<u32>,
2836) -> Result<url::Url> {
2837 #[derive(Deserialize)]
2838 struct CargoVcsGit {
2839 sha1: String,
2840 }
2841
2842 #[derive(Deserialize)]
2843 struct CargoVcsInfo {
2844 git: CargoVcsGit,
2845 path_in_vcs: String,
2846 }
2847
2848 #[derive(Deserialize)]
2849 struct CargoPackage {
2850 repository: String,
2851 }
2852
2853 #[derive(Deserialize)]
2854 struct CargoToml {
2855 package: CargoPackage,
2856 }
2857
2858 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
2859 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
2860 Some((dir, json))
2861 }) else {
2862 bail!("No .cargo_vcs_info.json found in parent directories")
2863 };
2864 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
2865 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
2866 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
2867 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
2868 .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
2869 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
2870 let permalink = provider.build_permalink(
2871 remote,
2872 BuildPermalinkParams {
2873 sha: &cargo_vcs_info.git.sha1,
2874 path: &path.to_string_lossy(),
2875 selection: Some(selection),
2876 },
2877 );
2878 Ok(permalink)
2879}