1use crate::{
2 lsp_store::OpenLspBufferHandle,
3 search::SearchQuery,
4 worktree_store::{WorktreeStore, WorktreeStoreEvent},
5 ProjectItem as _, ProjectPath,
6};
7use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry};
8use anyhow::{anyhow, bail, Context as _, Result};
9use buffer_diff::BufferDiff;
10use client::Client;
11use collections::{hash_map, HashMap, HashSet};
12use fs::Fs;
13use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt};
14use git::{blame::Blame, repository::RepoPath};
15use gpui::{
16 App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
17};
18use language::{
19 proto::{
20 deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version,
21 split_operations,
22 },
23 Buffer, BufferEvent, Capability, DiskState, File as _, Language, LanguageRegistry, Operation,
24};
25use rpc::{
26 proto::{self, ToProto},
27 AnyProtoClient, ErrorExt as _, TypedEnvelope,
28};
29use serde::Deserialize;
30use smol::channel::Receiver;
31use std::{
32 io,
33 ops::Range,
34 path::{Path, PathBuf},
35 pin::pin,
36 sync::Arc,
37 time::Instant,
38};
39use text::BufferId;
40use util::{debug_panic, maybe, ResultExt as _, TryFutureExt};
41use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId};
42
/// Which git diff is being loaded/tracked for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer contents vs. the git index (staged text).
    Unstaged,
    /// Buffer contents vs. the committed text (HEAD).
    Uncommitted,
}
48
/// A set of open buffers.
pub struct BufferStore {
    // Local or remote backing implementation.
    state: BufferStoreState,
    // In-flight buffer loads, keyed by project path. The task is `Shared`
    // so multiple concurrent opens of the same path await a single load.
    #[allow(clippy::type_complexity)]
    loading_buffers: HashMap<ProjectPath, Shared<Task<Result<Entity<Buffer>, Arc<anyhow::Error>>>>>,
    // In-flight diff loads, keyed by buffer id and diff kind, shared for the
    // same reason as `loading_buffers`.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    worktree_store: Entity<WorktreeStore>,
    // Buffers known to this store, by id; see `OpenBuffer` for the states an
    // entry can be in.
    opened_buffers: HashMap<BufferId, OpenBuffer>,
    // Client and remote project id to forward updates to when this project
    // is shared downstream.
    downstream_client: Option<(AnyProtoClient, u64)>,
    // Buffers that have been sent to each peer (strong handles).
    shared_buffers: HashMap<proto::PeerId, HashMap<BufferId, SharedBuffer>>,
}
62
/// A buffer shared with a peer, together with the diff and LSP handle
/// (if any) that were shared along with it.
#[derive(Hash, Eq, PartialEq, Clone)]
struct SharedBuffer {
    buffer: Entity<Buffer>,
    diff: Option<Entity<BufferDiff>>,
    lsp_handle: Option<OpenLspBufferHandle>,
}
69
/// Per-buffer git diff state: the base texts (index and HEAD) plus the
/// diff entities derived from them, and bookkeeping for recalculation.
#[derive(Default)]
struct BufferDiffState {
    // Held weakly; upgraded on access (see `unstaged_diff()`).
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    // Held weakly; upgraded on access (see `uncommitted_diff()`).
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    // The currently-scheduled recalculation; replaced on each new request.
    recalculate_diff_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders resolved when the in-flight recalculation finishes.
    diff_updated_futures: Vec<oneshot::Sender<()>>,

    // Base texts (line endings normalized). When the index matches HEAD,
    // both fields hold the same `Arc` (see `DiffBasesChange::SetBoth`).
    head_text: Option<Arc<String>>,
    index_text: Option<Arc<String>>,
    // Dirty flags consumed (and cleared) by `recalculate_diffs`.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
85
/// Describes which diff base texts changed and their new values
/// (`None` means "no base text", e.g. an untracked file).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Both changed, to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Both changed and the index matches HEAD exactly.
    SetBoth(Option<String>),
}
96
impl BufferDiffState {
    /// Records the buffer's new language and kicks off a diff recalculation
    /// so the diff snapshots are rebuilt with the new language.
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        // Receiver intentionally dropped; callers of this path don't wait.
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Upgrades the weak unstaged-diff handle, if it is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Upgrades the weak uncommitted-diff handle, if it is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Handles a remote `UpdateDiffBases` message by translating its mode
    /// into a `DiffBasesChange` and scheduling a recalculation.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        // Unrecognized modes (e.g. from a newer peer) are ignored.
        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        let _ = self.diff_bases_changed(buffer, diff_bases_change, cx);
    }

    /// Returns a receiver that resolves when the in-progress recalculation
    /// completes, or `None` if no recalculation is pending.
    pub fn wait_for_recalculation(&mut self) -> Option<oneshot::Receiver<()>> {
        if self.diff_updated_futures.is_empty() {
            return None;
        }
        let (tx, rx) = oneshot::channel();
        self.diff_updated_futures.push(tx);
        Some(rx)
    }

    /// Stores the new base text(s), normalizing line endings, marks the
    /// corresponding dirty flags, and schedules a recalculation. The
    /// returned receiver fires when that recalculation completes.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: DiffBasesChange,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        match diff_bases_change {
            DiffBasesChange::SetIndex(index) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::new(index)
                });
                self.index_changed = true;
            }
            DiffBasesChange::SetHead(head) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::new(head)
                });
                self.head_changed = true;
            }
            DiffBasesChange::SetBoth(text) => {
                // Store one shared Arc in both fields; `recalculate_diffs`
                // uses pointer equality to detect index == HEAD.
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::new(text)
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            DiffBasesChange::SetEach { index, head } => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::new(index)
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::new(head)
                });
                self.head_changed = true;
            }
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged and/or uncommitted diffs (whichever are still
    /// alive) against `buffer` on a spawned task, replacing any previously
    /// scheduled recalculation. Returns a receiver resolved on completion.
    fn recalculate_diffs(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        log::debug!("recalculate diffs");
        let (tx, rx) = oneshot::channel();
        self.diff_updated_futures.push(tx);

        // Snapshot all inputs up front so the spawned task sees a consistent
        // view even if `self` changes while it runs.
        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        // Pointer equality suffices: `SetBoth` stores the same Arc in both
        // fields, so no content comparison is needed.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };
        self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move {
            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    BufferDiff::update_diff(
                        unstaged_diff.clone(),
                        buffer.clone(),
                        index,
                        index_changed,
                        language_changed,
                        language.clone(),
                        language_registry.clone(),
                        &mut cx,
                    )
                    .await?,
                );
            }

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                // When the index matches HEAD the uncommitted diff equals
                // the unstaged one, so reuse it instead of recomputing.
                new_uncommitted_diff = if index_matches_head {
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        BufferDiff::update_diff(
                            uncommitted_diff.clone(),
                            buffer.clone(),
                            head,
                            head_changed,
                            language_changed,
                            language.clone(),
                            language_registry.clone(),
                            &mut cx,
                        )
                        .await?,
                    )
                }
            }

            // Install the new unstaged snapshot first; the range it reports
            // as changed is forwarded to the uncommitted diff's update.
            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                unstaged_diff.update(&mut cx, |diff, cx| {
                    diff.set_snapshot(&buffer, new_unstaged_diff, language_changed, None, cx)
                })?
            } else {
                None
            };

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff.update(&mut cx, |uncommitted_diff, cx| {
                    uncommitted_diff.set_snapshot(
                        &buffer,
                        new_uncommitted_diff,
                        language_changed,
                        unstaged_changed_range,
                        cx,
                    );
                })?;
            }

            // Clear the dirty flags and wake everyone waiting on this
            // recalculation.
            if let Some(this) = this.upgrade() {
                this.update(&mut cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    for tx in this.diff_updated_futures.drain(..) {
                        tx.send(()).ok();
                    }
                })?;
            }

            Ok(())
        }));

        rx
    }
}
297
/// Whether this store operates on local files directly or proxies buffer
/// operations to an upstream peer over RPC.
enum BufferStoreState {
    Local(LocalBufferStore),
    Remote(RemoteBufferStore),
}
302
/// Buffer-store backend for remote (shared/collab) projects.
struct RemoteBufferStore {
    // Buffers received from peers, retained to avoid races (see
    // `handle_create_buffer_for_peer`); only populated for collab sessions.
    shared_with_me: HashSet<Entity<Buffer>>,
    upstream_client: AnyProtoClient,
    project_id: u64,
    // Buffers whose state/chunks are still being streamed from the peer.
    loading_remote_buffers_by_id: HashMap<BufferId, Entity<Buffer>>,
    // Senders resolved (or failed) once the corresponding buffer finishes
    // loading; see `wait_for_remote_buffer`.
    remote_buffer_listeners:
        HashMap<BufferId, Vec<oneshot::Sender<Result<Entity<Buffer>, anyhow::Error>>>>,
    worktree_store: Entity<WorktreeStore>,
}
312
/// Buffer-store backend for local projects.
struct LocalBufferStore {
    // Indexes from project path / worktree entry id to buffer id, kept in
    // sync as files are renamed, created, and deleted.
    local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
    worktree_store: Entity<WorktreeStore>,
    // Keeps the worktree-store event subscription alive.
    _subscription: Subscription,
}
319
/// State of an entry in `BufferStore::opened_buffers`.
enum OpenBuffer {
    /// A fully-loaded buffer (held weakly) plus its git diff state.
    Complete {
        buffer: WeakEntity<Buffer>,
        diff_state: Entity<BufferDiffState>,
    },
    /// Operations queued for a buffer — NOTE(review): presumably buffered
    /// until the buffer itself is available; confirm against the callers
    /// elsewhere in this file.
    Operations(Vec<Operation>),
}
327
/// Events emitted by [`BufferStore`].
pub enum BufferStoreEvent {
    /// A buffer was added to the store.
    BufferAdded(Entity<Buffer>),
    /// A diff entity was created for a buffer.
    BufferDiffAdded(Entity<BufferDiff>),
    /// A buffer was dropped and removed from the store.
    BufferDropped(BufferId),
    /// A buffer's file moved to a new path; `old_file` is the previous file.
    BufferChangedFilePath {
        buffer: Entity<Buffer>,
        old_file: Option<Arc<dyn language::File>>,
    },
}
337
/// A set of edit transactions produced by one project-wide operation,
/// keyed by the buffer each transaction applies to.
#[derive(Default, Debug)]
pub struct ProjectTransaction(pub HashMap<Entity<Buffer>, language::Transaction>);
340
341impl EventEmitter<BufferStoreEvent> for BufferStore {}
342
impl RemoteBufferStore {
    /// Requests the staged (index) text for a buffer from the upstream peer.
    /// `None` in the result means there is no staged text for the file.
    fn open_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Task<Result<Option<String>>> {
        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.background_spawn(async move {
            let response = client
                .request(proto::OpenUnstagedDiff {
                    project_id,
                    buffer_id: buffer_id.to_proto(),
                })
                .await?;
            Ok(response.staged_text)
        })
    }

    /// Requests the diff bases (index and HEAD texts) for a buffer from the
    /// upstream peer, translating the response mode into a `DiffBasesChange`.
    fn open_uncommitted_diff(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        use proto::open_uncommitted_diff_response::Mode;

        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.background_spawn(async move {
            let response = client
                .request(proto::OpenUncommittedDiff {
                    project_id,
                    buffer_id: buffer_id.to_proto(),
                })
                .await?;
            let mode = Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
            let bases = match mode {
                Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                Mode::IndexAndHead => DiffBasesChange::SetEach {
                    head: response.committed_text,
                    index: response.staged_text,
                },
            };
            Ok(bases)
        })
    }

    /// Resolves once the buffer with `id` is available locally: immediately
    /// if it is already open, otherwise when the peer finishes streaming it.
    pub fn wait_for_remote_buffer(
        &mut self,
        id: BufferId,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        // Register the listener *before* checking for the buffer, so a
        // buffer that completes in between is not missed.
        let (tx, rx) = oneshot::channel();
        self.remote_buffer_listeners.entry(id).or_default().push(tx);

        cx.spawn(|this, cx| async move {
            if let Some(buffer) = this
                .read_with(&cx, |buffer_store, _| buffer_store.get(id))
                .ok()
                .flatten()
            {
                return Ok(buffer);
            }

            cx.background_spawn(async move { rx.await? }).await
        })
    }

    /// Asks the upstream peer to save the buffer (optionally at a new path),
    /// then marks the local buffer saved with the returned version and mtime.
    fn save_remote_buffer(
        &self,
        buffer_handle: Entity<Buffer>,
        new_path: Option<proto::ProjectPath>,
        cx: &Context<BufferStore>,
    ) -> Task<Result<()>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id().into();
        let version = buffer.version();
        let rpc = self.upstream_client.clone();
        let project_id = self.project_id;
        cx.spawn(move |_, mut cx| async move {
            let response = rpc
                .request(proto::SaveBuffer {
                    project_id,
                    buffer_id,
                    new_path,
                    version: serialize_version(&version),
                })
                .await?;
            let version = deserialize_version(&response.version);
            let mtime = response.mtime.map(|mtime| mtime.into());

            buffer_handle.update(&mut cx, |buffer, cx| {
                buffer.did_save(version.clone(), mtime, cx);
            })?;

            Ok(())
        })
    }

    /// Handles one message of the buffer-streaming protocol: either the
    /// initial `State` (creates the buffer and parks it in
    /// `loading_remote_buffers_by_id`) or a `Chunk` of operations. Returns
    /// the buffer once the final chunk has been applied; errors are
    /// propagated to any registered listeners.
    pub fn handle_create_buffer_for_peer(
        &mut self,
        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
        replica_id: u16,
        capability: Capability,
        cx: &mut Context<BufferStore>,
    ) -> Result<Option<Entity<Buffer>>> {
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("missing variant"))?
        {
            proto::create_buffer_for_peer::Variant::State(mut state) => {
                let buffer_id = BufferId::new(state.id)?;

                let buffer_result = maybe!({
                    // Resolve the buffer's file against a known worktree,
                    // if the state carries one.
                    let mut buffer_file = None;
                    if let Some(file) = state.file.take() {
                        let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id);
                        let worktree = self
                            .worktree_store
                            .read(cx)
                            .worktree_for_id(worktree_id, cx)
                            .ok_or_else(|| {
                                anyhow!("no worktree found for id {}", file.worktree_id)
                            })?;
                        buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
                            as Arc<dyn language::File>);
                    }
                    Buffer::from_proto(replica_id, capability, state, buffer_file)
                });

                match buffer_result {
                    Ok(buffer) => {
                        let buffer = cx.new(|_| buffer);
                        self.loading_remote_buffers_by_id.insert(buffer_id, buffer);
                    }
                    Err(error) => {
                        // Fail all waiters; the buffer will never arrive.
                        if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
                            for listener in listeners {
                                listener.send(Err(anyhow!(error.cloned()))).ok();
                            }
                        }
                    }
                }
            }
            proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
                let buffer_id = BufferId::new(chunk.buffer_id)?;
                let buffer = self
                    .loading_remote_buffers_by_id
                    .get(&buffer_id)
                    .cloned()
                    .ok_or_else(|| {
                        anyhow!(
                            "received chunk for buffer {} without initial state",
                            chunk.buffer_id
                        )
                    })?;

                let result = maybe!({
                    let operations = chunk
                        .operations
                        .into_iter()
                        .map(language::proto::deserialize_operation)
                        .collect::<Result<Vec<_>>>()?;
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx));
                    anyhow::Ok(())
                });

                if let Err(error) = result {
                    // A bad chunk aborts the whole load and fails waiters.
                    self.loading_remote_buffers_by_id.remove(&buffer_id);
                    if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
                        for listener in listeners {
                            listener.send(Err(error.cloned())).ok();
                        }
                    }
                } else if chunk.is_last {
                    self.loading_remote_buffers_by_id.remove(&buffer_id);
                    if self.upstream_client.is_via_collab() {
                        // retain buffers sent by peers to avoid races.
                        self.shared_with_me.insert(buffer.clone());
                    }

                    if let Some(senders) = self.remote_buffer_listeners.remove(&buffer_id) {
                        for sender in senders {
                            sender.send(Ok(buffer.clone())).ok();
                        }
                    }
                    return Ok(Some(buffer));
                }
            }
        }
        return Ok(None);
    }

    /// Ids of buffers whose streaming from the peer has not yet completed.
    pub fn incomplete_buffer_ids(&self) -> Vec<BufferId> {
        self.loading_remote_buffers_by_id
            .keys()
            .copied()
            .collect::<Vec<_>>()
    }

    /// Reconstructs a `ProjectTransaction` from its wire representation,
    /// waiting for each referenced buffer and its edits to arrive, and
    /// optionally pushing each transaction onto the buffer's undo history.
    pub fn deserialize_project_transaction(
        &self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
            {
                let buffer_id = BufferId::new(buffer_id)?;
                let buffer = this
                    .update(&mut cx, |this, cx| {
                        this.wait_for_remote_buffer(buffer_id, cx)
                    })?
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            // The transactions reference edits that may still be in flight;
            // wait for them before (optionally) recording history.
            for (buffer, transaction) in &project_transaction.0 {
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })?
                    .await?;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    })?;
                }
            }

            Ok(project_transaction)
        })
    }

    /// Opens a buffer by path via the upstream peer, then waits for the
    /// buffer's contents to be streamed to us.
    fn open_buffer(
        &self,
        path: Arc<Path>,
        worktree: Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let worktree_id = worktree.read(cx).id().to_proto();
        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.spawn(move |this, mut cx| async move {
            let response = client
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id,
                    path: path.to_proto(),
                })
                .await?;
            let buffer_id = BufferId::new(response.buffer_id)?;

            let buffer = this
                .update(&mut cx, {
                    |this, cx| this.wait_for_remote_buffer(buffer_id, cx)
                })?
                .await?;

            Ok(buffer)
        })
    }

    /// Asks the upstream peer to create a new empty buffer, then waits for
    /// it to be streamed to us.
    fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
        let create = self.upstream_client.request(proto::OpenNewBuffer {
            project_id: self.project_id,
        });
        cx.spawn(|this, mut cx| async move {
            let response = create.await?;
            let buffer_id = BufferId::new(response.buffer_id)?;

            this.update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(buffer_id, cx)
            })?
            .await
        })
    }

    /// Asks the upstream peer to reload the given buffers from disk and
    /// returns the resulting project transaction.
    fn reload_buffers(
        &self,
        buffers: HashSet<Entity<Buffer>>,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        let request = self.upstream_client.request(proto::ReloadBuffers {
            project_id: self.project_id,
            buffer_ids: buffers
                .iter()
                .map(|buffer| buffer.read(cx).remote_id().to_proto())
                .collect(),
        });

        cx.spawn(|this, mut cx| async move {
            let response = request
                .await?
                .transaction
                .ok_or_else(|| anyhow!("missing transaction"))?;
            this.update(&mut cx, |this, cx| {
                this.deserialize_project_transaction(response, push_to_history, cx)
            })?
            .await
        })
    }
}
648
649impl LocalBufferStore {
650 fn worktree_for_buffer(
651 &self,
652 buffer: &Entity<Buffer>,
653 cx: &App,
654 ) -> Option<(Entity<Worktree>, Arc<Path>)> {
655 let file = buffer.read(cx).file()?;
656 let worktree_id = file.worktree_id(cx);
657 let path = file.path().clone();
658 let worktree = self
659 .worktree_store
660 .read(cx)
661 .worktree_for_id(worktree_id, cx)?;
662 Some((worktree, path))
663 }
664
665 fn load_staged_text(&self, buffer: &Entity<Buffer>, cx: &App) -> Task<Result<Option<String>>> {
666 if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) {
667 worktree.read(cx).load_staged_file(path.as_ref(), cx)
668 } else {
669 return Task::ready(Err(anyhow!("no such worktree")));
670 }
671 }
672
673 fn load_committed_text(
674 &self,
675 buffer: &Entity<Buffer>,
676 cx: &App,
677 ) -> Task<Result<Option<String>>> {
678 if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) {
679 worktree.read(cx).load_committed_file(path.as_ref(), cx)
680 } else {
681 Task::ready(Err(anyhow!("no such worktree")))
682 }
683 }
684
    /// Writes the buffer's contents to `path` in `worktree`, then notifies
    /// the downstream client (if the project is shared) and marks the buffer
    /// saved with the new version and mtime.
    fn save_local_buffer(
        &self,
        buffer_handle: Entity<Buffer>,
        worktree: Entity<Worktree>,
        path: Arc<Path>,
        mut has_changed_file: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<()>> {
        let buffer = buffer_handle.read(cx);

        let text = buffer.as_rope().clone();
        let line_ending = buffer.line_ending();
        let version = buffer.version();
        let buffer_id = buffer.remote_id();
        // Saving a brand-new file always counts as a file change, so the
        // downstream client gets an UpdateBufferFile message.
        if buffer
            .file()
            .is_some_and(|file| file.disk_state() == DiskState::New)
        {
            has_changed_file = true;
        }

        let save = worktree.update(cx, |worktree, cx| {
            worktree.write_file(path.as_ref(), text, line_ending, cx)
        });

        cx.spawn(move |this, mut cx| async move {
            let new_file = save.await?;
            let mtime = new_file.disk_state().mtime();
            // Notify the downstream client before updating the buffer, so
            // the file update precedes the saved notification.
            this.update(&mut cx, |this, cx| {
                if let Some((downstream_client, project_id)) = this.downstream_client.clone() {
                    if has_changed_file {
                        downstream_client
                            .send(proto::UpdateBufferFile {
                                project_id,
                                buffer_id: buffer_id.to_proto(),
                                file: Some(language::File::to_proto(&*new_file, cx)),
                            })
                            .log_err();
                    }
                    downstream_client
                        .send(proto::BufferSaved {
                            project_id,
                            buffer_id: buffer_id.to_proto(),
                            version: serialize_version(&version),
                            mtime: mtime.map(|time| time.into()),
                        })
                        .log_err();
                }
            })?;
            buffer_handle.update(&mut cx, |buffer, cx| {
                if has_changed_file {
                    buffer.file_updated(new_file, cx);
                }
                buffer.did_save(version.clone(), mtime, cx);
            })
        })
    }
742
    /// Subscribes to a worktree's events so that entry changes and git
    /// repository changes in *local* worktrees are reflected in open buffers.
    /// The subscription lives for the lifetime of the BufferStore entity.
    fn subscribe_to_worktree(
        &mut self,
        worktree: &Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) {
        cx.subscribe(worktree, |this, worktree, event, cx| {
            // Remote worktrees are driven by the upstream peer instead.
            if worktree.read(cx).is_local() {
                match event {
                    worktree::Event::UpdatedEntries(changes) => {
                        Self::local_worktree_entries_changed(this, &worktree, changes, cx);
                    }
                    worktree::Event::UpdatedGitRepositories(updated_repos) => {
                        Self::local_worktree_git_repos_changed(
                            this,
                            worktree.clone(),
                            updated_repos,
                            cx,
                        )
                    }
                    _ => {}
                }
            }
        })
        .detach();
    }
768
769 fn local_worktree_entries_changed(
770 this: &mut BufferStore,
771 worktree_handle: &Entity<Worktree>,
772 changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
773 cx: &mut Context<BufferStore>,
774 ) {
775 let snapshot = worktree_handle.read(cx).snapshot();
776 for (path, entry_id, _) in changes {
777 Self::local_worktree_entry_changed(
778 this,
779 *entry_id,
780 path,
781 worktree_handle,
782 &snapshot,
783 cx,
784 );
785 }
786 }
787
    /// Reacts to git repository changes in a local worktree: reloads the
    /// index/HEAD base texts for every open buffer under a changed repo,
    /// applies the changes to each buffer's diff state, and mirrors the new
    /// bases to the downstream client when the project is shared.
    fn local_worktree_git_repos_changed(
        this: &mut BufferStore,
        worktree_handle: Entity<Worktree>,
        changed_repos: &UpdatedGitRepositoriesSet,
        cx: &mut Context<BufferStore>,
    ) {
        debug_assert!(worktree_handle.read(cx).is_local());

        // Phase 1 (sync): collect the open buffers in this worktree that
        // live under one of the changed repositories, along with their
        // current base texts for change detection.
        let mut diff_state_updates = Vec::new();
        for buffer in this.opened_buffers.values() {
            let OpenBuffer::Complete { buffer, diff_state } = buffer else {
                continue;
            };
            let Some(buffer) = buffer.upgrade() else {
                continue;
            };
            let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
                continue;
            };
            if file.worktree != worktree_handle {
                continue;
            }
            let diff_state = diff_state.read(cx);
            if changed_repos
                .iter()
                .any(|(work_dir, _)| file.path.starts_with(work_dir))
            {
                // Only reload base texts for diffs somebody still holds.
                let has_unstaged_diff = diff_state
                    .unstaged_diff
                    .as_ref()
                    .is_some_and(|diff| diff.is_upgradable());
                let has_uncommitted_diff = diff_state
                    .uncommitted_diff
                    .as_ref()
                    .is_some_and(|set| set.is_upgradable());
                diff_state_updates.push((
                    buffer,
                    file.path.clone(),
                    has_unstaged_diff.then(|| diff_state.index_text.clone()),
                    has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                ));
            }
        }

        if diff_state_updates.is_empty() {
            return;
        }

        cx.spawn(move |this, mut cx| async move {
            let snapshot =
                worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
            // Phase 2 (async): load the new base texts from git.
            let diff_bases_changes_by_buffer = cx
                .spawn(async move |cx| {
                    let mut results = Vec::new();
                    for (buffer, path, current_index_text, current_head_text) in diff_state_updates
                    {
                        let Some(local_repo) = snapshot.local_repo_for_path(&path) else {
                            continue;
                        };
                        let Some(relative_path) = local_repo.relativize(&path).ok() else {
                            continue;
                        };
                        let index_text = if current_index_text.is_some() {
                            local_repo
                                .repo()
                                .load_index_text(relative_path.clone(), cx.clone())
                                .await
                        } else {
                            None
                        };
                        let head_text = if current_head_text.is_some() {
                            local_repo
                                .repo()
                                .load_committed_text(relative_path, cx.clone())
                                .await
                        } else {
                            None
                        };

                        // Avoid triggering a diff update if the base text has not changed.
                        if let Some((current_index, current_head)) =
                            current_index_text.as_ref().zip(current_head_text.as_ref())
                        {
                            if current_index.as_deref() == index_text.as_ref()
                                && current_head.as_deref() == head_text.as_ref()
                            {
                                continue;
                            }
                        }

                        // Describe exactly which bases this buffer needs
                        // updated, based on which diffs are alive.
                        let diff_bases_change =
                            match (current_index_text.is_some(), current_head_text.is_some()) {
                                (true, true) => Some(if index_text == head_text {
                                    DiffBasesChange::SetBoth(head_text)
                                } else {
                                    DiffBasesChange::SetEach {
                                        index: index_text,
                                        head: head_text,
                                    }
                                }),
                                (true, false) => Some(DiffBasesChange::SetIndex(index_text)),
                                (false, true) => Some(DiffBasesChange::SetHead(head_text)),
                                (false, false) => None,
                            };

                        results.push((buffer, diff_bases_change))
                    }

                    results
                })
                .await;

            // Phase 3 (back on the entity): apply each change and mirror it
            // downstream when shared.
            this.update(&mut cx, |this, cx| {
                for (buffer, diff_bases_change) in diff_bases_changes_by_buffer {
                    let Some(OpenBuffer::Complete { diff_state, .. }) =
                        this.opened_buffers.get_mut(&buffer.read(cx).remote_id())
                    else {
                        continue;
                    };
                    let Some(diff_bases_change) = diff_bases_change else {
                        continue;
                    };

                    diff_state.update(cx, |diff_state, cx| {
                        use proto::update_diff_bases::Mode;

                        let buffer = buffer.read(cx);
                        if let Some((client, project_id)) = this.downstream_client.as_ref() {
                            let buffer_id = buffer.remote_id().to_proto();
                            let (staged_text, committed_text, mode) = match diff_bases_change
                                .clone()
                            {
                                DiffBasesChange::SetIndex(index) => (index, None, Mode::IndexOnly),
                                DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                DiffBasesChange::SetEach { index, head } => {
                                    (index, head, Mode::IndexAndHead)
                                }
                                DiffBasesChange::SetBoth(text) => {
                                    (None, text, Mode::IndexMatchesHead)
                                }
                            };
                            let message = proto::UpdateDiffBases {
                                project_id: *project_id,
                                buffer_id,
                                staged_text,
                                committed_text,
                                mode: mode as i32,
                            };

                            client.send(message).log_err();
                        }

                        // Receiver dropped; nothing waits on this update here.
                        let _ = diff_state.diff_bases_changed(
                            buffer.text_snapshot(),
                            diff_bases_change,
                            cx,
                        );
                    });
                }
            })
        })
        .detach_and_log_err(cx);
    }
951
    /// Reconciles one changed worktree entry with the buffer (if any) that
    /// is open for it: updates the buffer's `File` (path, entry id, disk
    /// state), keeps the path/entry-id indices in sync, notifies the
    /// downstream client, and emits `BufferChangedFilePath` on renames.
    /// Returns `None` in all cases; the `Option` is only used for `?`.
    fn local_worktree_entry_changed(
        this: &mut BufferStore,
        entry_id: ProjectEntryId,
        path: &Arc<Path>,
        worktree: &Entity<worktree::Worktree>,
        snapshot: &worktree::Snapshot,
        cx: &mut Context<BufferStore>,
    ) -> Option<()> {
        let project_path = ProjectPath {
            worktree_id: snapshot.id(),
            path: path.clone(),
        };

        // Find the affected buffer via the entry-id index, falling back to
        // the path index.
        let buffer_id = {
            let local = this.as_local_mut()?;
            match local.local_buffer_ids_by_entry_id.get(&entry_id) {
                Some(&buffer_id) => buffer_id,
                None => local.local_buffer_ids_by_path.get(&project_path).copied()?,
            }
        };

        let buffer = if let Some(buffer) = this.get(buffer_id) {
            Some(buffer)
        } else {
            // The buffer was dropped; discard its store entry.
            this.opened_buffers.remove(&buffer_id);
            None
        };

        let buffer = if let Some(buffer) = buffer {
            buffer
        } else {
            // No live buffer: clean up the stale index entries and stop.
            let this = this.as_local_mut()?;
            this.local_buffer_ids_by_path.remove(&project_path);
            this.local_buffer_ids_by_entry_id.remove(&entry_id);
            return None;
        };

        let events = buffer.update(cx, |buffer, cx| {
            let local = this.as_local_mut()?;
            let file = buffer.file()?;
            let old_file = File::from_dyn(Some(file))?;
            if old_file.worktree != *worktree {
                return None;
            }

            // Locate the entry in the new snapshot by id first, then by path.
            let snapshot_entry = old_file
                .entry_id
                .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                .or_else(|| snapshot.entry_for_path(old_file.path.as_ref()));

            // Build the buffer's new File: either reflecting the entry still
            // present in the snapshot, or marking the file as deleted.
            let new_file = if let Some(entry) = snapshot_entry {
                File {
                    disk_state: match entry.mtime {
                        Some(mtime) => DiskState::Present { mtime },
                        None => old_file.disk_state,
                    },
                    is_local: true,
                    entry_id: Some(entry.id),
                    path: entry.path.clone(),
                    worktree: worktree.clone(),
                    is_private: entry.is_private,
                }
            } else {
                File {
                    disk_state: DiskState::Deleted,
                    is_local: true,
                    entry_id: old_file.entry_id,
                    path: old_file.path.clone(),
                    worktree: worktree.clone(),
                    is_private: old_file.is_private,
                }
            };

            if new_file == *old_file {
                return None;
            }

            let mut events = Vec::new();
            if new_file.path != old_file.path {
                // Rename: move the path-index entry and record an event.
                local.local_buffer_ids_by_path.remove(&ProjectPath {
                    path: old_file.path.clone(),
                    worktree_id: old_file.worktree_id(cx),
                });
                local.local_buffer_ids_by_path.insert(
                    ProjectPath {
                        worktree_id: new_file.worktree_id(cx),
                        path: new_file.path.clone(),
                    },
                    buffer_id,
                );
                events.push(BufferStoreEvent::BufferChangedFilePath {
                    buffer: cx.entity(),
                    old_file: buffer.file().cloned(),
                });
            }

            if new_file.entry_id != old_file.entry_id {
                // Entry id changed (e.g. delete/recreate): fix the index.
                if let Some(entry_id) = old_file.entry_id {
                    local.local_buffer_ids_by_entry_id.remove(&entry_id);
                }
                if let Some(entry_id) = new_file.entry_id {
                    local
                        .local_buffer_ids_by_entry_id
                        .insert(entry_id, buffer_id);
                }
            }

            if let Some((client, project_id)) = &this.downstream_client {
                client
                    .send(proto::UpdateBufferFile {
                        project_id: *project_id,
                        buffer_id: buffer_id.to_proto(),
                        file: Some(new_file.to_proto(cx)),
                    })
                    .ok();
            }

            buffer.file_updated(Arc::new(new_file), cx);
            Some(events)
        })?;

        // Emit events after releasing the buffer update borrow.
        for event in events {
            cx.emit(event);
        }

        None
    }
1079
1080 fn buffer_changed_file(&mut self, buffer: Entity<Buffer>, cx: &mut App) -> Option<()> {
1081 let file = File::from_dyn(buffer.read(cx).file())?;
1082
1083 let remote_id = buffer.read(cx).remote_id();
1084 if let Some(entry_id) = file.entry_id {
1085 match self.local_buffer_ids_by_entry_id.get(&entry_id) {
1086 Some(_) => {
1087 return None;
1088 }
1089 None => {
1090 self.local_buffer_ids_by_entry_id
1091 .insert(entry_id, remote_id);
1092 }
1093 }
1094 };
1095 self.local_buffer_ids_by_path.insert(
1096 ProjectPath {
1097 worktree_id: file.worktree_id(cx),
1098 path: file.path.clone(),
1099 },
1100 remote_id,
1101 );
1102
1103 Some(())
1104 }
1105
1106 fn save_buffer(
1107 &self,
1108 buffer: Entity<Buffer>,
1109 cx: &mut Context<BufferStore>,
1110 ) -> Task<Result<()>> {
1111 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1112 return Task::ready(Err(anyhow!("buffer doesn't have a file")));
1113 };
1114 let worktree = file.worktree.clone();
1115 self.save_local_buffer(buffer, worktree, file.path.clone(), false, cx)
1116 }
1117
1118 fn save_buffer_as(
1119 &self,
1120 buffer: Entity<Buffer>,
1121 path: ProjectPath,
1122 cx: &mut Context<BufferStore>,
1123 ) -> Task<Result<()>> {
1124 let Some(worktree) = self
1125 .worktree_store
1126 .read(cx)
1127 .worktree_for_id(path.worktree_id, cx)
1128 else {
1129 return Task::ready(Err(anyhow!("no such worktree")));
1130 };
1131 self.save_local_buffer(buffer, worktree, path.path.clone(), true, cx)
1132 }
1133
    /// Loads the file at `path` in `worktree` into a new buffer, registers
    /// the buffer with the store, and indexes it by path and entry id.
    /// A missing file yields an empty buffer whose disk state is `New`.
    fn open_buffer(
        &self,
        path: Arc<Path>,
        worktree: Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let load_file = worktree.load_file(path.as_ref(), cx);
            // Reserve the entity slot now so the buffer id (derived from the
            // entity id) is known before the file finishes loading.
            let reservation = cx.reserve_entity();
            let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
            cx.spawn(move |_, mut cx| async move {
                let loaded = load_file.await?;
                // Construct the text buffer off the main thread.
                let text_buffer = cx
                    .background_spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) })
                    .await;
                cx.insert_entity(reservation, |_| {
                    Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
                })
            })
        });

        cx.spawn(move |this, mut cx| async move {
            let buffer = match load_buffer.await {
                Ok(buffer) => Ok(buffer),
                // A not-found error becomes an empty, unsaved buffer at the
                // requested path.
                Err(error) if is_not_found_error(&error) => cx.new(|cx| {
                    let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
                    let text_buffer = text::Buffer::new(0, buffer_id, "".into());
                    Buffer::build(
                        text_buffer,
                        Some(Arc::new(File {
                            worktree,
                            path,
                            disk_state: DiskState::New,
                            entry_id: None,
                            is_local: true,
                            is_private: false,
                        })),
                        Capability::ReadWrite,
                    )
                }),
                Err(e) => Err(e),
            }?;
            this.update(&mut cx, |this, cx| {
                this.add_buffer(buffer.clone(), cx)?;
                // Record the new buffer in the local path/entry-id indices.
                let buffer_id = buffer.read(cx).remote_id();
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    let this = this.as_local_mut().unwrap();
                    this.local_buffer_ids_by_path.insert(
                        ProjectPath {
                            worktree_id: file.worktree_id(cx),
                            path: file.path.clone(),
                        },
                        buffer_id,
                    );

                    if let Some(entry_id) = file.entry_id {
                        this.local_buffer_ids_by_entry_id
                            .insert(entry_id, buffer_id);
                    }
                }

                anyhow::Ok(())
            })??;

            Ok(buffer)
        })
    }
1201
1202 fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
1203 cx.spawn(|buffer_store, mut cx| async move {
1204 let buffer =
1205 cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx))?;
1206 buffer_store.update(&mut cx, |buffer_store, cx| {
1207 buffer_store.add_buffer(buffer.clone(), cx).log_err();
1208 })?;
1209 Ok(buffer)
1210 })
1211 }
1212
    /// Reloads each buffer from disk, collecting the resulting transactions
    /// into a single `ProjectTransaction`.
    ///
    /// When `push_to_history` is false, the reload transaction is forgotten
    /// so it cannot be undone from the buffer's history.
    fn reload_buffers(
        &self,
        buffers: HashSet<Entity<Buffer>>,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(move |_, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            // Reload buffers one at a time, awaiting each before the next.
            for buffer in buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))?
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    // A reload may produce no transaction (e.g. no changes).
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.entity(), transaction);
                    }
                })?;
            }

            Ok(project_transaction)
        })
    }
1238}
1239
1240impl BufferStore {
    /// Registers the buffer-related RPC handlers on `client`, routing
    /// incoming messages and requests to the corresponding `BufferStore`
    /// entity methods. Call once during startup.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_request_handler(Self::handle_save_buffer);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_request_handler(Self::handle_reload_buffers);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
    }
1253
1254 /// Creates a buffer store, optionally retaining its buffers.
1255 pub fn local(worktree_store: Entity<WorktreeStore>, cx: &mut Context<Self>) -> Self {
1256 Self {
1257 state: BufferStoreState::Local(LocalBufferStore {
1258 local_buffer_ids_by_path: Default::default(),
1259 local_buffer_ids_by_entry_id: Default::default(),
1260 worktree_store: worktree_store.clone(),
1261 _subscription: cx.subscribe(&worktree_store, |this, _, event, cx| {
1262 if let WorktreeStoreEvent::WorktreeAdded(worktree) = event {
1263 let this = this.as_local_mut().unwrap();
1264 this.subscribe_to_worktree(worktree, cx);
1265 }
1266 }),
1267 }),
1268 downstream_client: None,
1269 opened_buffers: Default::default(),
1270 shared_buffers: Default::default(),
1271 loading_buffers: Default::default(),
1272 loading_diffs: Default::default(),
1273 worktree_store,
1274 }
1275 }
1276
1277 pub fn remote(
1278 worktree_store: Entity<WorktreeStore>,
1279 upstream_client: AnyProtoClient,
1280 remote_id: u64,
1281 _cx: &mut Context<Self>,
1282 ) -> Self {
1283 Self {
1284 state: BufferStoreState::Remote(RemoteBufferStore {
1285 shared_with_me: Default::default(),
1286 loading_remote_buffers_by_id: Default::default(),
1287 remote_buffer_listeners: Default::default(),
1288 project_id: remote_id,
1289 upstream_client,
1290 worktree_store: worktree_store.clone(),
1291 }),
1292 downstream_client: None,
1293 opened_buffers: Default::default(),
1294 loading_buffers: Default::default(),
1295 loading_diffs: Default::default(),
1296 shared_buffers: Default::default(),
1297 worktree_store,
1298 }
1299 }
1300
1301 fn as_local_mut(&mut self) -> Option<&mut LocalBufferStore> {
1302 match &mut self.state {
1303 BufferStoreState::Local(state) => Some(state),
1304 _ => None,
1305 }
1306 }
1307
1308 fn as_remote_mut(&mut self) -> Option<&mut RemoteBufferStore> {
1309 match &mut self.state {
1310 BufferStoreState::Remote(state) => Some(state),
1311 _ => None,
1312 }
1313 }
1314
1315 fn as_remote(&self) -> Option<&RemoteBufferStore> {
1316 match &self.state {
1317 BufferStoreState::Remote(state) => Some(state),
1318 _ => None,
1319 }
1320 }
1321
    /// Returns the buffer for `project_path`, opening it if necessary.
    ///
    /// Concurrent calls for the same path share a single load through the
    /// `loading_buffers` map, so a file is never loaded twice in parallel.
    pub fn open_buffer(
        &mut self,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        // Fast path: the buffer is already open.
        if let Some(buffer) = self.get_by_path(&project_path, cx) {
            return Task::ready(Ok(buffer));
        }

        let task = match self.loading_buffers.entry(project_path.clone()) {
            // Another caller is already loading this path; share its task.
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                let path = project_path.path.clone();
                let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(project_path.worktree_id, cx)
                else {
                    return Task::ready(Err(anyhow!("no such worktree")));
                };
                let load_buffer = match &self.state {
                    BufferStoreState::Local(this) => this.open_buffer(path, worktree, cx),
                    BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, mut cx| async move {
                            let load_result = load_buffer.await;
                            this.update(&mut cx, |this, _cx| {
                                // Record the fact that the buffer is no longer loading.
                                this.loading_buffers.remove(&project_path);
                            })
                            .ok();
                            // Errors are wrapped in `Arc` so the shared future
                            // can be polled by multiple awaiters.
                            load_result.map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1366
    /// Returns the unstaged diff for `buffer`, computing it if necessary.
    ///
    /// Concurrent requests for the same buffer share one loading task via
    /// `loading_diffs`.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: a live unstaged diff already exists for this buffer.
        if let Some(OpenBuffer::Complete { diff_state, .. }) = self.opened_buffers.get(&buffer_id) {
            if let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                // If a recalculation is in flight, wait for it so the
                // returned diff reflects the latest state.
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await?;
                        Ok(unstaged_diff)
                    });
                }
                return Task::ready(Ok(unstaged_diff));
            }
        }

        let task = match self.loading_diffs.entry((buffer_id, DiffKind::Unstaged)) {
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                // Locally, read the staged text; remotely, ask the host.
                let staged_text = match &self.state {
                    BufferStoreState::Local(this) => this.load_staged_text(&buffer, cx),
                    BufferStoreState::Remote(this) => this.open_unstaged_diff(buffer_id, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, cx| async move {
                            Self::open_diff_internal(
                                this,
                                DiffKind::Unstaged,
                                staged_text.await.map(DiffBasesChange::SetIndex),
                                buffer,
                                cx,
                            )
                            .await
                            .map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1421
    /// Returns the uncommitted diff for `buffer`, computing it if necessary.
    ///
    /// Locally this loads both the committed and the staged text so the
    /// secondary (unstaged) diff can be derived; concurrent requests share
    /// one loading task via `loading_diffs`.
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live uncommitted diff already exists for this buffer.
        if let Some(OpenBuffer::Complete { diff_state, .. }) = self.opened_buffers.get(&buffer_id) {
            if let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
            {
                // If a recalculation is in flight, wait for it first.
                if let Some(task) =
                    diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
                {
                    return cx.background_executor().spawn(async move {
                        task.await?;
                        Ok(uncommitted_diff)
                    });
                }
                return Task::ready(Ok(uncommitted_diff));
            }
        }

        let task = match self.loading_diffs.entry((buffer_id, DiffKind::Uncommitted)) {
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                let changes = match &self.state {
                    BufferStoreState::Local(this) => {
                        let committed_text = this.load_committed_text(&buffer, cx);
                        let staged_text = this.load_staged_text(&buffer, cx);
                        cx.background_spawn(async move {
                            let committed_text = committed_text.await?;
                            let staged_text = staged_text.await?;
                            // When index and HEAD agree, one base text can
                            // serve both diffs.
                            let diff_bases_change = if committed_text == staged_text {
                                DiffBasesChange::SetBoth(committed_text)
                            } else {
                                DiffBasesChange::SetEach {
                                    index: staged_text,
                                    head: committed_text,
                                }
                            };
                            Ok(diff_bases_change)
                        })
                    }
                    BufferStoreState::Remote(this) => this.open_uncommitted_diff(buffer_id, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, cx| async move {
                            Self::open_diff_internal(
                                this,
                                DiffKind::Uncommitted,
                                changes.await,
                                buffer,
                                cx,
                            )
                            .await
                            .map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1493
    /// Shared tail of `open_unstaged_diff`/`open_uncommitted_diff`: records
    /// the loaded diff bases on the buffer's diff state and returns the new
    /// `BufferDiff` once its first recalculation has completed.
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        mut cx: AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the loading entry so a later call can retry.
                this.update(&mut cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(&mut cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            if let Some(OpenBuffer::Complete { diff_state, .. }) =
                this.opened_buffers.get_mut(&buffer_id)
            {
                let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                cx.emit(BufferStoreEvent::BufferDiffAdded(diff.clone()));
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.language = language;
                    diff_state.language_registry = language_registry;

                    match kind {
                        DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                        DiffKind::Uncommitted => {
                            // An uncommitted diff needs an unstaged diff as
                            // its secondary; create one if none is alive.
                            let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                                diff
                            } else {
                                let unstaged_diff =
                                    cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                                diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                                unstaged_diff
                            };

                            diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                            diff_state.uncommitted_diff = Some(diff.downgrade())
                        }
                    };

                    // Kick off the initial recalculation; the returned future
                    // resolves once it finishes.
                    let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, cx);

                    Ok(async move {
                        rx.await.ok();
                        Ok(diff)
                    })
                })
            } else {
                Err(anyhow!("buffer was closed"))
            }
        })??
        .await
    }
1560
1561 pub fn create_buffer(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<Buffer>>> {
1562 match &self.state {
1563 BufferStoreState::Local(this) => this.create_buffer(cx),
1564 BufferStoreState::Remote(this) => this.create_buffer(cx),
1565 }
1566 }
1567
1568 pub fn save_buffer(
1569 &mut self,
1570 buffer: Entity<Buffer>,
1571 cx: &mut Context<Self>,
1572 ) -> Task<Result<()>> {
1573 match &mut self.state {
1574 BufferStoreState::Local(this) => this.save_buffer(buffer, cx),
1575 BufferStoreState::Remote(this) => this.save_remote_buffer(buffer.clone(), None, cx),
1576 }
1577 }
1578
1579 pub fn save_buffer_as(
1580 &mut self,
1581 buffer: Entity<Buffer>,
1582 path: ProjectPath,
1583 cx: &mut Context<Self>,
1584 ) -> Task<Result<()>> {
1585 let old_file = buffer.read(cx).file().cloned();
1586 let task = match &self.state {
1587 BufferStoreState::Local(this) => this.save_buffer_as(buffer.clone(), path, cx),
1588 BufferStoreState::Remote(this) => {
1589 this.save_remote_buffer(buffer.clone(), Some(path.to_proto()), cx)
1590 }
1591 };
1592 cx.spawn(|this, mut cx| async move {
1593 task.await?;
1594 this.update(&mut cx, |_, cx| {
1595 cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
1596 })
1597 })
1598 }
1599
    /// Computes Git blame information for `buffer`, optionally at a given
    /// buffer `version`.
    ///
    /// Returns `Ok(None)` when the file is not inside a Git repository.
    /// For local worktrees the blame runs against the local repository; for
    /// remote worktrees it is requested from the host over RPC.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some(file) = File::from_dyn(buffer.file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        match file.worktree.clone().read(cx) {
            Worktree::Local(worktree) => {
                let worktree = worktree.snapshot();
                let blame_params = maybe!({
                    // Not being inside a repository is not an error —
                    // blame is simply unavailable.
                    let local_repo = match worktree.local_repo_for_path(&file.path) {
                        Some(repo_for_path) => repo_for_path,
                        None => return Ok(None),
                    };

                    let relative_path = local_repo
                        .relativize(&file.path)
                        .context("failed to relativize buffer path")?;

                    let repo = local_repo.repo().clone();

                    // Blame the requested historical version when given,
                    // otherwise the buffer's current contents.
                    let content = match version {
                        Some(version) => buffer.rope_for_version(&version).clone(),
                        None => buffer.as_rope().clone(),
                    };

                    anyhow::Ok(Some((repo, relative_path, content)))
                });

                cx.spawn(|cx| async move {
                    let Some((repo, relative_path, content)) = blame_params? else {
                        return Ok(None);
                    };
                    repo.blame(relative_path.clone(), content, cx)
                        .await
                        .with_context(|| format!("Failed to blame {:?}", relative_path.0))
                        .map(Some)
                })
            }
            Worktree::Remote(worktree) => {
                let buffer_id = buffer.remote_id();
                let version = buffer.version();
                let project_id = worktree.project_id();
                let client = worktree.client();
                cx.spawn(|_| async move {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id,
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                })
            }
        }
    }
1662
    /// Builds a permalink URL to `selection` (a line range) in `buffer` on
    /// its Git hosting provider.
    ///
    /// For local worktrees the link is built from the repository's remote
    /// URL and HEAD SHA; Rust files in the Cargo registry get a permalink
    /// derived from crate metadata. For remote worktrees the request is
    /// delegated to the host.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &App,
    ) -> Task<Result<url::Url>> {
        let buffer = buffer.read(cx);
        let Some(file) = File::from_dyn(buffer.file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        match file.worktree.read(cx) {
            Worktree::Local(worktree) => {
                let worktree_path = worktree.abs_path().clone();
                let Some((repo_entry, repo)) =
                    worktree.repository_for_path(file.path()).and_then(|entry| {
                        let repo = worktree.get_local_repo(&entry)?.repo().clone();
                        Some((entry, repo))
                    })
                else {
                    // If we're not in a Git repo, check whether this is a Rust source
                    // file in the Cargo registry (presumably opened with go-to-definition
                    // from a normal Rust file). If so, we can put together a permalink
                    // using crate metadata.
                    if buffer
                        .language()
                        .is_none_or(|lang| lang.name() != "Rust".into())
                    {
                        return Task::ready(Err(anyhow!("no permalink available")));
                    }
                    let file_path = worktree_path.join(file.path());
                    return cx.spawn(|cx| async move {
                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;
                        get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                            .map_err(|_| anyhow!("no permalink available"))
                    });
                };

                // Permalink paths are relative to the repository root.
                let path = match repo_entry.relativize(file.path()) {
                    Ok(RepoPath(path)) => path,
                    Err(e) => return Task::ready(Err(e)),
                };

                // Prefer the branch's upstream remote; fall back to "origin".
                let remote = repo_entry
                    .branch()
                    .and_then(|b| b.upstream.as_ref())
                    .and_then(|b| b.remote_name())
                    .unwrap_or("origin")
                    .to_string();

                cx.spawn(|cx| async move {
                    let origin_url = repo
                        .remote_url(&remote)
                        .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;

                    let sha = repo
                        .head_sha()
                        .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;

                    let provider_registry =
                        cx.update(GitHostingProviderRegistry::default_global)?;

                    let (provider, remote) =
                        parse_git_remote_url(provider_registry, &origin_url)
                            .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;

                    let path = path
                        .to_str()
                        .ok_or_else(|| anyhow!("failed to convert path to string"))?;

                    Ok(provider.build_permalink(
                        remote,
                        BuildPermalinkParams {
                            sha: &sha,
                            path,
                            selection: Some(selection),
                        },
                    ))
                })
            }
            Worktree::Remote(worktree) => {
                // Delegate to the host, which has access to the repository.
                let buffer_id = buffer.remote_id();
                let project_id = worktree.project_id();
                let client = worktree.client();
                cx.spawn(|_| async move {
                    let response = client
                        .request(proto::GetPermalinkToLine {
                            project_id,
                            buffer_id: buffer_id.into(),
                            selection: Some(proto::Range {
                                start: selection.start as u64,
                                end: selection.end as u64,
                            }),
                        })
                        .await?;

                    url::Url::parse(&response.permalink).context("failed to parse permalink")
                })
            }
        }
    }
1765
    /// Registers `buffer_entity` in the store, creating its diff state and
    /// wiring up event forwarding.
    ///
    /// Operations that arrived before the buffer existed
    /// (`OpenBuffer::Operations`) are applied now. Registering the same
    /// buffer twice is an error for local buffers but tolerated for remote
    /// ones.
    fn add_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) -> Result<()> {
        let buffer = buffer_entity.read(cx);
        let language = buffer.language().cloned();
        let language_registry = buffer.language_registry();
        let remote_id = buffer.remote_id();
        // Replica id 0 means this copy was created locally; any other
        // replica id means it came from a peer.
        let is_remote = buffer.replica_id() != 0;
        let open_buffer = OpenBuffer::Complete {
            buffer: buffer_entity.downgrade(),
            diff_state: cx.new(|_| BufferDiffState {
                language,
                language_registry,
                ..Default::default()
            }),
        };

        // Notify listeners when the buffer entity is released.
        let handle = cx.entity().downgrade();
        buffer_entity.update(cx, move |_, cx| {
            cx.on_release(move |buffer, cx| {
                handle
                    .update(cx, |_, cx| {
                        cx.emit(BufferStoreEvent::BufferDropped(buffer.remote_id()))
                    })
                    .ok();
            })
            .detach()
        });

        match self.opened_buffers.entry(remote_id) {
            hash_map::Entry::Vacant(entry) => {
                entry.insert(open_buffer);
            }
            hash_map::Entry::Occupied(mut entry) => {
                if let OpenBuffer::Operations(operations) = entry.get_mut() {
                    // Replay operations queued before the buffer existed.
                    buffer_entity.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx));
                } else if entry.get().upgrade().is_some() {
                    if is_remote {
                        return Ok(());
                    } else {
                        debug_panic!("buffer {} was already registered", remote_id);
                        Err(anyhow!("buffer {} was already registered", remote_id))?;
                    }
                }
                entry.insert(open_buffer);
            }
        }

        cx.subscribe(&buffer_entity, Self::on_buffer_event).detach();
        cx.emit(BufferStoreEvent::BufferAdded(buffer_entity));
        Ok(())
    }
1816
1817 pub fn buffers(&self) -> impl '_ + Iterator<Item = Entity<Buffer>> {
1818 self.opened_buffers
1819 .values()
1820 .filter_map(|buffer| buffer.upgrade())
1821 }
1822
1823 pub fn loading_buffers(
1824 &self,
1825 ) -> impl Iterator<Item = (&ProjectPath, impl Future<Output = Result<Entity<Buffer>>>)> {
1826 self.loading_buffers.iter().map(|(path, task)| {
1827 let task = task.clone();
1828 (path, async move { task.await.map_err(|e| anyhow!("{e}")) })
1829 })
1830 }
1831
1832 pub fn get_by_path(&self, path: &ProjectPath, cx: &App) -> Option<Entity<Buffer>> {
1833 self.buffers().find_map(|buffer| {
1834 let file = File::from_dyn(buffer.read(cx).file())?;
1835 if file.worktree_id(cx) == path.worktree_id && file.path == path.path {
1836 Some(buffer)
1837 } else {
1838 None
1839 }
1840 })
1841 }
1842
1843 pub fn get(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1844 self.opened_buffers.get(&buffer_id)?.upgrade()
1845 }
1846
1847 pub fn get_existing(&self, buffer_id: BufferId) -> Result<Entity<Buffer>> {
1848 self.get(buffer_id)
1849 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
1850 }
1851
1852 pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1853 self.get(buffer_id).or_else(|| {
1854 self.as_remote()
1855 .and_then(|remote| remote.loading_remote_buffers_by_id.get(&buffer_id).cloned())
1856 })
1857 }
1858
1859 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1860 if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
1861 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1862 } else {
1863 None
1864 }
1865 }
1866
1867 pub fn get_uncommitted_diff(
1868 &self,
1869 buffer_id: BufferId,
1870 cx: &App,
1871 ) -> Option<Entity<BufferDiff>> {
1872 if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
1873 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1874 } else {
1875 None
1876 }
1877 }
1878
1879 pub fn buffer_version_info(&self, cx: &App) -> (Vec<proto::BufferVersion>, Vec<BufferId>) {
1880 let buffers = self
1881 .buffers()
1882 .map(|buffer| {
1883 let buffer = buffer.read(cx);
1884 proto::BufferVersion {
1885 id: buffer.remote_id().into(),
1886 version: language::proto::serialize_version(&buffer.version),
1887 }
1888 })
1889 .collect();
1890 let incomplete_buffer_ids = self
1891 .as_remote()
1892 .map(|remote| remote.incomplete_buffer_ids())
1893 .unwrap_or_default();
1894 (buffers, incomplete_buffer_ids)
1895 }
1896
1897 pub fn disconnected_from_host(&mut self, cx: &mut App) {
1898 for open_buffer in self.opened_buffers.values_mut() {
1899 if let Some(buffer) = open_buffer.upgrade() {
1900 buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1901 }
1902 }
1903
1904 for buffer in self.buffers() {
1905 buffer.update(cx, |buffer, cx| {
1906 buffer.set_capability(Capability::ReadOnly, cx)
1907 });
1908 }
1909
1910 if let Some(remote) = self.as_remote_mut() {
1911 // Wake up all futures currently waiting on a buffer to get opened,
1912 // to give them a chance to fail now that we've disconnected.
1913 remote.remote_buffer_listeners.clear()
1914 }
1915 }
1916
1917 pub fn shared(&mut self, remote_id: u64, downstream_client: AnyProtoClient, _cx: &mut App) {
1918 self.downstream_client = Some((downstream_client, remote_id));
1919 }
1920
1921 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
1922 self.downstream_client.take();
1923 self.forget_shared_buffers();
1924 }
1925
1926 pub fn discard_incomplete(&mut self) {
1927 self.opened_buffers
1928 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
1929 }
1930
    /// Streams buffers whose contents may match `query`, up to `limit`.
    ///
    /// Buffers without a file ("unnamed" buffers) are always yielded first
    /// and count against the limit; candidate paths from the worktrees are
    /// then opened in bounded batches and sent on the returned channel.
    pub fn find_search_candidates(
        &mut self,
        query: &SearchQuery,
        mut limit: usize,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Receiver<Entity<Buffer>> {
        let (tx, rx) = smol::channel::unbounded();
        let mut open_buffers = HashSet::default();
        let mut unnamed_buffers = Vec::new();
        for handle in self.buffers() {
            let buffer = handle.read(cx);
            if let Some(entry_id) = buffer.entry_id(cx) {
                open_buffers.insert(entry_id);
            } else {
                // Unnamed buffers are yielded unconditionally below, so
                // reserve part of the limit for them.
                limit = limit.saturating_sub(1);
                unnamed_buffers.push(handle)
            };
        }

        // Bound how many buffers are opened concurrently.
        const MAX_CONCURRENT_BUFFER_OPENS: usize = 64;
        let project_paths_rx = self
            .worktree_store
            .update(cx, |worktree_store, cx| {
                worktree_store.find_search_candidates(query.clone(), limit, open_buffers, fs, cx)
            })
            .chunks(MAX_CONCURRENT_BUFFER_OPENS);

        cx.spawn(|this, mut cx| async move {
            for buffer in unnamed_buffers {
                tx.send(buffer).await.ok();
            }

            let mut project_paths_rx = pin!(project_paths_rx);
            while let Some(project_paths) = project_paths_rx.next().await {
                let buffers = this.update(&mut cx, |this, cx| {
                    project_paths
                        .into_iter()
                        .map(|project_path| this.open_buffer(project_path, cx))
                        .collect::<Vec<_>>()
                })?;
                for buffer_task in buffers {
                    if let Some(buffer) = buffer_task.await.log_err() {
                        // The receiver was dropped: stop searching.
                        if tx.send(buffer).await.is_err() {
                            return anyhow::Ok(());
                        }
                    }
                }
            }
            anyhow::Ok(())
        })
        .detach();
        rx
    }
1985
1986 pub fn recalculate_buffer_diffs(
1987 &mut self,
1988 buffers: Vec<Entity<Buffer>>,
1989 cx: &mut Context<Self>,
1990 ) -> impl Future<Output = ()> {
1991 let mut futures = Vec::new();
1992 for buffer in buffers {
1993 if let Some(OpenBuffer::Complete { diff_state, .. }) =
1994 self.opened_buffers.get_mut(&buffer.read(cx).remote_id())
1995 {
1996 let buffer = buffer.read(cx).text_snapshot();
1997 futures.push(diff_state.update(cx, |diff_state, cx| {
1998 diff_state.recalculate_diffs(buffer, cx)
1999 }));
2000 }
2001 }
2002 async move {
2003 futures::future::join_all(futures).await;
2004 }
2005 }
2006
    /// Reacts to events emitted by an open buffer: syncs file changes into
    /// the local indices, forwards reloads to downstream collaborators, and
    /// refreshes diff state when the buffer's language changes.
    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::FileHandleChanged => {
                if let Some(local) = self.as_local_mut() {
                    local.buffer_changed_file(buffer, cx);
                }
            }
            BufferEvent::Reloaded => {
                // Only relevant when this project is shared downstream.
                let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else {
                    return;
                };
                let buffer = buffer.read(cx);
                downstream_client
                    .send(proto::BufferReloaded {
                        project_id: *project_id,
                        buffer_id: buffer.remote_id().to_proto(),
                        version: serialize_version(&buffer.version()),
                        mtime: buffer.saved_mtime().map(|t| t.into()),
                        line_ending: serialize_line_ending(buffer.line_ending()) as i32,
                    })
                    .log_err();
            }
            BufferEvent::LanguageChanged => {
                let buffer_id = buffer.read(cx).remote_id();
                if let Some(OpenBuffer::Complete { diff_state, .. }) =
                    self.opened_buffers.get(&buffer_id)
                {
                    // Let the diff state pick up the new language.
                    diff_state.update(cx, |diff_state, cx| {
                        diff_state.buffer_language_changed(buffer, cx);
                    });
                }
            }
            _ => {}
        }
    }
2047
2048 pub async fn handle_update_buffer(
2049 this: Entity<Self>,
2050 envelope: TypedEnvelope<proto::UpdateBuffer>,
2051 mut cx: AsyncApp,
2052 ) -> Result<proto::Ack> {
2053 let payload = envelope.payload.clone();
2054 let buffer_id = BufferId::new(payload.buffer_id)?;
2055 let ops = payload
2056 .operations
2057 .into_iter()
2058 .map(language::proto::deserialize_operation)
2059 .collect::<Result<Vec<_>, _>>()?;
2060 this.update(&mut cx, |this, cx| {
2061 match this.opened_buffers.entry(buffer_id) {
2062 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2063 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
2064 OpenBuffer::Complete { buffer, .. } => {
2065 if let Some(buffer) = buffer.upgrade() {
2066 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx));
2067 }
2068 }
2069 },
2070 hash_map::Entry::Vacant(e) => {
2071 e.insert(OpenBuffer::Operations(ops));
2072 }
2073 }
2074 Ok(proto::Ack {})
2075 })?
2076 }
2077
2078 pub fn register_shared_lsp_handle(
2079 &mut self,
2080 peer_id: proto::PeerId,
2081 buffer_id: BufferId,
2082 handle: OpenLspBufferHandle,
2083 ) {
2084 if let Some(shared_buffers) = self.shared_buffers.get_mut(&peer_id) {
2085 if let Some(buffer) = shared_buffers.get_mut(&buffer_id) {
2086 buffer.lsp_handle = Some(handle);
2087 return;
2088 }
2089 }
2090 debug_panic!("tried to register shared lsp handle, but buffer was not shared")
2091 }
2092
    /// Responds to a guest's request to re-synchronize buffer state (e.g.
    /// after a reconnect).
    ///
    /// Re-registers every buffer the guest reports as shared, replies with
    /// this side's version of each buffer, and pushes file metadata, saved
    /// state, and the operations the guest is missing.
    pub fn handle_synchronize_buffers(
        &mut self,
        envelope: TypedEnvelope<proto::SynchronizeBuffers>,
        cx: &mut Context<Self>,
        client: Arc<Client>,
    ) -> Result<proto::SynchronizeBuffersResponse> {
        let project_id = envelope.payload.project_id;
        let mut response = proto::SynchronizeBuffersResponse {
            buffers: Default::default(),
        };
        let Some(guest_id) = envelope.original_sender_id else {
            anyhow::bail!("missing original_sender_id on SynchronizeBuffers request");
        };

        // Start from a clean slate: only the buffers listed in this request
        // are considered shared with the guest from now on.
        self.shared_buffers.entry(guest_id).or_default().clear();
        for buffer in envelope.payload.buffers {
            let buffer_id = BufferId::new(buffer.id)?;
            let remote_version = language::proto::deserialize_version(&buffer.version);
            if let Some(buffer) = self.get(buffer_id) {
                self.shared_buffers
                    .entry(guest_id)
                    .or_default()
                    .entry(buffer_id)
                    .or_insert_with(|| SharedBuffer {
                        buffer: buffer.clone(),
                        diff: None,
                        lsp_handle: None,
                    });

                let buffer = buffer.read(cx);
                response.buffers.push(proto::BufferVersion {
                    id: buffer_id.into(),
                    version: language::proto::serialize_version(&buffer.version),
                });

                // Serialize only the operations the guest hasn't seen yet.
                let operations = buffer.serialize_ops(Some(remote_version), cx);
                let client = client.clone();
                if let Some(file) = buffer.file() {
                    client
                        .send(proto::UpdateBufferFile {
                            project_id,
                            buffer_id: buffer_id.into(),
                            file: Some(file.to_proto(cx)),
                        })
                        .log_err();
                }

                // TODO(max): do something
                // client
                //     .send(proto::UpdateStagedText {
                //         project_id,
                //         buffer_id: buffer_id.into(),
                //         diff_base: buffer.diff_base().map(ToString::to_string),
                //     })
                //     .log_err();

                client
                    .send(proto::BufferReloaded {
                        project_id,
                        buffer_id: buffer_id.into(),
                        version: language::proto::serialize_version(buffer.saved_version()),
                        mtime: buffer.saved_mtime().map(|time| time.into()),
                        line_ending: language::proto::serialize_line_ending(buffer.line_ending())
                            as i32,
                    })
                    .log_err();

                // Send the missing operations in bounded chunks so a single
                // oversized message isn't produced.
                cx.background_spawn(
                    async move {
                        let operations = operations.await;
                        for chunk in split_operations(operations) {
                            client
                                .request(proto::UpdateBuffer {
                                    project_id,
                                    buffer_id: buffer_id.into(),
                                    operations: chunk,
                                })
                                .await?;
                        }
                        anyhow::Ok(())
                    }
                    .log_err(),
                )
                .detach();
            }
        }
        Ok(response)
    }
2181
2182 pub fn handle_create_buffer_for_peer(
2183 &mut self,
2184 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
2185 replica_id: u16,
2186 capability: Capability,
2187 cx: &mut Context<Self>,
2188 ) -> Result<()> {
2189 let Some(remote) = self.as_remote_mut() else {
2190 return Err(anyhow!("buffer store is not a remote"));
2191 };
2192
2193 if let Some(buffer) =
2194 remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx)?
2195 {
2196 self.add_buffer(buffer, cx)?;
2197 }
2198
2199 Ok(())
2200 }
2201
    /// Handles a file-metadata update for a buffer (e.g. after a rename or
    /// move) received from upstream, re-forwarding it downstream when this
    /// project is itself shared.
    pub async fn handle_update_buffer_file(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateBufferFile>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = envelope.payload.buffer_id;
        let buffer_id = BufferId::new(buffer_id)?;

        this.update(&mut cx, |this, cx| {
            // The payload is cloned because `payload.file` is consumed below
            // while `envelope.payload.file` is forwarded downstream intact.
            let payload = envelope.payload.clone();
            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
                let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
                let worktree = this
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
                    .ok_or_else(|| anyhow!("no such worktree"))?;
                let file = File::from_proto(file, worktree, cx)?;
                // Yields the previous file only when the path actually
                // changed, so the event below fires only for real moves.
                let old_file = buffer.update(cx, |buffer, cx| {
                    let old_file = buffer.file().cloned();
                    let new_path = file.path.clone();
                    buffer.file_updated(Arc::new(file), cx);
                    if old_file
                        .as_ref()
                        .map_or(true, |old| *old.path() != new_path)
                    {
                        Some(old_file)
                    } else {
                        None
                    }
                });
                if let Some(old_file) = old_file {
                    cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
                }
            }
            // Forward the update to any downstream collaborators.
            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
                downstream_client
                    .send(proto::UpdateBufferFile {
                        project_id: *project_id,
                        buffer_id: buffer_id.into(),
                        file: envelope.payload.file,
                    })
                    .log_err();
            }
            Ok(())
        })?
    }
2249
    /// Saves a buffer on behalf of a remote peer: waits until this replica has
    /// caught up to the peer's buffer version, performs the save (optionally
    /// as a save-as to `new_path`), and reports the saved version and mtime.
    ///
    /// Errors if the buffer is unknown or the project is not shared.
    pub async fn handle_save_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let (buffer, project_id) = this.update(&mut cx, |this, _| {
            anyhow::Ok((
                this.get_existing(buffer_id)?,
                this.downstream_client
                    .as_ref()
                    .map(|(_, project_id)| *project_id)
                    .context("project is not shared")?,
            ))
        })??;
        // Don't save until we've applied all the edits the requester has seen.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&envelope.payload.version))
            })?
            .await?;
        let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;

        if let Some(new_path) = envelope.payload.new_path {
            let new_path = ProjectPath::from_proto(new_path);
            this.update(&mut cx, |this, cx| {
                this.save_buffer_as(buffer.clone(), new_path, cx)
            })?
            .await?;
        } else {
            this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
                .await?;
        }

        // Report the post-save state so the requester can mark it clean.
        buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
            project_id,
            buffer_id: buffer_id.into(),
            version: serialize_version(buffer.saved_version()),
            mtime: buffer.saved_mtime().map(|time| time.into()),
        })
    }
2290
2291 pub async fn handle_close_buffer(
2292 this: Entity<Self>,
2293 envelope: TypedEnvelope<proto::CloseBuffer>,
2294 mut cx: AsyncApp,
2295 ) -> Result<()> {
2296 let peer_id = envelope.sender_id;
2297 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2298 this.update(&mut cx, |this, _| {
2299 if let Some(shared) = this.shared_buffers.get_mut(&peer_id) {
2300 if shared.remove(&buffer_id).is_some() {
2301 if shared.is_empty() {
2302 this.shared_buffers.remove(&peer_id);
2303 }
2304 return;
2305 }
2306 }
2307 debug_panic!(
2308 "peer_id {} closed buffer_id {} which was either not open or already closed",
2309 peer_id,
2310 buffer_id
2311 )
2312 })
2313 }
2314
2315 pub async fn handle_buffer_saved(
2316 this: Entity<Self>,
2317 envelope: TypedEnvelope<proto::BufferSaved>,
2318 mut cx: AsyncApp,
2319 ) -> Result<()> {
2320 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2321 let version = deserialize_version(&envelope.payload.version);
2322 let mtime = envelope.payload.mtime.clone().map(|time| time.into());
2323 this.update(&mut cx, move |this, cx| {
2324 if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
2325 buffer.update(cx, |buffer, cx| {
2326 buffer.did_save(version, mtime, cx);
2327 });
2328 }
2329
2330 if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
2331 downstream_client
2332 .send(proto::BufferSaved {
2333 project_id: *project_id,
2334 buffer_id: buffer_id.into(),
2335 mtime: envelope.payload.mtime,
2336 version: envelope.payload.version,
2337 })
2338 .log_err();
2339 }
2340 })
2341 }
2342
2343 pub async fn handle_buffer_reloaded(
2344 this: Entity<Self>,
2345 envelope: TypedEnvelope<proto::BufferReloaded>,
2346 mut cx: AsyncApp,
2347 ) -> Result<()> {
2348 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2349 let version = deserialize_version(&envelope.payload.version);
2350 let mtime = envelope.payload.mtime.clone().map(|time| time.into());
2351 let line_ending = deserialize_line_ending(
2352 proto::LineEnding::from_i32(envelope.payload.line_ending)
2353 .ok_or_else(|| anyhow!("missing line ending"))?,
2354 );
2355 this.update(&mut cx, |this, cx| {
2356 if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
2357 buffer.update(cx, |buffer, cx| {
2358 buffer.did_reload(version, line_ending, mtime, cx);
2359 });
2360 }
2361
2362 if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
2363 downstream_client
2364 .send(proto::BufferReloaded {
2365 project_id: *project_id,
2366 buffer_id: buffer_id.into(),
2367 mtime: envelope.payload.mtime,
2368 version: envelope.payload.version,
2369 line_ending: envelope.payload.line_ending,
2370 })
2371 .log_err();
2372 }
2373 })
2374 }
2375
2376 pub async fn handle_blame_buffer(
2377 this: Entity<Self>,
2378 envelope: TypedEnvelope<proto::BlameBuffer>,
2379 mut cx: AsyncApp,
2380 ) -> Result<proto::BlameBufferResponse> {
2381 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2382 let version = deserialize_version(&envelope.payload.version);
2383 let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
2384 buffer
2385 .update(&mut cx, |buffer, _| {
2386 buffer.wait_for_version(version.clone())
2387 })?
2388 .await?;
2389 let blame = this
2390 .update(&mut cx, |this, cx| {
2391 this.blame_buffer(&buffer, Some(version), cx)
2392 })?
2393 .await?;
2394 Ok(serialize_blame_buffer_response(blame))
2395 }
2396
2397 pub async fn handle_get_permalink_to_line(
2398 this: Entity<Self>,
2399 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2400 mut cx: AsyncApp,
2401 ) -> Result<proto::GetPermalinkToLineResponse> {
2402 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2403 // let version = deserialize_version(&envelope.payload.version);
2404 let selection = {
2405 let proto_selection = envelope
2406 .payload
2407 .selection
2408 .context("no selection to get permalink for defined")?;
2409 proto_selection.start as u32..proto_selection.end as u32
2410 };
2411 let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
2412 let permalink = this
2413 .update(&mut cx, |this, cx| {
2414 this.get_permalink_to_line(&buffer, selection, cx)
2415 })?
2416 .await?;
2417 Ok(proto::GetPermalinkToLineResponse {
2418 permalink: permalink.to_string(),
2419 })
2420 }
2421
2422 pub async fn handle_open_unstaged_diff(
2423 this: Entity<Self>,
2424 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2425 mut cx: AsyncApp,
2426 ) -> Result<proto::OpenUnstagedDiffResponse> {
2427 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2428 let diff = this
2429 .update(&mut cx, |this, cx| {
2430 let buffer = this.get(buffer_id)?;
2431 Some(this.open_unstaged_diff(buffer, cx))
2432 })?
2433 .ok_or_else(|| anyhow!("no such buffer"))?
2434 .await?;
2435 this.update(&mut cx, |this, _| {
2436 let shared_buffers = this
2437 .shared_buffers
2438 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2439 .or_default();
2440 debug_assert!(shared_buffers.contains_key(&buffer_id));
2441 if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
2442 shared.diff = Some(diff.clone());
2443 }
2444 })?;
2445 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2446 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2447 }
2448
    /// Opens the uncommitted (HEAD vs. working copy) diff for a buffer on
    /// behalf of a remote peer, records the diff on the peer's shared-buffer
    /// entry, and replies with the committed/staged base texts plus a `Mode`
    /// telling the peer how to interpret them.
    pub async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })?
            .ok_or_else(|| anyhow!("no such buffer"))?
            .await?;
        this.update(&mut cx, |this, _| {
            // Requests may be forwarded by the host; attribute the diff to the
            // original requester when present.
            let shared_buffers = this
                .shared_buffers
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            debug_assert!(shared_buffers.contains_key(&buffer_id));
            if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
                shared.diff = Some(diff.clone());
            }
        })?;
        diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary (unstaged) diff's base text is the index snapshot,
            // when the file exists in the index at all.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text())
            });

            // Determine which base texts to ship and how the peer should
            // combine them:
            // - IndexMatchesHead: index and HEAD share the same snapshot, so
            //   only the committed text is sent.
            // - IndexAndHead: index and HEAD differ (or one is absent); each
            //   is sent explicitly, with `None` meaning "file absent".
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text();
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        })
    }
2512
2513 pub async fn handle_update_diff_bases(
2514 this: Entity<Self>,
2515 request: TypedEnvelope<proto::UpdateDiffBases>,
2516 mut cx: AsyncApp,
2517 ) -> Result<()> {
2518 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2519 this.update(&mut cx, |this, cx| {
2520 if let Some(OpenBuffer::Complete { diff_state, buffer }) =
2521 this.opened_buffers.get_mut(&buffer_id)
2522 {
2523 if let Some(buffer) = buffer.upgrade() {
2524 let buffer = buffer.read(cx).text_snapshot();
2525 diff_state.update(cx, |diff_state, cx| {
2526 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2527 })
2528 }
2529 }
2530 })
2531 }
2532
2533 pub fn reload_buffers(
2534 &self,
2535 buffers: HashSet<Entity<Buffer>>,
2536 push_to_history: bool,
2537 cx: &mut Context<Self>,
2538 ) -> Task<Result<ProjectTransaction>> {
2539 if buffers.is_empty() {
2540 return Task::ready(Ok(ProjectTransaction::default()));
2541 }
2542 match &self.state {
2543 BufferStoreState::Local(this) => this.reload_buffers(buffers, push_to_history, cx),
2544 BufferStoreState::Remote(this) => this.reload_buffers(buffers, push_to_history, cx),
2545 }
2546 }
2547
2548 async fn handle_reload_buffers(
2549 this: Entity<Self>,
2550 envelope: TypedEnvelope<proto::ReloadBuffers>,
2551 mut cx: AsyncApp,
2552 ) -> Result<proto::ReloadBuffersResponse> {
2553 let sender_id = envelope.original_sender_id().unwrap_or_default();
2554 let reload = this.update(&mut cx, |this, cx| {
2555 let mut buffers = HashSet::default();
2556 for buffer_id in &envelope.payload.buffer_ids {
2557 let buffer_id = BufferId::new(*buffer_id)?;
2558 buffers.insert(this.get_existing(buffer_id)?);
2559 }
2560 Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
2561 })??;
2562
2563 let project_transaction = reload.await?;
2564 let project_transaction = this.update(&mut cx, |this, cx| {
2565 this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
2566 })?;
2567 Ok(proto::ReloadBuffersResponse {
2568 transaction: Some(project_transaction),
2569 })
2570 }
2571
    /// Shares a buffer with a peer: records the share locally, then streams
    /// the buffer's state followed by its operation history to the peer in
    /// chunks. Returns a task that completes when the stream has been sent.
    ///
    /// No-op if the buffer is already shared with this peer or the project is
    /// not currently shared downstream.
    pub fn create_buffer_for_peer(
        &mut self,
        buffer: &Entity<Buffer>,
        peer_id: proto::PeerId,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let buffer_id = buffer.read(cx).remote_id();
        let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
        if shared_buffers.contains_key(&buffer_id) {
            return Task::ready(Ok(()));
        }
        // Record the share synchronously, before any async work, so concurrent
        // calls for the same (peer, buffer) pair short-circuit above.
        shared_buffers.insert(
            buffer_id,
            SharedBuffer {
                buffer: buffer.clone(),
                diff: None,
                lsp_handle: None,
            },
        );

        let Some((client, project_id)) = self.downstream_client.clone() else {
            return Task::ready(Ok(()));
        };

        cx.spawn(|this, mut cx| async move {
            // The buffer may have been closed by the time this task runs.
            let Some(buffer) = this.update(&mut cx, |this, _| this.get(buffer_id))? else {
                return anyhow::Ok(());
            };

            let operations = buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
            let operations = operations.await;
            let state = buffer.update(&mut cx, |buffer, cx| buffer.to_proto(cx))?;

            // Send the buffer's current state first, then stream the operation
            // history in chunks so the peer can reconstruct full history.
            let initial_state = proto::CreateBufferForPeer {
                project_id,
                peer_id: Some(peer_id),
                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
            };

            if client.send(initial_state).log_err().is_some() {
                let client = client.clone();
                cx.background_spawn(async move {
                    let mut chunks = split_operations(operations).peekable();
                    while let Some(chunk) = chunks.next() {
                        // Flag the final chunk so the peer knows when the
                        // buffer is complete.
                        let is_last = chunks.peek().is_none();
                        client.send(proto::CreateBufferForPeer {
                            project_id,
                            peer_id: Some(peer_id),
                            variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
                                proto::BufferChunk {
                                    buffer_id: buffer_id.into(),
                                    operations: chunk,
                                    is_last,
                                },
                            )),
                        })?;
                    }
                    anyhow::Ok(())
                })
                .await
                .log_err();
            }
            Ok(())
        })
    }
2637
    /// Drops all record of buffers shared with remote peers.
    pub fn forget_shared_buffers(&mut self) {
        self.shared_buffers.clear();
    }
2641
    /// Drops all record of buffers shared with the given peer.
    pub fn forget_shared_buffers_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_buffers.remove(peer_id);
    }
2645
    /// Re-keys a peer's shared buffers under a new peer id (e.g. after the
    /// peer reconnects with a different id). No-op if the old id is unknown.
    pub fn update_peer_id(&mut self, old_peer_id: &proto::PeerId, new_peer_id: proto::PeerId) {
        if let Some(buffers) = self.shared_buffers.remove(old_peer_id) {
            self.shared_buffers.insert(new_peer_id, buffers);
        }
    }
2651
    /// Reports whether any buffers are currently shared with any peer.
    pub fn has_shared_buffers(&self) -> bool {
        !self.shared_buffers.is_empty()
    }
2655
2656 pub fn create_local_buffer(
2657 &mut self,
2658 text: &str,
2659 language: Option<Arc<Language>>,
2660 cx: &mut Context<Self>,
2661 ) -> Entity<Buffer> {
2662 let buffer = cx.new(|cx| {
2663 Buffer::local(text, cx)
2664 .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
2665 });
2666
2667 self.add_buffer(buffer.clone(), cx).log_err();
2668 let buffer_id = buffer.read(cx).remote_id();
2669
2670 let this = self
2671 .as_local_mut()
2672 .expect("local-only method called in a non-local context");
2673 if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
2674 this.local_buffer_ids_by_path.insert(
2675 ProjectPath {
2676 worktree_id: file.worktree_id(cx),
2677 path: file.path.clone(),
2678 },
2679 buffer_id,
2680 );
2681
2682 if let Some(entry_id) = file.entry_id {
2683 this.local_buffer_ids_by_entry_id
2684 .insert(entry_id, buffer_id);
2685 }
2686 }
2687 buffer
2688 }
2689
2690 pub fn deserialize_project_transaction(
2691 &mut self,
2692 message: proto::ProjectTransaction,
2693 push_to_history: bool,
2694 cx: &mut Context<Self>,
2695 ) -> Task<Result<ProjectTransaction>> {
2696 if let Some(this) = self.as_remote_mut() {
2697 this.deserialize_project_transaction(message, push_to_history, cx)
2698 } else {
2699 debug_panic!("not a remote buffer store");
2700 Task::ready(Err(anyhow!("not a remote buffer store")))
2701 }
2702 }
2703
2704 pub fn wait_for_remote_buffer(
2705 &mut self,
2706 id: BufferId,
2707 cx: &mut Context<BufferStore>,
2708 ) -> Task<Result<Entity<Buffer>>> {
2709 if let Some(this) = self.as_remote_mut() {
2710 this.wait_for_remote_buffer(id, cx)
2711 } else {
2712 debug_panic!("not a remote buffer store");
2713 Task::ready(Err(anyhow!("not a remote buffer store")))
2714 }
2715 }
2716
2717 pub fn serialize_project_transaction_for_peer(
2718 &mut self,
2719 project_transaction: ProjectTransaction,
2720 peer_id: proto::PeerId,
2721 cx: &mut Context<Self>,
2722 ) -> proto::ProjectTransaction {
2723 let mut serialized_transaction = proto::ProjectTransaction {
2724 buffer_ids: Default::default(),
2725 transactions: Default::default(),
2726 };
2727 for (buffer, transaction) in project_transaction.0 {
2728 self.create_buffer_for_peer(&buffer, peer_id, cx)
2729 .detach_and_log_err(cx);
2730 serialized_transaction
2731 .buffer_ids
2732 .push(buffer.read(cx).remote_id().into());
2733 serialized_transaction
2734 .transactions
2735 .push(language::proto::serialize_transaction(&transaction));
2736 }
2737 serialized_transaction
2738 }
2739}
2740
2741impl OpenBuffer {
2742 fn upgrade(&self) -> Option<Entity<Buffer>> {
2743 match self {
2744 OpenBuffer::Complete { buffer, .. } => buffer.upgrade(),
2745 OpenBuffer::Operations(_) => None,
2746 }
2747 }
2748}
2749
2750fn is_not_found_error(error: &anyhow::Error) -> bool {
2751 error
2752 .root_cause()
2753 .downcast_ref::<io::Error>()
2754 .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
2755}
2756
2757fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
2758 let Some(blame) = blame else {
2759 return proto::BlameBufferResponse {
2760 blame_response: None,
2761 };
2762 };
2763
2764 let entries = blame
2765 .entries
2766 .into_iter()
2767 .map(|entry| proto::BlameEntry {
2768 sha: entry.sha.as_bytes().into(),
2769 start_line: entry.range.start,
2770 end_line: entry.range.end,
2771 original_line_number: entry.original_line_number,
2772 author: entry.author.clone(),
2773 author_mail: entry.author_mail.clone(),
2774 author_time: entry.author_time,
2775 author_tz: entry.author_tz.clone(),
2776 committer: entry.committer_name.clone(),
2777 committer_mail: entry.committer_email.clone(),
2778 committer_time: entry.committer_time,
2779 committer_tz: entry.committer_tz.clone(),
2780 summary: entry.summary.clone(),
2781 previous: entry.previous.clone(),
2782 filename: entry.filename.clone(),
2783 })
2784 .collect::<Vec<_>>();
2785
2786 let messages = blame
2787 .messages
2788 .into_iter()
2789 .map(|(oid, message)| proto::CommitMessage {
2790 oid: oid.as_bytes().into(),
2791 message,
2792 })
2793 .collect::<Vec<_>>();
2794
2795 proto::BlameBufferResponse {
2796 blame_response: Some(proto::blame_buffer_response::BlameResponse {
2797 entries,
2798 messages,
2799 remote_url: blame.remote_url,
2800 }),
2801 }
2802}
2803
/// Reconstructs a [`git::blame::Blame`] from its protobuf response form.
///
/// Returns `None` when the response carries no blame payload. Entries or
/// commit messages whose SHA bytes fail to parse are silently dropped rather
/// than failing the whole response.
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    // Commit messages are keyed by their (parsed) object ids.
    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}
2843
2844fn get_permalink_in_rust_registry_src(
2845 provider_registry: Arc<GitHostingProviderRegistry>,
2846 path: PathBuf,
2847 selection: Range<u32>,
2848) -> Result<url::Url> {
2849 #[derive(Deserialize)]
2850 struct CargoVcsGit {
2851 sha1: String,
2852 }
2853
2854 #[derive(Deserialize)]
2855 struct CargoVcsInfo {
2856 git: CargoVcsGit,
2857 path_in_vcs: String,
2858 }
2859
2860 #[derive(Deserialize)]
2861 struct CargoPackage {
2862 repository: String,
2863 }
2864
2865 #[derive(Deserialize)]
2866 struct CargoToml {
2867 package: CargoPackage,
2868 }
2869
2870 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
2871 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
2872 Some((dir, json))
2873 }) else {
2874 bail!("No .cargo_vcs_info.json found in parent directories")
2875 };
2876 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
2877 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
2878 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
2879 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
2880 .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
2881 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
2882 let permalink = provider.build_permalink(
2883 remote,
2884 BuildPermalinkParams {
2885 sha: &cargo_vcs_info.git.sha1,
2886 path: &path.to_string_lossy(),
2887 selection: Some(selection),
2888 },
2889 );
2890 Ok(permalink)
2891}