1use crate::{
2 lsp_store::OpenLspBufferHandle,
3 search::SearchQuery,
4 worktree_store::{WorktreeStore, WorktreeStoreEvent},
5 ProjectItem as _, ProjectPath,
6};
7use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry};
8use anyhow::{anyhow, bail, Context as _, Result};
9use client::Client;
10use collections::{hash_map, HashMap, HashSet};
11use diff::{BufferDiff, BufferDiffEvent, BufferDiffSnapshot};
12use fs::Fs;
13use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt};
14use git::{blame::Blame, repository::RepoPath};
15use gpui::{
16 App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
17};
18use http_client::Url;
19use language::{
20 proto::{
21 deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version,
22 split_operations,
23 },
24 Buffer, BufferEvent, Capability, DiskState, File as _, Language, LanguageRegistry, Operation,
25};
26use rpc::{proto, AnyProtoClient, ErrorExt as _, TypedEnvelope};
27use serde::Deserialize;
28use smol::channel::Receiver;
29use std::{
30 io,
31 ops::Range,
32 path::{Path, PathBuf},
33 pin::pin,
34 str::FromStr as _,
35 sync::Arc,
36 time::Instant,
37};
38use text::BufferId;
39use util::{debug_panic, maybe, ResultExt as _, TryFutureExt};
40use worktree::{File, PathChange, ProjectEntryId, UpdatedGitRepositoriesSet, Worktree, WorktreeId};
41
/// The two kinds of git diff tracked per buffer, used as part of the key in
/// `BufferStore::loading_diffs`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Diff of the buffer contents against the git index (staged text).
    Unstaged,
    /// Diff of the buffer contents against HEAD (committed text).
    Uncommitted,
}
47
/// A set of open buffers, tracking both fully-loaded buffers and
/// loads/diff computations that are still in flight.
pub struct BufferStore {
    /// Whether this store is backed by the local filesystem or a remote peer.
    state: BufferStoreState,
    /// In-flight buffer loads, keyed by project path, so that concurrent
    /// requests for the same path share a single task.
    #[allow(clippy::type_complexity)]
    loading_buffers: HashMap<ProjectPath, Shared<Task<Result<Entity<Buffer>, Arc<anyhow::Error>>>>>,
    /// In-flight diff computations, keyed by buffer and diff kind, shared for
    /// the same de-duplication reason as `loading_buffers`.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    worktree_store: Entity<WorktreeStore>,
    /// All buffers known to this store, fully opened or still accumulating
    /// early operations (see `OpenBuffer`).
    opened_buffers: HashMap<BufferId, OpenBuffer>,
    /// When this project is shared, the client and project id used to forward
    /// updates downstream.
    downstream_client: Option<(AnyProtoClient, u64)>,
    /// Buffers that have been sent to each peer, retained so they aren't
    /// dropped while a collaborator still references them.
    shared_buffers: HashMap<proto::PeerId, HashMap<BufferId, SharedBuffer>>,
}
61
/// A buffer that has been shared with a peer, together with the auxiliary
/// entities whose lifetimes must be kept alive on the peer's behalf.
#[derive(Hash, Eq, PartialEq, Clone)]
struct SharedBuffer {
    buffer: Entity<Buffer>,
    /// The buffer's diff, if the peer has requested one.
    diff: Option<Entity<BufferDiff>>,
    /// Handle keeping the buffer registered with language servers while shared.
    lsp_handle: Option<OpenLspBufferHandle>,
}
68
/// Per-buffer state for maintaining git diffs against the index and HEAD.
///
/// Holds weak handles to the diff entities (so diffs can be dropped when no
/// longer displayed) plus the cached base texts and dirty flags that drive
/// `recalculate_diffs`.
#[derive(Default)]
struct BufferDiffState {
    /// Weak handle to the diff vs. the index, if one has been opened.
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    /// Weak handle to the diff vs. HEAD, if one has been opened.
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// The currently-running recalculation, replaced (and thereby cancelled)
    /// whenever a new recalculation starts.
    recalculate_diff_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Senders notified when the in-progress recalculation completes.
    diff_updated_futures: Vec<oneshot::Sender<()>>,
    buffer_subscription: Option<Subscription>,

    /// Cached committed (HEAD) text; `None` means no committed version exists.
    head_text: Option<Arc<String>>,
    /// Cached staged (index) text; `None` means no staged version exists.
    index_text: Option<Arc<String>>,
    // Dirty flags: set when the corresponding input changed, cleared at the
    // end of a successful recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
85
/// Describes which diff base texts changed and their new contents.
/// `None` in any position means that base no longer exists (e.g. an
/// untracked file has no committed text).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Both changed, to different contents.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Both changed and the index matches HEAD; carries the single shared text.
    SetBoth(Option<String>),
}
96
impl BufferDiffState {
    /// Records the buffer's new language and re-runs diff calculation so the
    /// base texts are re-highlighted with the new grammar.
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        // Completion is not awaited here; the receiver is intentionally dropped.
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// The unstaged diff entity, if it is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// The uncommitted diff entity, if it is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Applies an `UpdateDiffBases` message received from upstream by
    /// translating its protobuf mode into a `DiffBasesChange`.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        // Unknown modes (e.g. sent by a newer peer) are ignored.
        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        let _ = self.diff_bases_changed(buffer, diff_bases_change, cx);
    }

    /// Stores the new base texts (normalizing line endings), marks the
    /// corresponding dirty flags, and kicks off a recalculation. Returns a
    /// receiver that resolves when the recalculation finishes.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: DiffBasesChange,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        match diff_bases_change {
            DiffBasesChange::SetIndex(index) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::new(index)
                });
                self.index_changed = true;
            }
            DiffBasesChange::SetHead(head) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::new(head)
                });
                self.head_changed = true;
            }
            DiffBasesChange::SetBoth(text) => {
                // A single Arc is shared between both fields; this pointer
                // identity is what `recalculate_diffs` later uses to detect
                // that the index matches HEAD.
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::new(text)
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            DiffBasesChange::SetEach { index, head } => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::new(index)
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::new(head)
                });
                self.head_changed = true;
            }
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged and/or uncommitted diff snapshots for `buffer`
    /// in a background task, reusing existing base buffers when the relevant
    /// inputs did not change. Replacing `recalculate_diff_task` cancels any
    /// previous in-flight recalculation. The returned receiver fires once all
    /// pending recalculations complete.
    fn recalculate_diffs(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();
        self.diff_updated_futures.push(tx);

        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        // Pointer equality is a cheap, conservative proxy: `SetBoth` assigns
        // the same Arc to both fields, so ptr_eq implies identical contents.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };
        self.recalculate_diff_task = Some(cx.spawn(|this, mut cx| async move {
            if let Some(unstaged_diff) = &unstaged_diff {
                let snapshot = if index_changed || language_changed {
                    // Base text changed (or needs re-highlighting): build a
                    // fresh snapshot from the raw index text.
                    cx.update(|cx| {
                        BufferDiffSnapshot::build(
                            buffer.clone(),
                            index,
                            language.clone(),
                            language_registry.clone(),
                            cx,
                        )
                    })?
                    .await
                } else {
                    // Only the buffer changed: reuse the existing base buffer.
                    unstaged_diff
                        .read_with(&cx, |changes, cx| {
                            BufferDiffSnapshot::build_with_base_buffer(
                                buffer.clone(),
                                index,
                                changes.snapshot.base_text.clone(),
                                cx,
                            )
                        })?
                        .await
                };

                unstaged_diff.update(&mut cx, |unstaged_diff, cx| {
                    unstaged_diff.set_state(snapshot, &buffer, cx);
                    if language_changed {
                        cx.emit(BufferDiffEvent::LanguageChanged);
                    }
                })?;
            }

            if let Some(uncommitted_diff) = &uncommitted_diff {
                let snapshot =
                    if let (Some(unstaged_diff), true) = (&unstaged_diff, index_matches_head) {
                        // Index matches HEAD, so the just-computed unstaged
                        // snapshot can be shared directly.
                        unstaged_diff.read_with(&cx, |diff, _| diff.snapshot.clone())?
                    } else if head_changed || language_changed {
                        cx.update(|cx| {
                            BufferDiffSnapshot::build(
                                buffer.clone(),
                                head,
                                language.clone(),
                                language_registry.clone(),
                                cx,
                            )
                        })?
                        .await
                    } else {
                        uncommitted_diff
                            .read_with(&cx, |changes, cx| {
                                BufferDiffSnapshot::build_with_base_buffer(
                                    buffer.clone(),
                                    head,
                                    changes.snapshot.base_text.clone(),
                                    cx,
                                )
                            })?
                            .await
                    };

                uncommitted_diff.update(&mut cx, |diff, cx| {
                    diff.set_state(snapshot, &buffer, cx);
                    if language_changed {
                        cx.emit(BufferDiffEvent::LanguageChanged);
                    }
                })?;
            }

            // Clear dirty flags and wake every waiter, including waiters
            // registered by recalculations that this task superseded.
            if let Some(this) = this.upgrade() {
                this.update(&mut cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    for tx in this.diff_updated_futures.drain(..) {
                        tx.send(()).ok();
                    }
                })?;
            }

            Ok(())
        }));

        rx
    }
}
293
/// Whether the store operates on local worktrees or proxies to an upstream
/// collaborator/server over RPC.
enum BufferStoreState {
    Local(LocalBufferStore),
    Remote(RemoteBufferStore),
}
298
/// Buffer-store backend for projects joined remotely: buffers are opened,
/// saved, and reloaded via RPC requests to the upstream client.
struct RemoteBufferStore {
    /// Buffers received from collab peers, retained here to avoid races
    /// where a peer-sent buffer is dropped before it is used.
    shared_with_me: HashSet<Entity<Buffer>>,
    upstream_client: AnyProtoClient,
    project_id: u64,
    /// Buffers whose initial state arrived but whose operation chunks are
    /// still streaming in (see `handle_create_buffer_for_peer`).
    loading_remote_buffers_by_id: HashMap<BufferId, Entity<Buffer>>,
    /// Channels waiting for a given remote buffer to finish loading; resolved
    /// with the buffer on success or the load error on failure.
    remote_buffer_listeners:
        HashMap<BufferId, Vec<oneshot::Sender<Result<Entity<Buffer>, anyhow::Error>>>>,
    worktree_store: Entity<WorktreeStore>,
}
308
/// Buffer-store backend for local projects: buffers are loaded from and saved
/// to worktrees on the local filesystem.
struct LocalBufferStore {
    /// Reverse indices from a buffer's path / worktree entry to its id, kept
    /// in sync as files are renamed, created, and deleted.
    local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
    worktree_store: Entity<WorktreeStore>,
    /// Subscription to worktree-store events; dropped with this store.
    _subscription: Subscription,
}
315
/// Entry in `BufferStore::opened_buffers`.
enum OpenBuffer {
    /// A fully-opened buffer (held weakly so dropping all strong references
    /// closes it) plus its associated git-diff state.
    Complete {
        buffer: WeakEntity<Buffer>,
        diff_state: Entity<BufferDiffState>,
    },
    /// Operations received for a buffer before the buffer itself exists;
    /// buffered so they can be applied once it is created.
    Operations(Vec<Operation>),
}
323
/// Events emitted by `BufferStore` for interested subscribers.
pub enum BufferStoreEvent {
    /// A buffer was added to the store.
    BufferAdded(Entity<Buffer>),
    /// A buffer was closed/dropped, identified by its id.
    BufferDropped(BufferId),
    /// A buffer's on-disk path changed (e.g. the file was renamed);
    /// `old_file` is the file handle from before the change.
    BufferChangedFilePath {
        buffer: Entity<Buffer>,
        old_file: Option<Arc<dyn language::File>>,
    },
}
332
/// A group of buffer transactions resulting from one logical project-wide
/// operation (e.g. a multi-file code action), keyed by the buffer each
/// transaction applies to.
#[derive(Default, Debug)]
pub struct ProjectTransaction(pub HashMap<Entity<Buffer>, language::Transaction>);

impl EventEmitter<BufferStoreEvent> for BufferStore {}
337
impl RemoteBufferStore {
    /// Requests the staged (index) text for `buffer_id` from upstream.
    /// `None` in the response means the file has no staged version.
    fn open_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Task<Result<Option<String>>> {
        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.background_executor().spawn(async move {
            let response = client
                .request(proto::OpenUnstagedDiff {
                    project_id,
                    buffer_id: buffer_id.to_proto(),
                })
                .await?;
            Ok(response.staged_text)
        })
    }

    /// Requests both committed and staged texts for `buffer_id` from
    /// upstream, collapsed into a `DiffBasesChange` according to the
    /// response's mode.
    fn open_uncommitted_diff(
        &self,
        buffer_id: BufferId,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        use proto::open_uncommitted_diff_response::Mode;

        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.background_executor().spawn(async move {
            let response = client
                .request(proto::OpenUncommittedDiff {
                    project_id,
                    buffer_id: buffer_id.to_proto(),
                })
                .await?;
            let mode = Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
            let bases = match mode {
                Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                Mode::IndexAndHead => DiffBasesChange::SetEach {
                    head: response.committed_text,
                    index: response.staged_text,
                },
            };
            Ok(bases)
        })
    }

    /// Returns a task that resolves once the buffer with `id` has been fully
    /// received from the remote peer. Registers a listener first, then checks
    /// whether the buffer already exists to avoid missing a completion that
    /// races with registration.
    pub fn wait_for_remote_buffer(
        &mut self,
        id: BufferId,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let (tx, rx) = oneshot::channel();
        self.remote_buffer_listeners.entry(id).or_default().push(tx);

        cx.spawn(|this, cx| async move {
            // Fast path: the buffer may already be open.
            if let Some(buffer) = this
                .read_with(&cx, |buffer_store, _| buffer_store.get(id))
                .ok()
                .flatten()
            {
                return Ok(buffer);
            }

            cx.background_executor()
                .spawn(async move { rx.await? })
                .await
        })
    }

    /// Saves a buffer by asking the upstream host to perform the write, then
    /// applies the resulting version/mtime to the local replica.
    fn save_remote_buffer(
        &self,
        buffer_handle: Entity<Buffer>,
        new_path: Option<proto::ProjectPath>,
        cx: &Context<BufferStore>,
    ) -> Task<Result<()>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id().into();
        let version = buffer.version();
        let rpc = self.upstream_client.clone();
        let project_id = self.project_id;
        cx.spawn(move |_, mut cx| async move {
            let response = rpc
                .request(proto::SaveBuffer {
                    project_id,
                    buffer_id,
                    new_path,
                    version: serialize_version(&version),
                })
                .await?;
            let version = deserialize_version(&response.version);
            let mtime = response.mtime.map(|mtime| mtime.into());

            buffer_handle.update(&mut cx, |buffer, cx| {
                buffer.did_save(version.clone(), mtime, cx);
            })?;

            Ok(())
        })
    }

    /// Handles one message of the buffer-streaming protocol: the initial
    /// `State` creates a pending buffer; subsequent `Chunk`s apply operations
    /// to it. On the final chunk the buffer is promoted out of
    /// `loading_remote_buffers_by_id`, waiting listeners are resolved, and
    /// the completed buffer is returned. Errors at any stage are fanned out
    /// to the listeners instead.
    pub fn handle_create_buffer_for_peer(
        &mut self,
        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
        replica_id: u16,
        capability: Capability,
        cx: &mut Context<BufferStore>,
    ) -> Result<Option<Entity<Buffer>>> {
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("missing variant"))?
        {
            proto::create_buffer_for_peer::Variant::State(mut state) => {
                let buffer_id = BufferId::new(state.id)?;

                let buffer_result = maybe!({
                    let mut buffer_file = None;
                    if let Some(file) = state.file.take() {
                        let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id);
                        let worktree = self
                            .worktree_store
                            .read(cx)
                            .worktree_for_id(worktree_id, cx)
                            .ok_or_else(|| {
                                anyhow!("no worktree found for id {}", file.worktree_id)
                            })?;
                        buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
                            as Arc<dyn language::File>);
                    }
                    Buffer::from_proto(replica_id, capability, state, buffer_file)
                });

                match buffer_result {
                    Ok(buffer) => {
                        let buffer = cx.new(|_| buffer);
                        self.loading_remote_buffers_by_id.insert(buffer_id, buffer);
                    }
                    Err(error) => {
                        // Deserialization failed: notify everyone waiting for
                        // this buffer rather than leaving them hanging.
                        if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
                            for listener in listeners {
                                listener.send(Err(anyhow!(error.cloned()))).ok();
                            }
                        }
                    }
                }
            }
            proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
                let buffer_id = BufferId::new(chunk.buffer_id)?;
                let buffer = self
                    .loading_remote_buffers_by_id
                    .get(&buffer_id)
                    .cloned()
                    .ok_or_else(|| {
                        anyhow!(
                            "received chunk for buffer {} without initial state",
                            chunk.buffer_id
                        )
                    })?;

                let result = maybe!({
                    let operations = chunk
                        .operations
                        .into_iter()
                        .map(language::proto::deserialize_operation)
                        .collect::<Result<Vec<_>>>()?;
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx));
                    anyhow::Ok(())
                });

                if let Err(error) = result {
                    // A bad chunk poisons the whole load; discard the partial
                    // buffer and propagate the error to waiters.
                    self.loading_remote_buffers_by_id.remove(&buffer_id);
                    if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
                        for listener in listeners {
                            listener.send(Err(error.cloned())).ok();
                        }
                    }
                } else if chunk.is_last {
                    self.loading_remote_buffers_by_id.remove(&buffer_id);
                    if self.upstream_client.is_via_collab() {
                        // retain buffers sent by peers to avoid races.
                        self.shared_with_me.insert(buffer.clone());
                    }

                    if let Some(senders) = self.remote_buffer_listeners.remove(&buffer_id) {
                        for sender in senders {
                            sender.send(Ok(buffer.clone())).ok();
                        }
                    }
                    return Ok(Some(buffer));
                }
            }
        }
        return Ok(None);
    }

    /// Ids of buffers whose streamed state has not finished arriving yet.
    pub fn incomplete_buffer_ids(&self) -> Vec<BufferId> {
        self.loading_remote_buffers_by_id
            .keys()
            .copied()
            .collect::<Vec<_>>()
    }

    /// Reconstructs a `ProjectTransaction` from its wire representation:
    /// waits for each referenced buffer to arrive, deserializes its
    /// transaction, waits for the transaction's edits to be applied, and
    /// optionally pushes it onto each buffer's undo history.
    pub fn deserialize_project_transaction(
        &self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(|this, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
            {
                let buffer_id = BufferId::new(buffer_id)?;
                let buffer = this
                    .update(&mut cx, |this, cx| {
                        this.wait_for_remote_buffer(buffer_id, cx)
                    })?
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                buffer
                    .update(&mut cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })?
                    .await?;

                if push_to_history {
                    buffer.update(&mut cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    })?;
                }
            }

            Ok(project_transaction)
        })
    }

    /// Opens a buffer by path: asks the upstream host to open it, then waits
    /// for the buffer's state to be streamed down to this replica.
    fn open_buffer(
        &self,
        path: Arc<Path>,
        worktree: Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let worktree_id = worktree.read(cx).id().to_proto();
        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        let path_string = path.clone().to_string_lossy().to_string();
        cx.spawn(move |this, mut cx| async move {
            let response = client
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id,
                    path: path_string,
                })
                .await?;
            let buffer_id = BufferId::new(response.buffer_id)?;

            let buffer = this
                .update(&mut cx, {
                    |this, cx| this.wait_for_remote_buffer(buffer_id, cx)
                })?
                .await?;

            Ok(buffer)
        })
    }

    /// Creates a new, empty untitled buffer on the upstream host and waits
    /// for it to be streamed to this replica.
    fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
        let create = self.upstream_client.request(proto::OpenNewBuffer {
            project_id: self.project_id,
        });
        cx.spawn(|this, mut cx| async move {
            let response = create.await?;
            let buffer_id = BufferId::new(response.buffer_id)?;

            this.update(&mut cx, |this, cx| {
                this.wait_for_remote_buffer(buffer_id, cx)
            })?
            .await
        })
    }

    /// Asks the upstream host to reload the given buffers from disk and
    /// returns the resulting project transaction, deserialized locally.
    fn reload_buffers(
        &self,
        buffers: HashSet<Entity<Buffer>>,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        let request = self.upstream_client.request(proto::ReloadBuffers {
            project_id: self.project_id,
            buffer_ids: buffers
                .iter()
                .map(|buffer| buffer.read(cx).remote_id().to_proto())
                .collect(),
        });

        cx.spawn(|this, mut cx| async move {
            let response = request
                .await?
                .transaction
                .ok_or_else(|| anyhow!("missing transaction"))?;
            this.update(&mut cx, |this, cx| {
                this.deserialize_project_transaction(response, push_to_history, cx)
            })?
            .await
        })
    }
}
646
impl LocalBufferStore {
    /// Resolves the worktree and worktree-relative path backing `buffer`,
    /// if the buffer has a file and its worktree is still present.
    fn worktree_for_buffer(
        &self,
        buffer: &Entity<Buffer>,
        cx: &App,
    ) -> Option<(Entity<Worktree>, Arc<Path>)> {
        let file = buffer.read(cx).file()?;
        let worktree_id = file.worktree_id(cx);
        let path = file.path().clone();
        let worktree = self
            .worktree_store
            .read(cx)
            .worktree_for_id(worktree_id, cx)?;
        Some((worktree, path))
    }

    /// Loads the staged (git index) text for `buffer` from its worktree.
    fn load_staged_text(&self, buffer: &Entity<Buffer>, cx: &App) -> Task<Result<Option<String>>> {
        if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) {
            worktree.read(cx).load_staged_file(path.as_ref(), cx)
        } else {
            return Task::ready(Err(anyhow!("no such worktree")));
        }
    }

    /// Loads the committed (HEAD) text for `buffer` from its worktree.
    fn load_committed_text(
        &self,
        buffer: &Entity<Buffer>,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        if let Some((worktree, path)) = self.worktree_for_buffer(buffer, cx) {
            worktree.read(cx).load_committed_file(path.as_ref(), cx)
        } else {
            Task::ready(Err(anyhow!("no such worktree")))
        }
    }

    /// Writes `buffer_handle`'s contents to `path` within `worktree`, then
    /// notifies any downstream client and updates the buffer's saved state.
    /// `has_changed_file` is forced true for previously-unsaved (New) files
    /// so the new file metadata is propagated.
    fn save_local_buffer(
        &self,
        buffer_handle: Entity<Buffer>,
        worktree: Entity<Worktree>,
        path: Arc<Path>,
        mut has_changed_file: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<()>> {
        let buffer = buffer_handle.read(cx);

        // Snapshot everything needed before going async.
        let text = buffer.as_rope().clone();
        let line_ending = buffer.line_ending();
        let version = buffer.version();
        let buffer_id = buffer.remote_id();
        if buffer
            .file()
            .is_some_and(|file| file.disk_state() == DiskState::New)
        {
            has_changed_file = true;
        }

        let save = worktree.update(cx, |worktree, cx| {
            worktree.write_file(path.as_ref(), text, line_ending, cx)
        });

        cx.spawn(move |this, mut cx| async move {
            let new_file = save.await?;
            let mtime = new_file.disk_state().mtime();
            // Forward file/save metadata to the downstream collaborator, if
            // this project is shared. Send failures are logged, not fatal.
            this.update(&mut cx, |this, cx| {
                if let Some((downstream_client, project_id)) = this.downstream_client.clone() {
                    if has_changed_file {
                        downstream_client
                            .send(proto::UpdateBufferFile {
                                project_id,
                                buffer_id: buffer_id.to_proto(),
                                file: Some(language::File::to_proto(&*new_file, cx)),
                            })
                            .log_err();
                    }
                    downstream_client
                        .send(proto::BufferSaved {
                            project_id,
                            buffer_id: buffer_id.to_proto(),
                            version: serialize_version(&version),
                            mtime: mtime.map(|time| time.into()),
                        })
                        .log_err();
                }
            })?;
            buffer_handle.update(&mut cx, |buffer, cx| {
                if has_changed_file {
                    buffer.file_updated(new_file, cx);
                }
                buffer.did_save(version.clone(), mtime, cx);
            })
        })
    }

    /// Subscribes to a worktree's events so buffer/file bookkeeping and git
    /// diff bases stay in sync with filesystem changes. Only local worktree
    /// events are handled.
    fn subscribe_to_worktree(
        &mut self,
        worktree: &Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) {
        cx.subscribe(worktree, |this, worktree, event, cx| {
            if worktree.read(cx).is_local() {
                match event {
                    worktree::Event::UpdatedEntries(changes) => {
                        Self::local_worktree_entries_changed(this, &worktree, changes, cx);
                    }
                    worktree::Event::UpdatedGitRepositories(updated_repos) => {
                        Self::local_worktree_git_repos_changed(
                            this,
                            worktree.clone(),
                            updated_repos,
                            cx,
                        )
                    }
                    _ => {}
                }
            }
        })
        .detach();
    }

    /// Applies a batch of worktree entry changes, updating each affected
    /// buffer's file state individually.
    fn local_worktree_entries_changed(
        this: &mut BufferStore,
        worktree_handle: &Entity<Worktree>,
        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
        cx: &mut Context<BufferStore>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        for (path, entry_id, _) in changes {
            Self::local_worktree_entry_changed(
                this,
                *entry_id,
                path,
                worktree_handle,
                &snapshot,
                cx,
            );
        }
    }

    /// Reacts to git repository changes in a worktree: for every open buffer
    /// inside a changed repository, reloads whichever diff base texts
    /// (staged/committed) that buffer's diff state actually needs, then
    /// applies the changes and forwards them downstream.
    fn local_worktree_git_repos_changed(
        this: &mut BufferStore,
        worktree_handle: Entity<Worktree>,
        changed_repos: &UpdatedGitRepositoriesSet,
        cx: &mut Context<BufferStore>,
    ) {
        debug_assert!(worktree_handle.read(cx).is_local());

        // Collect, on the main thread, the set of buffers that need fresh
        // base texts and which kinds each one needs.
        let mut diff_state_updates = Vec::new();
        for buffer in this.opened_buffers.values() {
            let OpenBuffer::Complete { buffer, diff_state } = buffer else {
                continue;
            };
            let Some(buffer) = buffer.upgrade() else {
                continue;
            };
            let buffer = buffer.read(cx);
            let Some(file) = File::from_dyn(buffer.file()) else {
                continue;
            };
            if file.worktree != worktree_handle {
                continue;
            }
            let diff_state = diff_state.read(cx);
            if changed_repos
                .iter()
                .any(|(work_dir, _)| file.path.starts_with(work_dir))
            {
                let snapshot = buffer.text_snapshot();
                diff_state_updates.push((
                    snapshot.clone(),
                    file.path.clone(),
                    // Only fetch base texts for diffs that are still alive.
                    diff_state
                        .unstaged_diff
                        .as_ref()
                        .and_then(|set| set.upgrade())
                        .is_some(),
                    diff_state
                        .uncommitted_diff
                        .as_ref()
                        .and_then(|set| set.upgrade())
                        .is_some(),
                ))
            }
        }

        if diff_state_updates.is_empty() {
            return;
        }

        cx.spawn(move |this, mut cx| async move {
            let snapshot =
                worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
            // Load index/committed texts off the main thread.
            let diff_bases_changes_by_buffer = cx
                .background_executor()
                .spawn(async move {
                    diff_state_updates
                        .into_iter()
                        .filter_map(
                            |(buffer_snapshot, path, needs_staged_text, needs_committed_text)| {
                                let local_repo = snapshot.local_repo_for_path(&path)?;
                                let relative_path = local_repo.relativize(&path).ok()?;
                                let staged_text = if needs_staged_text {
                                    local_repo.repo().load_index_text(&relative_path)
                                } else {
                                    None
                                };
                                let committed_text = if needs_committed_text {
                                    local_repo.repo().load_committed_text(&relative_path)
                                } else {
                                    None
                                };
                                let diff_bases_change =
                                    match (needs_staged_text, needs_committed_text) {
                                        (true, true) => Some(if staged_text == committed_text {
                                            DiffBasesChange::SetBoth(committed_text)
                                        } else {
                                            DiffBasesChange::SetEach {
                                                index: staged_text,
                                                head: committed_text,
                                            }
                                        }),
                                        (true, false) => {
                                            Some(DiffBasesChange::SetIndex(staged_text))
                                        }
                                        (false, true) => {
                                            Some(DiffBasesChange::SetHead(committed_text))
                                        }
                                        (false, false) => None,
                                    };
                                Some((buffer_snapshot, diff_bases_change))
                            },
                        )
                        .collect::<Vec<_>>()
                })
                .await;

            // Back on the main thread: apply each change and mirror it to
            // the downstream client if the project is shared.
            this.update(&mut cx, |this, cx| {
                for (buffer_snapshot, diff_bases_change) in diff_bases_changes_by_buffer {
                    let Some(OpenBuffer::Complete { diff_state, .. }) =
                        this.opened_buffers.get_mut(&buffer_snapshot.remote_id())
                    else {
                        continue;
                    };
                    let Some(diff_bases_change) = diff_bases_change else {
                        continue;
                    };

                    diff_state.update(cx, |diff_state, cx| {
                        use proto::update_diff_bases::Mode;

                        if let Some((client, project_id)) = this.downstream_client.as_ref() {
                            let buffer_id = buffer_snapshot.remote_id().to_proto();
                            let (staged_text, committed_text, mode) = match diff_bases_change
                                .clone()
                            {
                                DiffBasesChange::SetIndex(index) => (index, None, Mode::IndexOnly),
                                DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                DiffBasesChange::SetEach { index, head } => {
                                    (index, head, Mode::IndexAndHead)
                                }
                                DiffBasesChange::SetBoth(text) => {
                                    (None, text, Mode::IndexMatchesHead)
                                }
                            };
                            let message = proto::UpdateDiffBases {
                                project_id: *project_id,
                                buffer_id,
                                staged_text,
                                committed_text,
                                mode: mode as i32,
                            };

                            client.send(message).log_err();
                        }

                        let _ =
                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                    });
                }
            })
        })
        .detach_and_log_err(cx);
    }

    /// Handles a single worktree entry change for a potentially-open buffer:
    /// rebuilds the buffer's `File` (new mtime, rename, or deletion),
    /// maintains the path/entry-id reverse indices, notifies downstream, and
    /// emits `BufferChangedFilePath` on renames. Returns `None` in all cases;
    /// the `Option` is only used for early exits via `?`.
    fn local_worktree_entry_changed(
        this: &mut BufferStore,
        entry_id: ProjectEntryId,
        path: &Arc<Path>,
        worktree: &Entity<worktree::Worktree>,
        snapshot: &worktree::Snapshot,
        cx: &mut Context<BufferStore>,
    ) -> Option<()> {
        let project_path = ProjectPath {
            worktree_id: snapshot.id(),
            path: path.clone(),
        };

        // Look up the buffer by entry id first, falling back to path.
        let buffer_id = {
            let local = this.as_local_mut()?;
            match local.local_buffer_ids_by_entry_id.get(&entry_id) {
                Some(&buffer_id) => buffer_id,
                None => local.local_buffer_ids_by_path.get(&project_path).copied()?,
            }
        };

        let buffer = if let Some(buffer) = this.get(buffer_id) {
            Some(buffer)
        } else {
            // The buffer was dropped; clean up the stale entry.
            this.opened_buffers.remove(&buffer_id);
            None
        };

        let buffer = if let Some(buffer) = buffer {
            buffer
        } else {
            // No live buffer: drop its reverse-index entries and stop.
            let this = this.as_local_mut()?;
            this.local_buffer_ids_by_path.remove(&project_path);
            this.local_buffer_ids_by_entry_id.remove(&entry_id);
            return None;
        };

        let events = buffer.update(cx, |buffer, cx| {
            let local = this.as_local_mut()?;
            let file = buffer.file()?;
            let old_file = File::from_dyn(Some(file))?;
            if old_file.worktree != *worktree {
                return None;
            }

            // Find the entry in the new snapshot by id, then by path.
            let snapshot_entry = old_file
                .entry_id
                .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                .or_else(|| snapshot.entry_for_path(old_file.path.as_ref()));

            let new_file = if let Some(entry) = snapshot_entry {
                File {
                    disk_state: match entry.mtime {
                        Some(mtime) => DiskState::Present { mtime },
                        None => old_file.disk_state,
                    },
                    is_local: true,
                    entry_id: Some(entry.id),
                    path: entry.path.clone(),
                    worktree: worktree.clone(),
                    is_private: entry.is_private,
                }
            } else {
                // The entry is gone from the snapshot: the file was deleted.
                File {
                    disk_state: DiskState::Deleted,
                    is_local: true,
                    entry_id: old_file.entry_id,
                    path: old_file.path.clone(),
                    worktree: worktree.clone(),
                    is_private: old_file.is_private,
                }
            };

            if new_file == *old_file {
                return None;
            }

            let mut events = Vec::new();
            if new_file.path != old_file.path {
                // Rename: re-key the path index and queue a file-path event.
                local.local_buffer_ids_by_path.remove(&ProjectPath {
                    path: old_file.path.clone(),
                    worktree_id: old_file.worktree_id(cx),
                });
                local.local_buffer_ids_by_path.insert(
                    ProjectPath {
                        worktree_id: new_file.worktree_id(cx),
                        path: new_file.path.clone(),
                    },
                    buffer_id,
                );
                events.push(BufferStoreEvent::BufferChangedFilePath {
                    buffer: cx.entity(),
                    old_file: buffer.file().cloned(),
                });
            }

            if new_file.entry_id != old_file.entry_id {
                // Entry id changed (e.g. delete + recreate): re-key that
                // index too.
                if let Some(entry_id) = old_file.entry_id {
                    local.local_buffer_ids_by_entry_id.remove(&entry_id);
                }
                if let Some(entry_id) = new_file.entry_id {
                    local
                        .local_buffer_ids_by_entry_id
                        .insert(entry_id, buffer_id);
                }
            }

            if let Some((client, project_id)) = &this.downstream_client {
                client
                    .send(proto::UpdateBufferFile {
                        project_id: *project_id,
                        buffer_id: buffer_id.to_proto(),
                        file: Some(new_file.to_proto(cx)),
                    })
                    .ok();
            }

            buffer.file_updated(Arc::new(new_file), cx);
            Some(events)
        })?;

        for event in events {
            cx.emit(event);
        }

        None
    }

    /// Records a buffer's file in the reverse indices after its file changed
    /// (e.g. first save). Returns `None` if the buffer has no local file or
    /// its entry was already registered.
    fn buffer_changed_file(&mut self, buffer: Entity<Buffer>, cx: &mut App) -> Option<()> {
        let file = File::from_dyn(buffer.read(cx).file())?;

        let remote_id = buffer.read(cx).remote_id();
        if let Some(entry_id) = file.entry_id {
            match self.local_buffer_ids_by_entry_id.get(&entry_id) {
                Some(_) => {
                    return None;
                }
                None => {
                    self.local_buffer_ids_by_entry_id
                        .insert(entry_id, remote_id);
                }
            }
        };
        self.local_buffer_ids_by_path.insert(
            ProjectPath {
                worktree_id: file.worktree_id(cx),
                path: file.path.clone(),
            },
            remote_id,
        );

        Some(())
    }

    /// Saves a buffer back to its existing path.
    fn save_buffer(
        &self,
        buffer: Entity<Buffer>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<()>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer doesn't have a file")));
        };
        let worktree = file.worktree.clone();
        self.save_local_buffer(buffer, worktree, file.path.clone(), false, cx)
    }

    /// Saves a buffer to a new project path ("save as"), forcing file-change
    /// propagation.
    fn save_buffer_as(
        &self,
        buffer: Entity<Buffer>,
        path: ProjectPath,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<()>> {
        let Some(worktree) = self
            .worktree_store
            .read(cx)
            .worktree_for_id(path.worktree_id, cx)
        else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };
        self.save_local_buffer(buffer, worktree, path.path.clone(), true, cx)
    }

    /// Opens a buffer from disk. If the file does not exist yet, an empty
    /// buffer with `DiskState::New` is created instead, so that saving it
    /// later will create the file. The buffer is registered with the store
    /// and its reverse indices before being returned.
    fn open_buffer(
        &self,
        path: Arc<Path>,
        worktree: Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let load_file = worktree.load_file(path.as_ref(), cx);
            // Reserve the entity up front so the buffer id (derived from the
            // entity id) is stable before the file finishes loading.
            let reservation = cx.reserve_entity();
            let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
            cx.spawn(move |_, mut cx| async move {
                let loaded = load_file.await?;
                let text_buffer = cx
                    .background_executor()
                    .spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) })
                    .await;
                cx.insert_entity(reservation, |_| {
                    Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
                })
            })
        });

        cx.spawn(move |this, mut cx| async move {
            let buffer = match load_buffer.await {
                Ok(buffer) => Ok(buffer),
                // Missing file: create a new, empty, not-yet-on-disk buffer.
                Err(error) if is_not_found_error(&error) => cx.new(|cx| {
                    let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
                    let text_buffer = text::Buffer::new(0, buffer_id, "".into());
                    Buffer::build(
                        text_buffer,
                        Some(Arc::new(File {
                            worktree,
                            path,
                            disk_state: DiskState::New,
                            entry_id: None,
                            is_local: true,
                            is_private: false,
                        })),
                        Capability::ReadWrite,
                    )
                }),
                Err(e) => Err(e),
            }?;
            this.update(&mut cx, |this, cx| {
                this.add_buffer(buffer.clone(), cx)?;
                let buffer_id = buffer.read(cx).remote_id();
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    let this = this.as_local_mut().unwrap();
                    this.local_buffer_ids_by_path.insert(
                        ProjectPath {
                            worktree_id: file.worktree_id(cx),
                            path: file.path.clone(),
                        },
                        buffer_id,
                    );

                    if let Some(entry_id) = file.entry_id {
                        this.local_buffer_ids_by_entry_id
                            .insert(entry_id, buffer_id);
                    }
                }

                anyhow::Ok(())
            })??;

            Ok(buffer)
        })
    }

    /// Creates a new, empty untitled buffer with the plain-text language and
    /// registers it with the store.
    fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
        cx.spawn(|buffer_store, mut cx| async move {
            let buffer =
                cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx))?;
            buffer_store.update(&mut cx, |buffer_store, cx| {
                buffer_store.add_buffer(buffer.clone(), cx).log_err();
            })?;
            Ok(buffer)
        })
    }

    /// Reloads each buffer from disk. Reload transactions are collected into
    /// the returned `ProjectTransaction`; when `push_to_history` is false
    /// they are removed from each buffer's undo history.
    fn reload_buffers(
        &self,
        buffers: HashSet<Entity<Buffer>>,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(move |_, mut cx| async move {
            let mut project_transaction = ProjectTransaction::default();
            for buffer in buffers {
                let transaction = buffer
                    .update(&mut cx, |buffer, cx| buffer.reload(cx))?
                    .await?;
                buffer.update(&mut cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.entity(), transaction);
                    }
                })?;
            }

            Ok(project_transaction)
        })
    }
}
1219
1220impl BufferStore {
    /// Registers this type's RPC message and request handlers on `client`.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_request_handler(Self::handle_save_buffer);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_request_handler(Self::handle_reload_buffers);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
    }
1233
    /// Creates a buffer store that operates on local worktrees.
    ///
    /// The store subscribes to `worktree_store` so that every worktree added
    /// later is registered via `subscribe_to_worktree`.
    pub fn local(worktree_store: Entity<WorktreeStore>, cx: &mut Context<Self>) -> Self {
        Self {
            state: BufferStoreState::Local(LocalBufferStore {
                local_buffer_ids_by_path: Default::default(),
                local_buffer_ids_by_entry_id: Default::default(),
                worktree_store: worktree_store.clone(),
                // Keep the subscription alive for the lifetime of the store.
                _subscription: cx.subscribe(&worktree_store, |this, _, event, cx| {
                    if let WorktreeStoreEvent::WorktreeAdded(worktree) = event {
                        let this = this.as_local_mut().unwrap();
                        this.subscribe_to_worktree(worktree, cx);
                    }
                }),
            }),
            downstream_client: None,
            opened_buffers: Default::default(),
            shared_buffers: Default::default(),
            loading_buffers: Default::default(),
            loading_diffs: Default::default(),
            worktree_store,
        }
    }
1256
    /// Creates a buffer store that mirrors a project hosted by a remote peer,
    /// identified by `remote_id`, communicating through `upstream_client`.
    pub fn remote(
        worktree_store: Entity<WorktreeStore>,
        upstream_client: AnyProtoClient,
        remote_id: u64,
        _cx: &mut Context<Self>,
    ) -> Self {
        Self {
            state: BufferStoreState::Remote(RemoteBufferStore {
                shared_with_me: Default::default(),
                loading_remote_buffers_by_id: Default::default(),
                remote_buffer_listeners: Default::default(),
                project_id: remote_id,
                upstream_client,
                worktree_store: worktree_store.clone(),
            }),
            downstream_client: None,
            opened_buffers: Default::default(),
            loading_buffers: Default::default(),
            loading_diffs: Default::default(),
            shared_buffers: Default::default(),
            worktree_store,
        }
    }
1280
1281 fn as_local_mut(&mut self) -> Option<&mut LocalBufferStore> {
1282 match &mut self.state {
1283 BufferStoreState::Local(state) => Some(state),
1284 _ => None,
1285 }
1286 }
1287
1288 fn as_remote_mut(&mut self) -> Option<&mut RemoteBufferStore> {
1289 match &mut self.state {
1290 BufferStoreState::Remote(state) => Some(state),
1291 _ => None,
1292 }
1293 }
1294
1295 fn as_remote(&self) -> Option<&RemoteBufferStore> {
1296 match &self.state {
1297 BufferStoreState::Remote(state) => Some(state),
1298 _ => None,
1299 }
1300 }
1301
    /// Opens the buffer at `project_path`, returning an already-open buffer
    /// when one exists for that path.
    ///
    /// Concurrent calls for the same path share a single load: the in-flight
    /// shared future is stored in `loading_buffers` and cloned for each
    /// caller.
    pub fn open_buffer(
        &mut self,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        if let Some(buffer) = self.get_by_path(&project_path, cx) {
            return Task::ready(Ok(buffer));
        }

        let task = match self.loading_buffers.entry(project_path.clone()) {
            // Another caller is already loading this path; await the same task.
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                let path = project_path.path.clone();
                let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(project_path.worktree_id, cx)
                else {
                    return Task::ready(Err(anyhow!("no such worktree")));
                };
                let load_buffer = match &self.state {
                    BufferStoreState::Local(this) => this.open_buffer(path, worktree, cx),
                    BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, mut cx| async move {
                            let load_result = load_buffer.await;
                            this.update(&mut cx, |this, _cx| {
                                // Record the fact that the buffer is no longer loading.
                                this.loading_buffers.remove(&project_path);
                            })
                            .ok();
                            // Wrap the error in an Arc so the shared future
                            // can be cloned by multiple awaiters.
                            load_result.map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_executor()
            .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1347
    /// Returns the unstaged diff for `buffer`, creating it if necessary.
    ///
    /// Concurrent requests for the same buffer share one in-flight task via
    /// `loading_diffs`.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff) = self.get_unstaged_diff(buffer_id, cx) {
            return Task::ready(Ok(diff));
        }

        let task = match self.loading_diffs.entry((buffer_id, DiffKind::Unstaged)) {
            // A load is already underway; share its future.
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                // Local stores read the index text themselves; remote stores
                // request it from the host.
                let staged_text = match &self.state {
                    BufferStoreState::Local(this) => this.load_staged_text(&buffer, cx),
                    BufferStoreState::Remote(this) => this.open_unstaged_diff(buffer_id, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, cx| async move {
                            Self::open_diff_internal(
                                this,
                                DiffKind::Unstaged,
                                staged_text.await.map(DiffBasesChange::SetIndex),
                                buffer,
                                cx,
                            )
                            .await
                            .map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_executor()
            .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1388
    /// Returns the uncommitted diff for `buffer`, creating it if necessary.
    ///
    /// Concurrent requests for the same buffer share one in-flight task via
    /// `loading_diffs`.
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        if let Some(diff) = self.get_uncommitted_diff(buffer_id, cx) {
            return Task::ready(Ok(diff));
        }

        let task = match self.loading_diffs.entry((buffer_id, DiffKind::Uncommitted)) {
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                let changes = match &self.state {
                    BufferStoreState::Local(this) => {
                        let committed_text = this.load_committed_text(&buffer, cx);
                        let staged_text = this.load_staged_text(&buffer, cx);
                        cx.background_executor().spawn(async move {
                            let committed_text = committed_text.await?;
                            let staged_text = staged_text.await?;
                            // When the committed and staged texts agree, a
                            // single shared base suffices; otherwise keep both.
                            let diff_bases_change = if committed_text == staged_text {
                                DiffBasesChange::SetBoth(committed_text)
                            } else {
                                DiffBasesChange::SetEach {
                                    index: staged_text,
                                    head: committed_text,
                                }
                            };
                            Ok(diff_bases_change)
                        })
                    }
                    BufferStoreState::Remote(this) => this.open_uncommitted_diff(buffer_id, cx),
                };

                entry
                    .insert(
                        cx.spawn(move |this, cx| async move {
                            Self::open_diff_internal(
                                this,
                                DiffKind::Uncommitted,
                                changes.await,
                                buffer,
                                cx,
                            )
                            .await
                            .map_err(Arc::new)
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        cx.background_executor()
            .spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
1445
    /// Second half of diff opening: installs the loaded diff bases on the
    /// buffer's diff state and constructs the resulting `BufferDiff` entity.
    ///
    /// On failure the corresponding `loading_diffs` entry is removed so a
    /// later call can retry.
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer: Entity<Buffer>,
        mut cx: AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the in-flight marker before propagating the error.
                this.update(&mut cx, |this, cx| {
                    let buffer_id = buffer.read(cx).remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(&mut cx, |this, cx| {
            let buffer_id = buffer.read(cx).remote_id();
            this.loading_diffs.remove(&(buffer_id, kind));

            if let Some(OpenBuffer::Complete { diff_state, .. }) =
                this.opened_buffers.get_mut(&buffer.read(cx).remote_id())
            {
                diff_state.update(cx, |diff_state, cx| {
                    let buffer_id = buffer.read(cx).remote_id();
                    // Lazily subscribe so `buffer_language_changed` runs
                    // whenever the buffer's language changes.
                    diff_state.buffer_subscription.get_or_insert_with(|| {
                        cx.subscribe(&buffer, |this, buffer, event, cx| match event {
                            BufferEvent::LanguageChanged => {
                                this.buffer_language_changed(buffer, cx)
                            }
                            _ => {}
                        })
                    });

                    let diff = cx.new(|cx| BufferDiff {
                        buffer_id,
                        snapshot: BufferDiffSnapshot::new(&buffer.read(cx).text_snapshot()),
                        unstaged_diff: None,
                    });
                    match kind {
                        DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                        DiffKind::Uncommitted => {
                            // An uncommitted diff references an unstaged diff;
                            // create an empty one if none exists yet.
                            let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                                diff
                            } else {
                                let unstaged_diff = cx.new(|cx| BufferDiff {
                                    buffer_id,
                                    snapshot: BufferDiffSnapshot::new(
                                        &buffer.read(cx).text_snapshot(),
                                    ),
                                    unstaged_diff: None,
                                });
                                diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                                unstaged_diff
                            };

                            diff.update(cx, |diff, _| {
                                diff.unstaged_diff = Some(unstaged_diff);
                            });
                            diff_state.uncommitted_diff = Some(diff.downgrade())
                        }
                    };

                    let buffer = buffer.read(cx).text_snapshot();
                    // Wait for the diff state to finish applying the new bases
                    // before returning the diff to the caller.
                    let rx = diff_state.diff_bases_changed(buffer, diff_bases_change, cx);

                    Ok(async move {
                        rx.await.ok();
                        Ok(diff)
                    })
                })
            } else {
                Err(anyhow!("buffer was closed"))
            }
        })??
        .await
    }
1525
1526 pub fn create_buffer(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<Buffer>>> {
1527 match &self.state {
1528 BufferStoreState::Local(this) => this.create_buffer(cx),
1529 BufferStoreState::Remote(this) => this.create_buffer(cx),
1530 }
1531 }
1532
1533 pub fn save_buffer(
1534 &mut self,
1535 buffer: Entity<Buffer>,
1536 cx: &mut Context<Self>,
1537 ) -> Task<Result<()>> {
1538 match &mut self.state {
1539 BufferStoreState::Local(this) => this.save_buffer(buffer, cx),
1540 BufferStoreState::Remote(this) => this.save_remote_buffer(buffer.clone(), None, cx),
1541 }
1542 }
1543
1544 pub fn save_buffer_as(
1545 &mut self,
1546 buffer: Entity<Buffer>,
1547 path: ProjectPath,
1548 cx: &mut Context<Self>,
1549 ) -> Task<Result<()>> {
1550 let old_file = buffer.read(cx).file().cloned();
1551 let task = match &self.state {
1552 BufferStoreState::Local(this) => this.save_buffer_as(buffer.clone(), path, cx),
1553 BufferStoreState::Remote(this) => {
1554 this.save_remote_buffer(buffer.clone(), Some(path.to_proto()), cx)
1555 }
1556 };
1557 cx.spawn(|this, mut cx| async move {
1558 task.await?;
1559 this.update(&mut cx, |_, cx| {
1560 cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
1561 })
1562 })
1563 }
1564
    /// Computes Git blame information for `buffer`, optionally at a historical
    /// `version` of its contents.
    ///
    /// Returns `Ok(None)` when the file is not inside a Git repository. For
    /// remote worktrees the request is forwarded over RPC to the host.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some(file) = File::from_dyn(buffer.file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        match file.worktree.clone().read(cx) {
            Worktree::Local(worktree) => {
                let worktree = worktree.snapshot();
                // Gather everything the background task needs while app state
                // is still accessible.
                let blame_params = maybe!({
                    let local_repo = match worktree.local_repo_for_path(&file.path) {
                        Some(repo_for_path) => repo_for_path,
                        // Not under a Git repository: blame is simply absent.
                        None => return Ok(None),
                    };

                    let relative_path = local_repo
                        .relativize(&file.path)
                        .context("failed to relativize buffer path")?;

                    let repo = local_repo.repo().clone();

                    // Blame either a specific version of the text or the
                    // current contents.
                    let content = match version {
                        Some(version) => buffer.rope_for_version(&version).clone(),
                        None => buffer.as_rope().clone(),
                    };

                    anyhow::Ok(Some((repo, relative_path, content)))
                });

                cx.background_executor().spawn(async move {
                    let Some((repo, relative_path, content)) = blame_params? else {
                        return Ok(None);
                    };
                    repo.blame(&relative_path, content)
                        .with_context(|| format!("Failed to blame {:?}", relative_path.0))
                        .map(Some)
                })
            }
            Worktree::Remote(worktree) => {
                // Guests don't have repository access; delegate to the host.
                let buffer_id = buffer.remote_id();
                let version = buffer.version();
                let project_id = worktree.project_id();
                let client = worktree.client();
                cx.spawn(|_| async move {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id,
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                })
            }
        }
    }
1626
    /// Builds a web permalink to the given line `selection` of `buffer` on
    /// its Git hosting provider, using the `origin` remote and the current
    /// HEAD commit.
    ///
    /// For Rust files outside any repository, a permalink may be derived from
    /// Cargo registry metadata instead. Remote worktrees forward the request
    /// to the host over RPC.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &App,
    ) -> Task<Result<url::Url>> {
        let buffer = buffer.read(cx);
        let Some(file) = File::from_dyn(buffer.file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        match file.worktree.read(cx) {
            Worktree::Local(worktree) => {
                let worktree_path = worktree.abs_path().clone();
                let Some((repo_entry, repo)) =
                    worktree.repository_for_path(file.path()).and_then(|entry| {
                        let repo = worktree.get_local_repo(&entry)?.repo().clone();
                        Some((entry, repo))
                    })
                else {
                    // If we're not in a Git repo, check whether this is a Rust source
                    // file in the Cargo registry (presumably opened with go-to-definition
                    // from a normal Rust file). If so, we can put together a permalink
                    // using crate metadata.
                    if !buffer
                        .language()
                        .is_some_and(|lang| lang.name() == "Rust".into())
                    {
                        return Task::ready(Err(anyhow!("no permalink available")));
                    }
                    let file_path = worktree_path.join(file.path());
                    return cx.spawn(|cx| async move {
                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;
                        get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                            .map_err(|_| anyhow!("no permalink available"))
                    });
                };

                // The permalink path must be relative to the repository root,
                // not the worktree root.
                let path = match repo_entry.relativize(file.path()) {
                    Ok(RepoPath(path)) => path,
                    Err(e) => return Task::ready(Err(e)),
                };

                cx.spawn(|cx| async move {
                    const REMOTE_NAME: &str = "origin";
                    let origin_url = repo
                        .remote_url(REMOTE_NAME)
                        .ok_or_else(|| anyhow!("remote \"{REMOTE_NAME}\" not found"))?;

                    // Pin the permalink to the current HEAD commit so it
                    // stays valid as the branch moves on.
                    let sha = repo
                        .head_sha()
                        .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;

                    let provider_registry =
                        cx.update(GitHostingProviderRegistry::default_global)?;

                    let (provider, remote) =
                        parse_git_remote_url(provider_registry, &origin_url)
                            .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;

                    let path = path
                        .to_str()
                        .ok_or_else(|| anyhow!("failed to convert path to string"))?;

                    Ok(provider.build_permalink(
                        remote,
                        BuildPermalinkParams {
                            sha: &sha,
                            path,
                            selection: Some(selection),
                        },
                    ))
                })
            }
            Worktree::Remote(worktree) => {
                // Guests can't inspect the repository; ask the host.
                let buffer_id = buffer.remote_id();
                let project_id = worktree.project_id();
                let client = worktree.client();
                cx.spawn(|_| async move {
                    let response = client
                        .request(proto::GetPermalinkToLine {
                            project_id,
                            buffer_id: buffer_id.into(),
                            selection: Some(proto::Range {
                                start: selection.start as u64,
                                end: selection.end as u64,
                            }),
                        })
                        .await?;

                    url::Url::parse(&response.permalink).context("failed to parse permalink")
                })
            }
        }
    }
1723
1724 fn add_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) -> Result<()> {
1725 let remote_id = buffer.read(cx).remote_id();
1726 let is_remote = buffer.read(cx).replica_id() != 0;
1727 let open_buffer = OpenBuffer::Complete {
1728 buffer: buffer.downgrade(),
1729 diff_state: cx.new(|_| BufferDiffState::default()),
1730 };
1731
1732 let handle = cx.entity().downgrade();
1733 buffer.update(cx, move |_, cx| {
1734 cx.on_release(move |buffer, cx| {
1735 handle
1736 .update(cx, |_, cx| {
1737 cx.emit(BufferStoreEvent::BufferDropped(buffer.remote_id()))
1738 })
1739 .ok();
1740 })
1741 .detach()
1742 });
1743
1744 match self.opened_buffers.entry(remote_id) {
1745 hash_map::Entry::Vacant(entry) => {
1746 entry.insert(open_buffer);
1747 }
1748 hash_map::Entry::Occupied(mut entry) => {
1749 if let OpenBuffer::Operations(operations) = entry.get_mut() {
1750 buffer.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx));
1751 } else if entry.get().upgrade().is_some() {
1752 if is_remote {
1753 return Ok(());
1754 } else {
1755 debug_panic!("buffer {} was already registered", remote_id);
1756 Err(anyhow!("buffer {} was already registered", remote_id))?;
1757 }
1758 }
1759 entry.insert(open_buffer);
1760 }
1761 }
1762
1763 cx.subscribe(&buffer, Self::on_buffer_event).detach();
1764 cx.emit(BufferStoreEvent::BufferAdded(buffer));
1765 Ok(())
1766 }
1767
1768 pub fn buffers(&self) -> impl '_ + Iterator<Item = Entity<Buffer>> {
1769 self.opened_buffers
1770 .values()
1771 .filter_map(|buffer| buffer.upgrade())
1772 }
1773
1774 pub fn loading_buffers(
1775 &self,
1776 ) -> impl Iterator<Item = (&ProjectPath, impl Future<Output = Result<Entity<Buffer>>>)> {
1777 self.loading_buffers.iter().map(|(path, task)| {
1778 let task = task.clone();
1779 (path, async move { task.await.map_err(|e| anyhow!("{e}")) })
1780 })
1781 }
1782
1783 pub fn get_by_path(&self, path: &ProjectPath, cx: &App) -> Option<Entity<Buffer>> {
1784 self.buffers().find_map(|buffer| {
1785 let file = File::from_dyn(buffer.read(cx).file())?;
1786 if file.worktree_id(cx) == path.worktree_id && file.path == path.path {
1787 Some(buffer)
1788 } else {
1789 None
1790 }
1791 })
1792 }
1793
1794 pub fn get(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1795 self.opened_buffers.get(&buffer_id)?.upgrade()
1796 }
1797
1798 pub fn get_existing(&self, buffer_id: BufferId) -> Result<Entity<Buffer>> {
1799 self.get(buffer_id)
1800 .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
1801 }
1802
1803 pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1804 self.get(buffer_id).or_else(|| {
1805 self.as_remote()
1806 .and_then(|remote| remote.loading_remote_buffers_by_id.get(&buffer_id).cloned())
1807 })
1808 }
1809
1810 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
1811 if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
1812 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
1813 } else {
1814 None
1815 }
1816 }
1817
1818 pub fn get_uncommitted_diff(
1819 &self,
1820 buffer_id: BufferId,
1821 cx: &App,
1822 ) -> Option<Entity<BufferDiff>> {
1823 if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
1824 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
1825 } else {
1826 None
1827 }
1828 }
1829
1830 pub fn buffer_version_info(&self, cx: &App) -> (Vec<proto::BufferVersion>, Vec<BufferId>) {
1831 let buffers = self
1832 .buffers()
1833 .map(|buffer| {
1834 let buffer = buffer.read(cx);
1835 proto::BufferVersion {
1836 id: buffer.remote_id().into(),
1837 version: language::proto::serialize_version(&buffer.version),
1838 }
1839 })
1840 .collect();
1841 let incomplete_buffer_ids = self
1842 .as_remote()
1843 .map(|remote| remote.incomplete_buffer_ids())
1844 .unwrap_or_default();
1845 (buffers, incomplete_buffer_ids)
1846 }
1847
    /// Transitions the store into a disconnected state after losing the
    /// connection to the remote host: pending waits are abandoned, all
    /// buffers become read-only, and remote-buffer listeners are dropped.
    pub fn disconnected_from_host(&mut self, cx: &mut App) {
        // Unblock any tasks currently waiting on buffer versions/operations.
        for open_buffer in self.opened_buffers.values_mut() {
            if let Some(buffer) = open_buffer.upgrade() {
                buffer.update(cx, |buffer, _| buffer.give_up_waiting());
            }
        }

        // Without a host connection, edits can no longer be synchronized.
        for buffer in self.buffers() {
            buffer.update(cx, |buffer, cx| {
                buffer.set_capability(Capability::ReadOnly, cx)
            });
        }

        if let Some(remote) = self.as_remote_mut() {
            // Wake up all futures currently waiting on a buffer to get opened,
            // to give them a chance to fail now that we've disconnected.
            remote.remote_buffer_listeners.clear()
        }
    }
1867
    /// Marks this store as shared with a downstream client under `remote_id`,
    /// so subsequent buffer state changes are forwarded to that client.
    pub fn shared(&mut self, remote_id: u64, downstream_client: AnyProtoClient, _cx: &mut App) {
        self.downstream_client = Some((downstream_client, remote_id));
    }
1871
1872 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
1873 self.downstream_client.take();
1874 self.forget_shared_buffers();
1875 }
1876
1877 pub fn discard_incomplete(&mut self) {
1878 self.opened_buffers
1879 .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
1880 }
1881
    /// Streams buffers that are candidates for matching `query`, up to
    /// `limit` results.
    ///
    /// Unnamed (fileless) buffers are sent first and counted against the
    /// limit; the worktree store then supplies candidate project paths, which
    /// are opened in batches and forwarded on the returned channel.
    pub fn find_search_candidates(
        &mut self,
        query: &SearchQuery,
        mut limit: usize,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Receiver<Entity<Buffer>> {
        let (tx, rx) = smol::channel::unbounded();
        let mut open_buffers = HashSet::default();
        let mut unnamed_buffers = Vec::new();
        for handle in self.buffers() {
            let buffer = handle.read(cx);
            if let Some(entry_id) = buffer.entry_id(cx) {
                open_buffers.insert(entry_id);
            } else {
                // Buffers without a file can't be found by path scanning, so
                // they are emitted directly.
                limit = limit.saturating_sub(1);
                unnamed_buffers.push(handle)
            };
        }

        // Bound how many buffer opens are awaited per batch.
        const MAX_CONCURRENT_BUFFER_OPENS: usize = 64;
        let project_paths_rx = self
            .worktree_store
            .update(cx, |worktree_store, cx| {
                worktree_store.find_search_candidates(query.clone(), limit, open_buffers, fs, cx)
            })
            .chunks(MAX_CONCURRENT_BUFFER_OPENS);

        cx.spawn(|this, mut cx| async move {
            for buffer in unnamed_buffers {
                tx.send(buffer).await.ok();
            }

            let mut project_paths_rx = pin!(project_paths_rx);
            while let Some(project_paths) = project_paths_rx.next().await {
                let buffers = this.update(&mut cx, |this, cx| {
                    project_paths
                        .into_iter()
                        .map(|project_path| this.open_buffer(project_path, cx))
                        .collect::<Vec<_>>()
                })?;
                for buffer_task in buffers {
                    if let Some(buffer) = buffer_task.await.log_err() {
                        // A closed receiver means the consumer stopped
                        // listening; end the search early.
                        if tx.send(buffer).await.is_err() {
                            return anyhow::Ok(());
                        }
                    }
                }
            }
            anyhow::Ok(())
        })
        .detach();
        rx
    }
1936
    /// Kicks off diff recalculation for each of `buffers`, returning a future
    /// that resolves once every recalculation has completed.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> {
        let mut futures = Vec::new();
        for buffer in buffers {
            // Buffers without a complete entry in the store are skipped.
            if let Some(OpenBuffer::Complete { diff_state, .. }) =
                self.opened_buffers.get_mut(&buffer.read(cx).remote_id())
            {
                let buffer = buffer.read(cx).text_snapshot();
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer, cx)
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1957
    /// Reacts to events emitted by an open buffer.
    ///
    /// `FileHandleChanged` is delegated to the local store's
    /// `buffer_changed_file`; `Reloaded` forwards the buffer's post-reload
    /// state to the downstream collaborator, if any. Other events are ignored.
    fn on_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferEvent::FileHandleChanged => {
                // Only local stores track file state.
                if let Some(local) = self.as_local_mut() {
                    local.buffer_changed_file(buffer, cx);
                }
            }
            BufferEvent::Reloaded => {
                // Nothing to forward unless this project is shared downstream.
                let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else {
                    return;
                };
                let buffer = buffer.read(cx);
                downstream_client
                    .send(proto::BufferReloaded {
                        project_id: *project_id,
                        buffer_id: buffer.remote_id().to_proto(),
                        version: serialize_version(&buffer.version()),
                        mtime: buffer.saved_mtime().map(|t| t.into()),
                        line_ending: serialize_line_ending(buffer.line_ending()) as i32,
                    })
                    .log_err();
            }
            _ => {}
        }
    }
1988
1989 pub async fn handle_update_buffer(
1990 this: Entity<Self>,
1991 envelope: TypedEnvelope<proto::UpdateBuffer>,
1992 mut cx: AsyncApp,
1993 ) -> Result<proto::Ack> {
1994 let payload = envelope.payload.clone();
1995 let buffer_id = BufferId::new(payload.buffer_id)?;
1996 let ops = payload
1997 .operations
1998 .into_iter()
1999 .map(language::proto::deserialize_operation)
2000 .collect::<Result<Vec<_>, _>>()?;
2001 this.update(&mut cx, |this, cx| {
2002 match this.opened_buffers.entry(buffer_id) {
2003 hash_map::Entry::Occupied(mut e) => match e.get_mut() {
2004 OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
2005 OpenBuffer::Complete { buffer, .. } => {
2006 if let Some(buffer) = buffer.upgrade() {
2007 buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx));
2008 }
2009 }
2010 },
2011 hash_map::Entry::Vacant(e) => {
2012 e.insert(OpenBuffer::Operations(ops));
2013 }
2014 }
2015 Ok(proto::Ack {})
2016 })?
2017 }
2018
2019 pub fn register_shared_lsp_handle(
2020 &mut self,
2021 peer_id: proto::PeerId,
2022 buffer_id: BufferId,
2023 handle: OpenLspBufferHandle,
2024 ) {
2025 if let Some(shared_buffers) = self.shared_buffers.get_mut(&peer_id) {
2026 if let Some(buffer) = shared_buffers.get_mut(&buffer_id) {
2027 buffer.lsp_handle = Some(handle);
2028 return;
2029 }
2030 }
2031 debug_panic!("tried to register shared lsp handle, but buffer was not shared")
2032 }
2033
    /// Handles a guest's request to resynchronize its open buffers (e.g.
    /// after a reconnect).
    ///
    /// Resets the set of buffers shared with that guest, then, for each
    /// buffer the guest reports, replies with the host's version and sends
    /// the operations the guest is missing (in chunks), along with the
    /// buffer's current file and saved state.
    pub fn handle_synchronize_buffers(
        &mut self,
        envelope: TypedEnvelope<proto::SynchronizeBuffers>,
        cx: &mut Context<Self>,
        client: Arc<Client>,
    ) -> Result<proto::SynchronizeBuffersResponse> {
        let project_id = envelope.payload.project_id;
        let mut response = proto::SynchronizeBuffersResponse {
            buffers: Default::default(),
        };
        let Some(guest_id) = envelope.original_sender_id else {
            anyhow::bail!("missing original_sender_id on SynchronizeBuffers request");
        };

        // Start from a clean slate for this guest; entries are re-added below.
        self.shared_buffers.entry(guest_id).or_default().clear();
        for buffer in envelope.payload.buffers {
            let buffer_id = BufferId::new(buffer.id)?;
            let remote_version = language::proto::deserialize_version(&buffer.version);
            if let Some(buffer) = self.get(buffer_id) {
                self.shared_buffers
                    .entry(guest_id)
                    .or_default()
                    .entry(buffer_id)
                    .or_insert_with(|| SharedBuffer {
                        buffer: buffer.clone(),
                        diff: None,
                        lsp_handle: None,
                    });

                let buffer = buffer.read(cx);
                response.buffers.push(proto::BufferVersion {
                    id: buffer_id.into(),
                    version: language::proto::serialize_version(&buffer.version),
                });

                // Serialize only the operations the guest hasn't seen yet.
                let operations = buffer.serialize_ops(Some(remote_version), cx);
                let client = client.clone();
                if let Some(file) = buffer.file() {
                    client
                        .send(proto::UpdateBufferFile {
                            project_id,
                            buffer_id: buffer_id.into(),
                            file: Some(file.to_proto(cx)),
                        })
                        .log_err();
                }

                // TODO(max): do something
                // client
                //     .send(proto::UpdateStagedText {
                //         project_id,
                //         buffer_id: buffer_id.into(),
                //         diff_base: buffer.diff_base().map(ToString::to_string),
                //     })
                //     .log_err();

                client
                    .send(proto::BufferReloaded {
                        project_id,
                        buffer_id: buffer_id.into(),
                        version: language::proto::serialize_version(buffer.saved_version()),
                        mtime: buffer.saved_mtime().map(|time| time.into()),
                        line_ending: language::proto::serialize_line_ending(buffer.line_ending())
                            as i32,
                    })
                    .log_err();

                // Send the missing operations in chunks on a background task.
                cx.background_executor()
                    .spawn(
                        async move {
                            let operations = operations.await;
                            for chunk in split_operations(operations) {
                                client
                                    .request(proto::UpdateBuffer {
                                        project_id,
                                        buffer_id: buffer_id.into(),
                                        operations: chunk,
                                    })
                                    .await?;
                            }
                            anyhow::Ok(())
                        }
                        .log_err(),
                    )
                    .detach();
            }
        }
        Ok(response)
    }
2123
2124 pub fn handle_create_buffer_for_peer(
2125 &mut self,
2126 envelope: TypedEnvelope<proto::CreateBufferForPeer>,
2127 replica_id: u16,
2128 capability: Capability,
2129 cx: &mut Context<Self>,
2130 ) -> Result<()> {
2131 let Some(remote) = self.as_remote_mut() else {
2132 return Err(anyhow!("buffer store is not a remote"));
2133 };
2134
2135 if let Some(buffer) =
2136 remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx)?
2137 {
2138 self.add_buffer(buffer, cx)?;
2139 }
2140
2141 Ok(())
2142 }
2143
    /// Handles an RPC notification that a buffer's underlying file changed.
    ///
    /// Updates the open buffer's file handle, emits `BufferChangedFilePath`
    /// when the path actually changed, and relays the message to any
    /// downstream collaborator.
    pub async fn handle_update_buffer_file(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateBufferFile>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = envelope.payload.buffer_id;
        let buffer_id = BufferId::new(buffer_id)?;

        this.update(&mut cx, |this, cx| {
            // The payload is cloned because the original message is forwarded
            // downstream at the end of this closure.
            let payload = envelope.payload.clone();
            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
                let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
                let worktree = this
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
                    .ok_or_else(|| anyhow!("no such worktree"))?;
                let file = File::from_proto(file, worktree, cx)?;
                let old_file = buffer.update(cx, |buffer, cx| {
                    let old_file = buffer.file().cloned();
                    let new_path = file.path.clone();
                    buffer.file_updated(Arc::new(file), cx);
                    // Only report a path change when the path really differs
                    // (or there was no previous file at all).
                    if old_file
                        .as_ref()
                        .map_or(true, |old| *old.path() != new_path)
                    {
                        Some(old_file)
                    } else {
                        None
                    }
                });
                if let Some(old_file) = old_file {
                    cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
                }
            }
            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
                downstream_client
                    .send(proto::UpdateBufferFile {
                        project_id: *project_id,
                        buffer_id: buffer_id.into(),
                        file: envelope.payload.file,
                    })
                    .log_err();
            }
            Ok(())
        })?
    }
2191
    /// Handles an RPC request from a guest to save a buffer, optionally under
    /// a new path.
    ///
    /// Waits for the local copy of the buffer to reach the requester's
    /// version before saving, then replies with the saved version and mtime.
    pub async fn handle_save_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let (buffer, project_id) = this.update(&mut cx, |this, _| {
            anyhow::Ok((
                this.get_existing(buffer_id)?,
                // Saving on behalf of a guest only makes sense when shared.
                this.downstream_client
                    .as_ref()
                    .map(|(_, project_id)| *project_id)
                    .context("project is not shared")?,
            ))
        })??;
        // Ensure the requester's edits have been applied locally before
        // writing to disk.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&envelope.payload.version))
            })?
            .await?;
        let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;

        if let Some(new_path) = envelope.payload.new_path {
            // "Save as": the guest requested a different destination path.
            let new_path = ProjectPath::from_proto(new_path);
            this.update(&mut cx, |this, cx| {
                this.save_buffer_as(buffer.clone(), new_path, cx)
            })?
            .await?;
        } else {
            this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
                .await?;
        }

        buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
            project_id,
            buffer_id: buffer_id.into(),
            version: serialize_version(buffer.saved_version()),
            mtime: buffer.saved_mtime().map(|time| time.into()),
        })
    }
2232
2233 pub async fn handle_close_buffer(
2234 this: Entity<Self>,
2235 envelope: TypedEnvelope<proto::CloseBuffer>,
2236 mut cx: AsyncApp,
2237 ) -> Result<()> {
2238 let peer_id = envelope.sender_id;
2239 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2240 this.update(&mut cx, |this, _| {
2241 if let Some(shared) = this.shared_buffers.get_mut(&peer_id) {
2242 if shared.remove(&buffer_id).is_some() {
2243 if shared.is_empty() {
2244 this.shared_buffers.remove(&peer_id);
2245 }
2246 return;
2247 }
2248 }
2249 debug_panic!(
2250 "peer_id {} closed buffer_id {} which was either not open or already closed",
2251 peer_id,
2252 buffer_id
2253 )
2254 })
2255 }
2256
    /// RPC handler for `proto::BufferSaved`: records that a buffer was saved
    /// upstream, updating the local copy and forwarding the notification to
    /// any downstream client.
    pub async fn handle_buffer_saved(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BufferSaved>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let mtime = envelope.payload.mtime.clone().map(|time| time.into());
        this.update(&mut cx, move |this, cx| {
            // The buffer may still be streaming in; apply the save state only
            // if we have at least a partial buffer for this id.
            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
                buffer.update(cx, |buffer, cx| {
                    buffer.did_save(version, mtime, cx);
                });
            }

            // Relay the event onward if this project is itself shared.
            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
                downstream_client
                    .send(proto::BufferSaved {
                        project_id: *project_id,
                        buffer_id: buffer_id.into(),
                        mtime: envelope.payload.mtime,
                        version: envelope.payload.version,
                    })
                    .log_err();
            }
        })
    }
2284
    /// RPC handler for `proto::BufferReloaded`: records that a buffer was
    /// reloaded from disk upstream (new version, line ending, and mtime) and
    /// forwards the notification to any downstream client.
    pub async fn handle_buffer_reloaded(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BufferReloaded>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let mtime = envelope.payload.mtime.clone().map(|time| time.into());
        let line_ending = deserialize_line_ending(
            proto::LineEnding::from_i32(envelope.payload.line_ending)
                .ok_or_else(|| anyhow!("missing line ending"))?,
        );
        this.update(&mut cx, |this, cx| {
            // The buffer may still be streaming in; apply the reload state
            // only if we have at least a partial buffer for this id.
            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
                buffer.update(cx, |buffer, cx| {
                    buffer.did_reload(version, line_ending, mtime, cx);
                });
            }

            // Relay the event onward if this project is itself shared.
            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
                downstream_client
                    .send(proto::BufferReloaded {
                        project_id: *project_id,
                        buffer_id: buffer_id.into(),
                        mtime: envelope.payload.mtime,
                        version: envelope.payload.version,
                        line_ending: envelope.payload.line_ending,
                    })
                    .log_err();
            }
        })
    }
2317
    /// RPC handler for `proto::BlameBuffer`: computes git blame for an open
    /// buffer on behalf of a remote peer.
    ///
    /// Waits for the buffer to reach the version the peer requested so the
    /// blame's line ranges line up with what the peer is displaying.
    pub async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })?
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })?
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
2338
2339 pub async fn handle_get_permalink_to_line(
2340 this: Entity<Self>,
2341 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2342 mut cx: AsyncApp,
2343 ) -> Result<proto::GetPermalinkToLineResponse> {
2344 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2345 // let version = deserialize_version(&envelope.payload.version);
2346 let selection = {
2347 let proto_selection = envelope
2348 .payload
2349 .selection
2350 .context("no selection to get permalink for defined")?;
2351 proto_selection.start as u32..proto_selection.end as u32
2352 };
2353 let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
2354 let permalink = this
2355 .update(&mut cx, |this, cx| {
2356 this.get_permalink_to_line(&buffer, selection, cx)
2357 })?
2358 .await?;
2359 Ok(proto::GetPermalinkToLineResponse {
2360 permalink: permalink.to_string(),
2361 })
2362 }
2363
    /// RPC handler for `proto::OpenUnstagedDiff`: opens (or retrieves) the
    /// unstaged diff for a buffer shared with the requesting peer and returns
    /// the staged base text, if any.
    pub async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })?
            .ok_or_else(|| anyhow!("no such buffer"))?
            .await?;
        // Retain the diff alongside the shared-buffer record so it stays
        // alive for as long as the peer holds the buffer open.
        this.update(&mut cx, |this, _| {
            let shared_buffers = this
                .shared_buffers
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            debug_assert!(shared_buffers.contains_key(&buffer_id));
            if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
                shared.diff = Some(diff.clone());
            }
        })?;
        // `staged_text` is `None` when the diff has no base text snapshot.
        let staged_text = diff.read_with(&cx, |diff, _| {
            diff.snapshot.base_text.as_ref().map(|buffer| buffer.text())
        })?;
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2392
    /// RPC handler for `proto::OpenUncommittedDiff`: opens (or retrieves) the
    /// uncommitted diff for a buffer shared with the requesting peer and
    /// returns the committed and staged base texts.
    ///
    /// The response is encoded compactly: when the staged base text is the
    /// same snapshot as the committed one, only the committed text is sent
    /// and the mode tells the receiver to reuse it.
    pub async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })?
            .ok_or_else(|| anyhow!("no such buffer"))?
            .await?;
        // Retain the diff alongside the shared-buffer record so it stays
        // alive for as long as the peer holds the buffer open.
        this.update(&mut cx, |this, _| {
            let shared_buffers = this
                .shared_buffers
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            debug_assert!(shared_buffers.contains_key(&buffer_id));
            if let Some(shared) = shared_buffers.get_mut(&buffer_id) {
                shared.diff = Some(diff.clone());
            }
        })?;
        diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // Base text of the nested unstaged diff, i.e. the index (staged)
            // contents, if that diff exists and has a base.
            let staged_buffer = diff
                .unstaged_diff
                .as_ref()
                .and_then(|diff| diff.read(cx).snapshot.base_text.as_ref());

            let mode;
            let staged_text;
            let committed_text;
            if let Some(committed_buffer) = &diff.snapshot.base_text {
                committed_text = Some(committed_buffer.text());
                if let Some(staged_buffer) = staged_buffer {
                    if staged_buffer.remote_id() == committed_buffer.remote_id() {
                        // Staged and committed bases are the same snapshot:
                        // skip sending the duplicate text.
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(staged_buffer.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = staged_buffer.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        })
    }
2454
    /// RPC handler for `proto::UpdateDiffBases`: applies updated base texts
    /// to the diff state of an open buffer, if that buffer is complete and
    /// still alive. Unknown or dropped buffers are silently ignored.
    pub async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(OpenBuffer::Complete { diff_state, buffer }) =
                this.opened_buffers.get_mut(&buffer_id)
            {
                if let Some(buffer) = buffer.upgrade() {
                    let buffer = buffer.read(cx).text_snapshot();
                    diff_state.update(cx, |diff_state, cx| {
                        diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                    })
                }
            }
        })
    }
2474
2475 pub fn reload_buffers(
2476 &self,
2477 buffers: HashSet<Entity<Buffer>>,
2478 push_to_history: bool,
2479 cx: &mut Context<Self>,
2480 ) -> Task<Result<ProjectTransaction>> {
2481 if buffers.is_empty() {
2482 return Task::ready(Ok(ProjectTransaction::default()));
2483 }
2484 match &self.state {
2485 BufferStoreState::Local(this) => this.reload_buffers(buffers, push_to_history, cx),
2486 BufferStoreState::Remote(this) => this.reload_buffers(buffers, push_to_history, cx),
2487 }
2488 }
2489
    /// RPC handler for `proto::ReloadBuffers`: reloads the requested buffers
    /// from disk and returns the resulting project transaction, serialized
    /// for the requesting peer.
    async fn handle_reload_buffers(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::ReloadBuffers>,
        mut cx: AsyncApp,
    ) -> Result<proto::ReloadBuffersResponse> {
        let sender_id = envelope.original_sender_id().unwrap_or_default();
        let reload = this.update(&mut cx, |this, cx| {
            // Resolve every requested id up front; an unknown buffer fails
            // the whole request rather than partially reloading.
            let mut buffers = HashSet::default();
            for buffer_id in &envelope.payload.buffer_ids {
                let buffer_id = BufferId::new(*buffer_id)?;
                buffers.insert(this.get_existing(buffer_id)?);
            }
            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
        })??;

        let project_transaction = reload.await?;
        // Ensure the peer has every involved buffer before sending the
        // transaction that references them.
        let project_transaction = this.update(&mut cx, |this, cx| {
            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
        })?;
        Ok(proto::ReloadBuffersResponse {
            transaction: Some(project_transaction),
        })
    }
2513
    /// Shares a buffer with the given peer, if it isn't already shared with
    /// them, by sending the buffer's full state followed by its operation
    /// history in chunks.
    ///
    /// Returns a ready task if the buffer is already shared with the peer or
    /// the project has no downstream client; otherwise the task completes
    /// once the state and all operation chunks have been sent.
    pub fn create_buffer_for_peer(
        &mut self,
        buffer: &Entity<Buffer>,
        peer_id: proto::PeerId,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let buffer_id = buffer.read(cx).remote_id();
        let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
        if shared_buffers.contains_key(&buffer_id) {
            return Task::ready(Ok(()));
        }
        // Record the share immediately (before the async send) so concurrent
        // calls for the same peer/buffer short-circuit above.
        shared_buffers.insert(
            buffer_id,
            SharedBuffer {
                buffer: buffer.clone(),
                diff: None,
                lsp_handle: None,
            },
        );

        let Some((client, project_id)) = self.downstream_client.clone() else {
            return Task::ready(Ok(()));
        };

        cx.spawn(|this, mut cx| async move {
            // The buffer may have been dropped between registration above and
            // this task actually running.
            let Some(buffer) = this.update(&mut cx, |this, _| this.get(buffer_id))? else {
                return anyhow::Ok(());
            };

            let operations = buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
            let operations = operations.await;
            let state = buffer.update(&mut cx, |buffer, cx| buffer.to_proto(cx))?;

            let initial_state = proto::CreateBufferForPeer {
                project_id,
                peer_id: Some(peer_id),
                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
            };

            // Only stream operation chunks if the initial state message was
            // sent successfully. The final chunk is flagged with `is_last`
            // so the receiver knows the buffer is complete.
            if client.send(initial_state).log_err().is_some() {
                let client = client.clone();
                cx.background_executor()
                    .spawn(async move {
                        let mut chunks = split_operations(operations).peekable();
                        while let Some(chunk) = chunks.next() {
                            let is_last = chunks.peek().is_none();
                            client.send(proto::CreateBufferForPeer {
                                project_id,
                                peer_id: Some(peer_id),
                                variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
                                    proto::BufferChunk {
                                        buffer_id: buffer_id.into(),
                                        operations: chunk,
                                        is_last,
                                    },
                                )),
                            })?;
                        }
                        anyhow::Ok(())
                    })
                    .await
                    .log_err();
            }
            Ok(())
        })
    }
2580
    /// Forgets all buffers shared with all peers (e.g. when unsharing the
    /// project).
    pub fn forget_shared_buffers(&mut self) {
        self.shared_buffers.clear();
    }
2584
    /// Forgets all buffers shared with a single peer (e.g. when that peer
    /// disconnects).
    pub fn forget_shared_buffers_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_buffers.remove(peer_id);
    }
2588
    /// Re-keys a peer's shared buffers under a new peer id (e.g. after the
    /// peer reconnects with a fresh id). No-op if the old id is unknown.
    pub fn update_peer_id(&mut self, old_peer_id: &proto::PeerId, new_peer_id: proto::PeerId) {
        if let Some(buffers) = self.shared_buffers.remove(old_peer_id) {
            self.shared_buffers.insert(new_peer_id, buffers);
        }
    }
2594
    /// Returns whether any buffer is currently shared with any peer.
    pub fn has_shared_buffers(&self) -> bool {
        !self.shared_buffers.is_empty()
    }
2598
    /// Creates a new local buffer with the given text and optional language
    /// (falling back to plain text), and registers it with this store.
    ///
    /// # Panics
    /// Panics (via `expect`) if called on a non-local buffer store.
    pub fn create_local_buffer(
        &mut self,
        text: &str,
        language: Option<Arc<Language>>,
        cx: &mut Context<Self>,
    ) -> Entity<Buffer> {
        let buffer = cx.new(|cx| {
            Buffer::local(text, cx)
                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
        });

        self.add_buffer(buffer.clone(), cx).log_err();
        let buffer_id = buffer.read(cx).remote_id();

        let this = self
            .as_local_mut()
            .expect("local-only method called in a non-local context");
        // If the buffer is backed by a worktree file, index it by path and
        // entry id so later file events can find it.
        if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
            this.local_buffer_ids_by_path.insert(
                ProjectPath {
                    worktree_id: file.worktree_id(cx),
                    path: file.path.clone(),
                },
                buffer_id,
            );

            if let Some(entry_id) = file.entry_id {
                this.local_buffer_ids_by_entry_id
                    .insert(entry_id, buffer_id);
            }
        }
        buffer
    }
2632
2633 pub fn deserialize_project_transaction(
2634 &mut self,
2635 message: proto::ProjectTransaction,
2636 push_to_history: bool,
2637 cx: &mut Context<Self>,
2638 ) -> Task<Result<ProjectTransaction>> {
2639 if let Some(this) = self.as_remote_mut() {
2640 this.deserialize_project_transaction(message, push_to_history, cx)
2641 } else {
2642 debug_panic!("not a remote buffer store");
2643 Task::ready(Err(anyhow!("not a remote buffer store")))
2644 }
2645 }
2646
2647 pub fn wait_for_remote_buffer(
2648 &mut self,
2649 id: BufferId,
2650 cx: &mut Context<BufferStore>,
2651 ) -> Task<Result<Entity<Buffer>>> {
2652 if let Some(this) = self.as_remote_mut() {
2653 this.wait_for_remote_buffer(id, cx)
2654 } else {
2655 debug_panic!("not a remote buffer store");
2656 Task::ready(Err(anyhow!("not a remote buffer store")))
2657 }
2658 }
2659
    /// Serializes a project transaction for transmission to `peer_id`,
    /// ensuring every buffer involved is shared with that peer first so the
    /// peer can resolve the buffer ids it receives.
    pub fn serialize_project_transaction_for_peer(
        &mut self,
        project_transaction: ProjectTransaction,
        peer_id: proto::PeerId,
        cx: &mut Context<Self>,
    ) -> proto::ProjectTransaction {
        let mut serialized_transaction = proto::ProjectTransaction {
            buffer_ids: Default::default(),
            transactions: Default::default(),
        };
        for (buffer, transaction) in project_transaction.0 {
            // Kick off sharing in the background; the peer buffers incoming
            // messages, so ordering relative to the response is handled there.
            self.create_buffer_for_peer(&buffer, peer_id, cx)
                .detach_and_log_err(cx);
            serialized_transaction
                .buffer_ids
                .push(buffer.read(cx).remote_id().into());
            serialized_transaction
                .transactions
                .push(language::proto::serialize_transaction(&transaction));
        }
        serialized_transaction
    }
2682}
2683
2684impl OpenBuffer {
2685 fn upgrade(&self) -> Option<Entity<Buffer>> {
2686 match self {
2687 OpenBuffer::Complete { buffer, .. } => buffer.upgrade(),
2688 OpenBuffer::Operations(_) => None,
2689 }
2690 }
2691}
2692
2693fn is_not_found_error(error: &anyhow::Error) -> bool {
2694 error
2695 .root_cause()
2696 .downcast_ref::<io::Error>()
2697 .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
2698}
2699
/// Converts an optional git blame result into its proto representation.
///
/// `None` (no blame available) maps to a response with an empty
/// `blame_response`; otherwise entries, commit messages, and permalinks are
/// each converted field-by-field.
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            // Oids travel as raw bytes on the wire.
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author.clone(),
            author_mail: entry.author_mail.clone(),
            author_time: entry.author_time,
            author_tz: entry.author_tz.clone(),
            committer: entry.committer.clone(),
            committer_mail: entry.committer_mail.clone(),
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz.clone(),
            summary: entry.summary.clone(),
            previous: entry.previous.clone(),
            filename: entry.filename.clone(),
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    let permalinks = blame
        .permalinks
        .into_iter()
        .map(|(oid, url)| proto::CommitPermalink {
            oid: oid.as_bytes().into(),
            permalink: url.to_string(),
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse {
            entries,
            messages,
            permalinks,
            remote_url: blame.remote_url,
        }),
    }
}
2756
/// Reconstructs a git blame result from its proto representation.
///
/// Returns `None` when the response carries no blame. Entries, messages, or
/// permalinks whose oid bytes (or permalink URL) fail to parse are silently
/// dropped rather than failing the whole conversion.
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_mail: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    let permalinks = response
        .permalinks
        .into_iter()
        .filter_map(|permalink| {
            Some((
                git::Oid::from_bytes(&permalink.oid).ok()?,
                Url::from_str(&permalink.permalink).ok()?,
            ))
        })
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        permalinks,
        messages,
        remote_url: response.remote_url,
    })
}
2808
/// Builds a permalink into the upstream repository for a file that lives in
/// a published crate's source (e.g. the local Cargo registry cache).
///
/// Walks up from `path` looking for the `.cargo_vcs_info.json` that cargo
/// embeds in published crates, reads the sibling `Cargo.toml` for its
/// `package.repository` URL, and combines the recorded commit sha with the
/// file's path inside the VCS to produce the permalink for `selection`.
///
/// # Errors
/// Fails if no `.cargo_vcs_info.json` is found in any ancestor directory,
/// if either metadata file cannot be read or parsed, or if the repository
/// URL is not recognized by any registered git hosting provider.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal shapes of the cargo metadata files; only the fields needed to
    // build the permalink are deserialized.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // `skip(1)` starts from the parent directory, since `path` is a file.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
    // `strip_prefix` cannot fail here: `dir` was found among `path`'s ancestors.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams {
            sha: &cargo_vcs_info.git.sha1,
            path: &path.to_string_lossy(),
            selection: Some(selection),
        },
    );
    Ok(permalink)
}