buffer_store.rs

   1use crate::{
   2    lsp_store::OpenLspBufferHandle,
   3    search::SearchQuery,
   4    worktree_store::{WorktreeStore, WorktreeStoreEvent},
   5    ProjectItem as _, ProjectPath,
   6};
   7use ::git::{parse_git_remote_url, BuildPermalinkParams, GitHostingProviderRegistry};
   8use anyhow::{anyhow, bail, Context as _, Result};
   9use client::Client;
  10use collections::{hash_map, HashMap, HashSet};
  11use fs::Fs;
  12use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt};
  13use git::{blame::Blame, repository::RepoPath};
  14use gpui::{
  15    App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
  16};
  17use language::{
  18    proto::{
  19        deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version,
  20        split_operations,
  21    },
  22    Buffer, BufferEvent, Capability, DiskState, File as _, Language, Operation,
  23};
  24use rpc::{
  25    proto::{self, ToProto},
  26    AnyProtoClient, ErrorExt as _, TypedEnvelope,
  27};
  28use serde::Deserialize;
  29use smol::channel::Receiver;
  30use std::{
  31    io,
  32    ops::Range,
  33    path::{Path, PathBuf},
  34    pin::pin,
  35    sync::Arc,
  36    time::Instant,
  37};
  38use text::BufferId;
  39use util::{debug_panic, maybe, ResultExt as _, TryFutureExt};
  40use worktree::{File, PathChange, ProjectEntryId, Worktree, WorktreeId};
  41
/// A set of open buffers.
pub struct BufferStore {
    // Local- vs. remote-specific behavior and bookkeeping.
    state: BufferStoreState,
    #[allow(clippy::type_complexity)]
    // In-flight open requests keyed by path; the task is `Shared` so
    // concurrent opens of the same path await a single load.
    loading_buffers: HashMap<ProjectPath, Shared<Task<Result<Entity<Buffer>, Arc<anyhow::Error>>>>>,
    worktree_store: Entity<WorktreeStore>,
    // Every buffer this store knows about, keyed by its remote id.
    opened_buffers: HashMap<BufferId, OpenBuffer>,
    // Client and project id used to forward updates downstream when this
    // project is being shared; `None` when not shared.
    downstream_client: Option<(AnyProtoClient, u64)>,
    // Buffers that have been sent to each peer, retained per peer.
    shared_buffers: HashMap<proto::PeerId, HashMap<BufferId, SharedBuffer>>,
}
  52
/// A buffer that has been shared with a remote peer, together with the
/// LSP handle (if any) that keeps its language-server registration open.
#[derive(Hash, Eq, PartialEq, Clone)]
struct SharedBuffer {
    buffer: Entity<Buffer>,
    lsp_handle: Option<OpenLspBufferHandle>,
}
  58
/// Whether this store operates on the local filesystem or proxies to an
/// upstream (remote) project host.
enum BufferStoreState {
    Local(LocalBufferStore),
    Remote(RemoteBufferStore),
}
  63
/// State for a buffer store whose buffers live on a remote host and are
/// streamed to this client over RPC.
struct RemoteBufferStore {
    // Buffers pushed by peers over collab; retained here so they stay
    // alive (see `handle_create_buffer_for_peer`).
    shared_with_me: HashSet<Entity<Buffer>>,
    upstream_client: AnyProtoClient,
    project_id: u64,
    // Buffers whose initial state arrived but whose operation chunks are
    // still streaming in.
    loading_remote_buffers_by_id: HashMap<BufferId, Entity<Buffer>>,
    // Channels resolved when the corresponding buffer finishes loading.
    remote_buffer_listeners:
        HashMap<BufferId, Vec<oneshot::Sender<Result<Entity<Buffer>, anyhow::Error>>>>,
    worktree_store: Entity<WorktreeStore>,
}
  73
/// State for a buffer store backed by local worktrees on disk.
struct LocalBufferStore {
    // Reverse indices from a buffer's project path / worktree entry id to
    // its buffer id; kept in sync as files move or are deleted.
    local_buffer_ids_by_path: HashMap<ProjectPath, BufferId>,
    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
    worktree_store: Entity<WorktreeStore>,
    // Keeps the worktree-store subscription alive for this store's lifetime.
    _subscription: Subscription,
}
  80
/// An entry in `opened_buffers`: either a fully-loaded buffer (held weakly)
/// or operations buffered for a buffer that has not finished loading yet.
enum OpenBuffer {
    Complete { buffer: WeakEntity<Buffer> },
    Operations(Vec<Operation>),
}
  85
/// Events emitted by a [`BufferStore`] as buffers are added, opened,
/// dropped, or change identity on disk.
pub enum BufferStoreEvent {
    /// A buffer was registered with the store.
    BufferAdded(Entity<Buffer>),
    /// A buffer was opened (or re-opened) at the given project path.
    BufferOpened {
        buffer: Entity<Buffer>,
        project_path: ProjectPath,
    },
    /// A peer closed a buffer that had been shared with it.
    SharedBufferClosed(proto::PeerId, BufferId),
    /// All strong handles to the buffer were dropped.
    BufferDropped(BufferId),
    /// A buffer's file moved to a new path; `old_file` is the previous file.
    BufferChangedFilePath {
        buffer: Entity<Buffer>,
        old_file: Option<Arc<dyn language::File>>,
    },
}
  99
/// The per-buffer transactions produced by one logical project-wide
/// operation, keyed by the buffer each transaction applies to.
#[derive(Default, Debug)]
pub struct ProjectTransaction(pub HashMap<Entity<Buffer>, language::Transaction>);
 102
 103impl EventEmitter<BufferStoreEvent> for BufferStore {}
 104
impl RemoteBufferStore {
    /// Returns a task that resolves with the buffer identified by `id`:
    /// immediately if it is already open, otherwise once the host finishes
    /// streaming it to this client (see `handle_create_buffer_for_peer`).
    pub fn wait_for_remote_buffer(
        &mut self,
        id: BufferId,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let (tx, rx) = oneshot::channel();
        // The listener is registered before the open-buffer check below, so a
        // buffer that completes in between still resolves this channel.
        self.remote_buffer_listeners.entry(id).or_default().push(tx);

        cx.spawn(async move |this, cx| {
            if let Some(buffer) = this
                .read_with(cx, |buffer_store, _| buffer_store.get(id))
                .ok()
                .flatten()
            {
                return Ok(buffer);
            }

            cx.background_spawn(async move { rx.await? }).await
        })
    }

    /// Asks the upstream host to save the buffer (optionally to `new_path`),
    /// then records the acknowledged version and mtime on the local replica
    /// via `did_save`.
    fn save_remote_buffer(
        &self,
        buffer_handle: Entity<Buffer>,
        new_path: Option<proto::ProjectPath>,
        cx: &Context<BufferStore>,
    ) -> Task<Result<()>> {
        let buffer = buffer_handle.read(cx);
        let buffer_id = buffer.remote_id().into();
        let version = buffer.version();
        let rpc = self.upstream_client.clone();
        let project_id = self.project_id;
        cx.spawn(async move |_, cx| {
            let response = rpc
                .request(proto::SaveBuffer {
                    project_id,
                    buffer_id,
                    new_path,
                    version: serialize_version(&version),
                })
                .await?;
            let version = deserialize_version(&response.version);
            let mtime = response.mtime.map(|mtime| mtime.into());

            buffer_handle.update(cx, |buffer, cx| {
                buffer.did_save(version.clone(), mtime, cx);
            })?;

            Ok(())
        })
    }

    /// Handles one message of the host's buffer-streaming protocol.
    ///
    /// A buffer arrives as a `State` message (its initial contents and file),
    /// followed by zero or more `Chunk` messages carrying serialized
    /// operations. Returns `Ok(Some(buffer))` only when the final chunk has
    /// been applied; any failure tears down the partial buffer and notifies
    /// pending `wait_for_remote_buffer` listeners of the error.
    pub fn handle_create_buffer_for_peer(
        &mut self,
        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
        replica_id: u16,
        capability: Capability,
        cx: &mut Context<BufferStore>,
    ) -> Result<Option<Entity<Buffer>>> {
        match envelope
            .payload
            .variant
            .ok_or_else(|| anyhow!("missing variant"))?
        {
            proto::create_buffer_for_peer::Variant::State(mut state) => {
                let buffer_id = BufferId::new(state.id)?;

                let buffer_result = maybe!({
                    let mut buffer_file = None;
                    if let Some(file) = state.file.take() {
                        let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id);
                        let worktree = self
                            .worktree_store
                            .read(cx)
                            .worktree_for_id(worktree_id, cx)
                            .ok_or_else(|| {
                                anyhow!("no worktree found for id {}", file.worktree_id)
                            })?;
                        buffer_file = Some(Arc::new(File::from_proto(file, worktree.clone(), cx)?)
                            as Arc<dyn language::File>);
                    }
                    Buffer::from_proto(replica_id, capability, state, buffer_file)
                });

                match buffer_result {
                    Ok(buffer) => {
                        let buffer = cx.new(|_| buffer);
                        // Parked here until the last chunk arrives.
                        self.loading_remote_buffers_by_id.insert(buffer_id, buffer);
                    }
                    Err(error) => {
                        // Fail every waiter; the error is cloned per listener.
                        if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
                            for listener in listeners {
                                listener.send(Err(anyhow!(error.cloned()))).ok();
                            }
                        }
                    }
                }
            }
            proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
                let buffer_id = BufferId::new(chunk.buffer_id)?;
                let buffer = self
                    .loading_remote_buffers_by_id
                    .get(&buffer_id)
                    .cloned()
                    .ok_or_else(|| {
                        anyhow!(
                            "received chunk for buffer {} without initial state",
                            chunk.buffer_id
                        )
                    })?;

                let result = maybe!({
                    let operations = chunk
                        .operations
                        .into_iter()
                        .map(language::proto::deserialize_operation)
                        .collect::<Result<Vec<_>>>()?;
                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx));
                    anyhow::Ok(())
                });

                if let Err(error) = result {
                    // Abandon the partial buffer and propagate the failure.
                    self.loading_remote_buffers_by_id.remove(&buffer_id);
                    if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
                        for listener in listeners {
                            listener.send(Err(error.cloned())).ok();
                        }
                    }
                } else if chunk.is_last {
                    self.loading_remote_buffers_by_id.remove(&buffer_id);
                    if self.upstream_client.is_via_collab() {
                        // retain buffers sent by peers to avoid races.
                        self.shared_with_me.insert(buffer.clone());
                    }

                    // Wake everyone waiting in `wait_for_remote_buffer`.
                    if let Some(senders) = self.remote_buffer_listeners.remove(&buffer_id) {
                        for sender in senders {
                            sender.send(Ok(buffer.clone())).ok();
                        }
                    }
                    return Ok(Some(buffer));
                }
            }
        }
        return Ok(None);
    }

    /// Ids of buffers whose initial state arrived but which are still
    /// awaiting operation chunks.
    pub fn incomplete_buffer_ids(&self) -> Vec<BufferId> {
        self.loading_remote_buffers_by_id
            .keys()
            .copied()
            .collect::<Vec<_>>()
    }

    /// Reconstructs a [`ProjectTransaction`] from its wire form, waiting for
    /// each referenced buffer (and the edits each transaction mentions) to
    /// arrive. When `push_to_history` is set, the transactions are also
    /// recorded in each buffer's undo history.
    pub fn deserialize_project_transaction(
        &self,
        message: proto::ProjectTransaction,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(async move |this, cx| {
            let mut project_transaction = ProjectTransaction::default();
            for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
            {
                let buffer_id = BufferId::new(buffer_id)?;
                let buffer = this
                    .update(cx, |this, cx| this.wait_for_remote_buffer(buffer_id, cx))?
                    .await?;
                let transaction = language::proto::deserialize_transaction(transaction)?;
                project_transaction.0.insert(buffer, transaction);
            }

            for (buffer, transaction) in &project_transaction.0 {
                // Ensure the edits referenced by the transaction have been
                // applied locally before exposing the transaction.
                buffer
                    .update(cx, |buffer, _| {
                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
                    })?
                    .await?;

                if push_to_history {
                    buffer.update(cx, |buffer, _| {
                        buffer.push_transaction(transaction.clone(), Instant::now());
                    })?;
                }
            }

            Ok(project_transaction)
        })
    }

    /// Opens the buffer at `path` in `worktree` by asking the host to open
    /// it, then waiting for the buffer to be streamed to this client.
    fn open_buffer(
        &self,
        path: Arc<Path>,
        worktree: Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let worktree_id = worktree.read(cx).id().to_proto();
        let project_id = self.project_id;
        let client = self.upstream_client.clone();
        cx.spawn(async move |this, cx| {
            let response = client
                .request(proto::OpenBufferByPath {
                    project_id,
                    worktree_id,
                    path: path.to_proto(),
                })
                .await?;
            let buffer_id = BufferId::new(response.buffer_id)?;

            let buffer = this
                .update(cx, {
                    |this, cx| this.wait_for_remote_buffer(buffer_id, cx)
                })?
                .await?;

            Ok(buffer)
        })
    }

    /// Asks the host to create a new, empty buffer, then waits for it to be
    /// streamed to this client.
    fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
        let create = self.upstream_client.request(proto::OpenNewBuffer {
            project_id: self.project_id,
        });
        cx.spawn(async move |this, cx| {
            let response = create.await?;
            let buffer_id = BufferId::new(response.buffer_id)?;

            this.update(cx, |this, cx| this.wait_for_remote_buffer(buffer_id, cx))?
                .await
        })
    }

    /// Asks the host to reload the given buffers from disk and returns the
    /// resulting project transaction (optionally pushed to undo history).
    fn reload_buffers(
        &self,
        buffers: HashSet<Entity<Buffer>>,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        let request = self.upstream_client.request(proto::ReloadBuffers {
            project_id: self.project_id,
            buffer_ids: buffers
                .iter()
                .map(|buffer| buffer.read(cx).remote_id().to_proto())
                .collect(),
        });

        cx.spawn(async move |this, cx| {
            let response = request
                .await?
                .transaction
                .ok_or_else(|| anyhow!("missing transaction"))?;
            this.update(cx, |this, cx| {
                this.deserialize_project_transaction(response, push_to_history, cx)
            })?
            .await
        })
    }
}
 364
impl LocalBufferStore {
    /// Writes the buffer's contents to `path` in `worktree`, then notifies
    /// the downstream client (if sharing) and the buffer itself.
    ///
    /// `has_changed_file` indicates the buffer's file identity changed (e.g.
    /// save-as); it is forced on when the buffer's file is still `New` on
    /// disk, since the first save materializes the file.
    fn save_local_buffer(
        &self,
        buffer_handle: Entity<Buffer>,
        worktree: Entity<Worktree>,
        path: Arc<Path>,
        mut has_changed_file: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<()>> {
        let buffer = buffer_handle.read(cx);

        let text = buffer.as_rope().clone();
        let line_ending = buffer.line_ending();
        let version = buffer.version();
        let buffer_id = buffer.remote_id();
        if buffer
            .file()
            .is_some_and(|file| file.disk_state() == DiskState::New)
        {
            has_changed_file = true;
        }

        let save = worktree.update(cx, |worktree, cx| {
            worktree.write_file(path.as_ref(), text, line_ending, cx)
        });

        cx.spawn(async move |this, cx| {
            let new_file = save.await?;
            let mtime = new_file.disk_state().mtime();
            this.update(cx, |this, cx| {
                if let Some((downstream_client, project_id)) = this.downstream_client.clone() {
                    if has_changed_file {
                        // Downstream peers need the new file metadata before
                        // the saved notification; failures are logged only.
                        downstream_client
                            .send(proto::UpdateBufferFile {
                                project_id,
                                buffer_id: buffer_id.to_proto(),
                                file: Some(language::File::to_proto(&*new_file, cx)),
                            })
                            .log_err();
                    }
                    downstream_client
                        .send(proto::BufferSaved {
                            project_id,
                            buffer_id: buffer_id.to_proto(),
                            version: serialize_version(&version),
                            mtime: mtime.map(|time| time.into()),
                        })
                        .log_err();
                }
            })?;
            buffer_handle.update(cx, |buffer, cx| {
                if has_changed_file {
                    buffer.file_updated(new_file, cx);
                }
                buffer.did_save(version.clone(), mtime, cx);
            })
        })
    }

    /// Watches a (local) worktree for entry changes so open buffers can be
    /// kept in sync with renames, deletions, and modifications on disk.
    fn subscribe_to_worktree(
        &mut self,
        worktree: &Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) {
        cx.subscribe(worktree, |this, worktree, event, cx| {
            if worktree.read(cx).is_local() {
                match event {
                    worktree::Event::UpdatedEntries(changes) => {
                        Self::local_worktree_entries_changed(this, &worktree, changes, cx);
                    }
                    _ => {}
                }
            }
        })
        .detach();
    }

    /// Applies a batch of worktree entry changes to any affected buffers.
    fn local_worktree_entries_changed(
        this: &mut BufferStore,
        worktree_handle: &Entity<Worktree>,
        changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
        cx: &mut Context<BufferStore>,
    ) {
        let snapshot = worktree_handle.read(cx).snapshot();
        for (path, entry_id, _) in changes {
            Self::local_worktree_entry_changed(
                this,
                *entry_id,
                path,
                worktree_handle,
                &snapshot,
                cx,
            );
        }
    }

    /// Reconciles a single changed worktree entry with the buffer (if any)
    /// that is open on it: updates the buffer's `File` (path, entry id, disk
    /// state), fixes the path/entry-id reverse indices, forwards the new file
    /// downstream, and emits `BufferChangedFilePath` on renames.
    ///
    /// Returns `Option` purely for `?`-style early exit; the value is unused.
    fn local_worktree_entry_changed(
        this: &mut BufferStore,
        entry_id: ProjectEntryId,
        path: &Arc<Path>,
        worktree: &Entity<worktree::Worktree>,
        snapshot: &worktree::Snapshot,
        cx: &mut Context<BufferStore>,
    ) -> Option<()> {
        let project_path = ProjectPath {
            worktree_id: snapshot.id(),
            path: path.clone(),
        };

        // Prefer lookup by entry id; fall back to the path index.
        let buffer_id = {
            let local = this.as_local_mut()?;
            match local.local_buffer_ids_by_entry_id.get(&entry_id) {
                Some(&buffer_id) => buffer_id,
                None => local.local_buffer_ids_by_path.get(&project_path).copied()?,
            }
        };

        let buffer = if let Some(buffer) = this.get(buffer_id) {
            Some(buffer)
        } else {
            // The buffer was dropped; clear the stale entry.
            this.opened_buffers.remove(&buffer_id);
            None
        };

        let buffer = if let Some(buffer) = buffer {
            buffer
        } else {
            // No live buffer: remove it from both reverse indices and stop.
            let this = this.as_local_mut()?;
            this.local_buffer_ids_by_path.remove(&project_path);
            this.local_buffer_ids_by_entry_id.remove(&entry_id);
            return None;
        };

        let events = buffer.update(cx, |buffer, cx| {
            let local = this.as_local_mut()?;
            let file = buffer.file()?;
            let old_file = File::from_dyn(Some(file))?;
            if old_file.worktree != *worktree {
                return None;
            }

            // Locate the entry in the new snapshot by id first, then by path.
            let snapshot_entry = old_file
                .entry_id
                .and_then(|entry_id| snapshot.entry_for_id(entry_id))
                .or_else(|| snapshot.entry_for_path(old_file.path.as_ref()));

            let new_file = if let Some(entry) = snapshot_entry {
                File {
                    disk_state: match entry.mtime {
                        Some(mtime) => DiskState::Present { mtime },
                        None => old_file.disk_state,
                    },
                    is_local: true,
                    entry_id: Some(entry.id),
                    path: entry.path.clone(),
                    worktree: worktree.clone(),
                    is_private: entry.is_private,
                }
            } else {
                // Entry vanished from the snapshot: the file was deleted.
                File {
                    disk_state: DiskState::Deleted,
                    is_local: true,
                    entry_id: old_file.entry_id,
                    path: old_file.path.clone(),
                    worktree: worktree.clone(),
                    is_private: old_file.is_private,
                }
            };

            if new_file == *old_file {
                return None;
            }

            let mut events = Vec::new();
            if new_file.path != old_file.path {
                // Rename: move the path index entry and report the change.
                local.local_buffer_ids_by_path.remove(&ProjectPath {
                    path: old_file.path.clone(),
                    worktree_id: old_file.worktree_id(cx),
                });
                local.local_buffer_ids_by_path.insert(
                    ProjectPath {
                        worktree_id: new_file.worktree_id(cx),
                        path: new_file.path.clone(),
                    },
                    buffer_id,
                );
                events.push(BufferStoreEvent::BufferChangedFilePath {
                    buffer: cx.entity(),
                    old_file: buffer.file().cloned(),
                });
            }

            if new_file.entry_id != old_file.entry_id {
                if let Some(entry_id) = old_file.entry_id {
                    local.local_buffer_ids_by_entry_id.remove(&entry_id);
                }
                if let Some(entry_id) = new_file.entry_id {
                    local
                        .local_buffer_ids_by_entry_id
                        .insert(entry_id, buffer_id);
                }
            }

            // Keep downstream peers' file metadata in sync; best-effort.
            if let Some((client, project_id)) = &this.downstream_client {
                client
                    .send(proto::UpdateBufferFile {
                        project_id: *project_id,
                        buffer_id: buffer_id.to_proto(),
                        file: Some(new_file.to_proto(cx)),
                    })
                    .ok();
            }

            buffer.file_updated(Arc::new(new_file), cx);
            Some(events)
        })?;

        // Emit outside the buffer update to avoid re-entrancy.
        for event in events {
            cx.emit(event);
        }

        None
    }

    /// Records a buffer's (new) file in the reverse indices after its file
    /// changed. Returns early if the entry id is already indexed.
    ///
    /// Returns `Option` purely for `?`-style early exit; the value is unused.
    fn buffer_changed_file(&mut self, buffer: Entity<Buffer>, cx: &mut App) -> Option<()> {
        let file = File::from_dyn(buffer.read(cx).file())?;

        let remote_id = buffer.read(cx).remote_id();
        if let Some(entry_id) = file.entry_id {
            match self.local_buffer_ids_by_entry_id.get(&entry_id) {
                Some(_) => {
                    return None;
                }
                None => {
                    self.local_buffer_ids_by_entry_id
                        .insert(entry_id, remote_id);
                }
            }
        };
        self.local_buffer_ids_by_path.insert(
            ProjectPath {
                worktree_id: file.worktree_id(cx),
                path: file.path.clone(),
            },
            remote_id,
        );

        Some(())
    }

    /// Saves a buffer back to its existing file.
    fn save_buffer(
        &self,
        buffer: Entity<Buffer>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<()>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer doesn't have a file")));
        };
        let worktree = file.worktree.clone();
        self.save_local_buffer(buffer, worktree, file.path.clone(), false, cx)
    }

    /// Saves a buffer to a new project path ("save as"); `has_changed_file`
    /// is forced on since the buffer's file identity changes.
    fn save_buffer_as(
        &self,
        buffer: Entity<Buffer>,
        path: ProjectPath,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<()>> {
        let Some(worktree) = self
            .worktree_store
            .read(cx)
            .worktree_for_id(path.worktree_id, cx)
        else {
            return Task::ready(Err(anyhow!("no such worktree")));
        };
        self.save_local_buffer(buffer, worktree, path.path.clone(), true, cx)
    }

    /// Loads the file at `path` in `worktree` into a new buffer and registers
    /// it with the store. If the file does not exist, an empty buffer with
    /// `DiskState::New` is created instead, so "open missing file" behaves
    /// like creating it.
    fn open_buffer(
        &self,
        path: Arc<Path>,
        worktree: Entity<Worktree>,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<Entity<Buffer>>> {
        let load_buffer = worktree.update(cx, |worktree, cx| {
            let load_file = worktree.load_file(path.as_ref(), cx);
            // Reserve the entity up front so the buffer id (derived from the
            // entity id) is fixed before the file finishes loading.
            let reservation = cx.reserve_entity();
            let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
            cx.spawn(async move |_, cx| {
                let loaded = load_file.await?;
                let text_buffer = cx
                    .background_spawn(async move { text::Buffer::new(0, buffer_id, loaded.text) })
                    .await;
                cx.insert_entity(reservation, |_| {
                    Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
                })
            })
        });

        cx.spawn(async move |this, cx| {
            let buffer = match load_buffer.await {
                Ok(buffer) => Ok(buffer),
                Err(error) if is_not_found_error(&error) => cx.new(|cx| {
                    let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
                    let text_buffer = text::Buffer::new(0, buffer_id, "".into());
                    Buffer::build(
                        text_buffer,
                        Some(Arc::new(File {
                            worktree,
                            path,
                            disk_state: DiskState::New,
                            entry_id: None,
                            is_local: true,
                            is_private: false,
                        })),
                        Capability::ReadWrite,
                    )
                }),
                Err(e) => Err(e),
            }?;
            this.update(cx, |this, cx| {
                this.add_buffer(buffer.clone(), cx)?;
                let buffer_id = buffer.read(cx).remote_id();
                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
                    // Index the new buffer by path and (when present) entry id.
                    let this = this.as_local_mut().unwrap();
                    this.local_buffer_ids_by_path.insert(
                        ProjectPath {
                            worktree_id: file.worktree_id(cx),
                            path: file.path.clone(),
                        },
                        buffer_id,
                    );

                    if let Some(entry_id) = file.entry_id {
                        this.local_buffer_ids_by_entry_id
                            .insert(entry_id, buffer_id);
                    }
                }

                anyhow::Ok(())
            })??;

            Ok(buffer)
        })
    }

    /// Creates a new, empty, plain-text buffer and registers it.
    fn create_buffer(&self, cx: &mut Context<BufferStore>) -> Task<Result<Entity<Buffer>>> {
        cx.spawn(async move |buffer_store, cx| {
            let buffer =
                cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx))?;
            buffer_store.update(cx, |buffer_store, cx| {
                buffer_store.add_buffer(buffer.clone(), cx).log_err();
            })?;
            Ok(buffer)
        })
    }

    /// Reloads each buffer from disk, collecting the resulting transactions.
    /// When `push_to_history` is false, the reload transactions are forgotten
    /// so they do not pollute the buffers' undo histories.
    fn reload_buffers(
        &self,
        buffers: HashSet<Entity<Buffer>>,
        push_to_history: bool,
        cx: &mut Context<BufferStore>,
    ) -> Task<Result<ProjectTransaction>> {
        cx.spawn(async move |_, cx| {
            let mut project_transaction = ProjectTransaction::default();
            for buffer in buffers {
                let transaction = buffer.update(cx, |buffer, cx| buffer.reload(cx))?.await?;
                buffer.update(cx, |buffer, cx| {
                    if let Some(transaction) = transaction {
                        if !push_to_history {
                            buffer.forget_transaction(transaction.id);
                        }
                        project_transaction.0.insert(cx.entity(), transaction);
                    }
                })?;
            }

            Ok(project_transaction)
        })
    }
}
 746
 747impl BufferStore {
    /// Registers the RPC handlers that route buffer-related messages and
    /// requests to `BufferStore` entities on the given client.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_message_handler(Self::handle_buffer_reloaded);
        client.add_entity_message_handler(Self::handle_buffer_saved);
        client.add_entity_message_handler(Self::handle_update_buffer_file);
        client.add_entity_request_handler(Self::handle_save_buffer);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_request_handler(Self::handle_reload_buffers);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
    }
 757
 758    /// Creates a buffer store, optionally retaining its buffers.
 759    pub fn local(worktree_store: Entity<WorktreeStore>, cx: &mut Context<Self>) -> Self {
 760        Self {
 761            state: BufferStoreState::Local(LocalBufferStore {
 762                local_buffer_ids_by_path: Default::default(),
 763                local_buffer_ids_by_entry_id: Default::default(),
 764                worktree_store: worktree_store.clone(),
 765                _subscription: cx.subscribe(&worktree_store, |this, _, event, cx| {
 766                    if let WorktreeStoreEvent::WorktreeAdded(worktree) = event {
 767                        let this = this.as_local_mut().unwrap();
 768                        this.subscribe_to_worktree(worktree, cx);
 769                    }
 770                }),
 771            }),
 772            downstream_client: None,
 773            opened_buffers: Default::default(),
 774            shared_buffers: Default::default(),
 775            loading_buffers: Default::default(),
 776            worktree_store,
 777        }
 778    }
 779
 780    pub fn remote(
 781        worktree_store: Entity<WorktreeStore>,
 782        upstream_client: AnyProtoClient,
 783        remote_id: u64,
 784        _cx: &mut Context<Self>,
 785    ) -> Self {
 786        Self {
 787            state: BufferStoreState::Remote(RemoteBufferStore {
 788                shared_with_me: Default::default(),
 789                loading_remote_buffers_by_id: Default::default(),
 790                remote_buffer_listeners: Default::default(),
 791                project_id: remote_id,
 792                upstream_client,
 793                worktree_store: worktree_store.clone(),
 794            }),
 795            downstream_client: None,
 796            opened_buffers: Default::default(),
 797            loading_buffers: Default::default(),
 798            shared_buffers: Default::default(),
 799            worktree_store,
 800        }
 801    }
 802
 803    fn as_local(&self) -> Option<&LocalBufferStore> {
 804        match &self.state {
 805            BufferStoreState::Local(state) => Some(state),
 806            _ => None,
 807        }
 808    }
 809
 810    fn as_local_mut(&mut self) -> Option<&mut LocalBufferStore> {
 811        match &mut self.state {
 812            BufferStoreState::Local(state) => Some(state),
 813            _ => None,
 814        }
 815    }
 816
 817    fn as_remote_mut(&mut self) -> Option<&mut RemoteBufferStore> {
 818        match &mut self.state {
 819            BufferStoreState::Remote(state) => Some(state),
 820            _ => None,
 821        }
 822    }
 823
 824    fn as_remote(&self) -> Option<&RemoteBufferStore> {
 825        match &self.state {
 826            BufferStoreState::Remote(state) => Some(state),
 827            _ => None,
 828        }
 829    }
 830
    /// Opens the buffer at `project_path`, reusing an already-open buffer or
    /// joining an in-flight load for the same path when possible.
    ///
    /// Emits `BufferOpened` on success (once per caller on the fast path, and
    /// once per completed load on the slow path).
    pub fn open_buffer(
        &mut self,
        project_path: ProjectPath,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        // Fast path: the buffer is already open.
        if let Some(buffer) = self.get_by_path(&project_path, cx) {
            cx.emit(BufferStoreEvent::BufferOpened {
                buffer: buffer.clone(),
                project_path,
            });

            return Task::ready(Ok(buffer));
        }

        let task = match self.loading_buffers.entry(project_path.clone()) {
            // A load for this path is already in flight; share its task.
            hash_map::Entry::Occupied(e) => e.get().clone(),
            hash_map::Entry::Vacant(entry) => {
                let path = project_path.path.clone();
                let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(project_path.worktree_id, cx)
                else {
                    return Task::ready(Err(anyhow!("no such worktree")));
                };
                // Dispatch to the local or remote implementation.
                let load_buffer = match &self.state {
                    BufferStoreState::Local(this) => this.open_buffer(path, worktree, cx),
                    BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx),
                };

                entry
                    .insert(
                        cx.spawn(async move |this, cx| {
                            let load_result = load_buffer.await;
                            this.update(cx, |this, cx| {
                                // Record the fact that the buffer is no longer loading.
                                this.loading_buffers.remove(&project_path);

                                // The error is wrapped in `Arc` so the shared
                                // task can be cloned by concurrent callers.
                                let buffer = load_result.map_err(Arc::new)?;
                                cx.emit(BufferStoreEvent::BufferOpened {
                                    buffer: buffer.clone(),
                                    project_path,
                                });

                                Ok(buffer)
                            })?
                        })
                        .shared(),
                    )
                    .clone()
            }
        };

        // Unwrap the shared task's `Arc<anyhow::Error>` back into a plain
        // `anyhow::Error` for the caller.
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
 886
 887    pub(crate) fn worktree_for_buffer(
 888        &self,
 889        buffer: &Entity<Buffer>,
 890        cx: &App,
 891    ) -> Option<(Entity<Worktree>, Arc<Path>)> {
 892        let file = buffer.read(cx).file()?;
 893        let worktree_id = file.worktree_id(cx);
 894        let path = file.path().clone();
 895        let worktree = self
 896            .worktree_store
 897            .read(cx)
 898            .worktree_for_id(worktree_id, cx)?;
 899        Some((worktree, path))
 900    }
 901
 902    pub fn create_buffer(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<Buffer>>> {
 903        match &self.state {
 904            BufferStoreState::Local(this) => this.create_buffer(cx),
 905            BufferStoreState::Remote(this) => this.create_buffer(cx),
 906        }
 907    }
 908
 909    pub fn save_buffer(
 910        &mut self,
 911        buffer: Entity<Buffer>,
 912        cx: &mut Context<Self>,
 913    ) -> Task<Result<()>> {
 914        match &mut self.state {
 915            BufferStoreState::Local(this) => this.save_buffer(buffer, cx),
 916            BufferStoreState::Remote(this) => this.save_remote_buffer(buffer.clone(), None, cx),
 917        }
 918    }
 919
 920    pub fn save_buffer_as(
 921        &mut self,
 922        buffer: Entity<Buffer>,
 923        path: ProjectPath,
 924        cx: &mut Context<Self>,
 925    ) -> Task<Result<()>> {
 926        let old_file = buffer.read(cx).file().cloned();
 927        let task = match &self.state {
 928            BufferStoreState::Local(this) => this.save_buffer_as(buffer.clone(), path, cx),
 929            BufferStoreState::Remote(this) => {
 930                this.save_remote_buffer(buffer.clone(), Some(path.to_proto()), cx)
 931            }
 932        };
 933        cx.spawn(async move |this, cx| {
 934            task.await?;
 935            this.update(cx, |_, cx| {
 936                cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
 937            })
 938        })
 939    }
 940
    /// Computes Git blame information for `buffer`, optionally at a specific
    /// `version` of its contents.
    ///
    /// Returns `Ok(None)` when the file is not inside a Git repository.
    /// For remote worktrees, the request is forwarded to the host over RPC.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some(file) = File::from_dyn(buffer.file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        match file.worktree.clone().read(cx) {
            Worktree::Local(worktree) => {
                let worktree = worktree.snapshot();
                // Gather everything the blame needs up front, while we still
                // have synchronous access to the worktree and buffer.
                let blame_params = maybe!({
                    let local_repo = match worktree.local_repo_for_path(&file.path) {
                        Some(repo_for_path) => repo_for_path,
                        // Not under a Git repository: blame is unavailable.
                        None => return Ok(None),
                    };

                    let relative_path = local_repo
                        .relativize(&file.path)
                        .context("failed to relativize buffer path")?;

                    let repo = local_repo.repo().clone();

                    // Blame either the requested historical version of the
                    // text or the current contents.
                    let content = match version {
                        Some(version) => buffer.rope_for_version(&version).clone(),
                        None => buffer.as_rope().clone(),
                    };

                    anyhow::Ok(Some((repo, relative_path, content)))
                });

                cx.spawn(async move |cx| {
                    let Some((repo, relative_path, content)) = blame_params? else {
                        return Ok(None);
                    };
                    repo.blame(relative_path.clone(), content, cx)
                        .await
                        .with_context(|| format!("Failed to blame {:?}", relative_path.0))
                        .map(Some)
                })
            }
            Worktree::Remote(worktree) => {
                // Ask the host to compute the blame on our behalf.
                let buffer_id = buffer.remote_id();
                let version = buffer.version();
                let project_id = worktree.project_id();
                let client = worktree.client();
                cx.spawn(async move |_| {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id,
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                })
            }
        }
    }
1003
    /// Builds a permalink URL for `selection` in `buffer` using the file's
    /// Git hosting provider.
    ///
    /// Falls back to crate-registry permalinks for Rust sources that live in
    /// the Cargo registry; for remote worktrees the request is forwarded to
    /// the host.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &App,
    ) -> Task<Result<url::Url>> {
        let buffer = buffer.read(cx);
        let Some(file) = File::from_dyn(buffer.file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        match file.worktree.read(cx) {
            Worktree::Local(worktree) => {
                let worktree_path = worktree.abs_path().clone();
                let Some((repo_entry, repo)) =
                    worktree.repository_for_path(file.path()).and_then(|entry| {
                        let repo = worktree.get_local_repo(&entry)?.repo().clone();
                        Some((entry, repo))
                    })
                else {
                    // If we're not in a Git repo, check whether this is a Rust source
                    // file in the Cargo registry (presumably opened with go-to-definition
                    // from a normal Rust file). If so, we can put together a permalink
                    // using crate metadata.
                    if buffer
                        .language()
                        .is_none_or(|lang| lang.name() != "Rust".into())
                    {
                        return Task::ready(Err(anyhow!("no permalink available")));
                    }
                    let file_path = worktree_path.join(file.path());
                    return cx.spawn(async move |cx| {
                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;
                        get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                            .map_err(|_| anyhow!("no permalink available"))
                    });
                };

                // Path of the file relative to the repository root.
                let path = match repo_entry.relativize(file.path()) {
                    Ok(RepoPath(path)) => path,
                    Err(e) => return Task::ready(Err(e)),
                };

                // Prefer the current branch's upstream remote; fall back to "origin".
                let remote = repo_entry
                    .branch()
                    .and_then(|b| b.upstream.as_ref())
                    .and_then(|b| b.remote_name())
                    .unwrap_or("origin")
                    .to_string();

                cx.spawn(async move |cx| {
                    let origin_url = repo
                        .remote_url(&remote)
                        .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;

                    let sha = repo
                        .head_sha()
                        .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;

                    let provider_registry =
                        cx.update(GitHostingProviderRegistry::default_global)?;

                    // Determine the hosting provider (and canonical remote)
                    // from the remote's URL.
                    let (provider, remote) =
                        parse_git_remote_url(provider_registry, &origin_url)
                            .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;

                    let path = path
                        .to_str()
                        .ok_or_else(|| anyhow!("failed to convert path to string"))?;

                    Ok(provider.build_permalink(
                        remote,
                        BuildPermalinkParams {
                            sha: &sha,
                            path,
                            selection: Some(selection),
                        },
                    ))
                })
            }
            Worktree::Remote(worktree) => {
                // Delegate to the host, which has access to the repository.
                let buffer_id = buffer.remote_id();
                let project_id = worktree.project_id();
                let client = worktree.client();
                cx.spawn(async move |_| {
                    let response = client
                        .request(proto::GetPermalinkToLine {
                            project_id,
                            buffer_id: buffer_id.into(),
                            selection: Some(proto::Range {
                                start: selection.start as u64,
                                end: selection.end as u64,
                            }),
                        })
                        .await?;

                    url::Url::parse(&response.permalink).context("failed to parse permalink")
                })
            }
        }
    }
1106
1107    fn add_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) -> Result<()> {
1108        let buffer = buffer_entity.read(cx);
1109        let remote_id = buffer.remote_id();
1110        let is_remote = buffer.replica_id() != 0;
1111        let open_buffer = OpenBuffer::Complete {
1112            buffer: buffer_entity.downgrade(),
1113        };
1114
1115        let handle = cx.entity().downgrade();
1116        buffer_entity.update(cx, move |_, cx| {
1117            cx.on_release(move |buffer, cx| {
1118                handle
1119                    .update(cx, |_, cx| {
1120                        cx.emit(BufferStoreEvent::BufferDropped(buffer.remote_id()))
1121                    })
1122                    .ok();
1123            })
1124            .detach()
1125        });
1126
1127        match self.opened_buffers.entry(remote_id) {
1128            hash_map::Entry::Vacant(entry) => {
1129                entry.insert(open_buffer);
1130            }
1131            hash_map::Entry::Occupied(mut entry) => {
1132                if let OpenBuffer::Operations(operations) = entry.get_mut() {
1133                    buffer_entity.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx));
1134                } else if entry.get().upgrade().is_some() {
1135                    if is_remote {
1136                        return Ok(());
1137                    } else {
1138                        debug_panic!("buffer {} was already registered", remote_id);
1139                        Err(anyhow!("buffer {} was already registered", remote_id))?;
1140                    }
1141                }
1142                entry.insert(open_buffer);
1143            }
1144        }
1145
1146        cx.subscribe(&buffer_entity, Self::on_buffer_event).detach();
1147        cx.emit(BufferStoreEvent::BufferAdded(buffer_entity));
1148        Ok(())
1149    }
1150
1151    pub fn buffers(&self) -> impl '_ + Iterator<Item = Entity<Buffer>> {
1152        self.opened_buffers
1153            .values()
1154            .filter_map(|buffer| buffer.upgrade())
1155    }
1156
1157    pub fn loading_buffers(
1158        &self,
1159    ) -> impl Iterator<Item = (&ProjectPath, impl Future<Output = Result<Entity<Buffer>>>)> {
1160        self.loading_buffers.iter().map(|(path, task)| {
1161            let task = task.clone();
1162            (path, async move { task.await.map_err(|e| anyhow!("{e}")) })
1163        })
1164    }
1165
1166    pub fn buffer_id_for_project_path(&self, project_path: &ProjectPath) -> Option<&BufferId> {
1167        self.as_local()
1168            .and_then(|state| state.local_buffer_ids_by_path.get(project_path))
1169    }
1170
1171    pub fn get_by_path(&self, path: &ProjectPath, cx: &App) -> Option<Entity<Buffer>> {
1172        self.buffers().find_map(|buffer| {
1173            let file = File::from_dyn(buffer.read(cx).file())?;
1174            if file.worktree_id(cx) == path.worktree_id && file.path == path.path {
1175                Some(buffer)
1176            } else {
1177                None
1178            }
1179        })
1180    }
1181
1182    pub fn get(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1183        self.opened_buffers.get(&buffer_id)?.upgrade()
1184    }
1185
1186    pub fn get_existing(&self, buffer_id: BufferId) -> Result<Entity<Buffer>> {
1187        self.get(buffer_id)
1188            .ok_or_else(|| anyhow!("unknown buffer id {}", buffer_id))
1189    }
1190
1191    pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1192        self.get(buffer_id).or_else(|| {
1193            self.as_remote()
1194                .and_then(|remote| remote.loading_remote_buffers_by_id.get(&buffer_id).cloned())
1195        })
1196    }
1197
1198    pub fn buffer_version_info(&self, cx: &App) -> (Vec<proto::BufferVersion>, Vec<BufferId>) {
1199        let buffers = self
1200            .buffers()
1201            .map(|buffer| {
1202                let buffer = buffer.read(cx);
1203                proto::BufferVersion {
1204                    id: buffer.remote_id().into(),
1205                    version: language::proto::serialize_version(&buffer.version),
1206                }
1207            })
1208            .collect();
1209        let incomplete_buffer_ids = self
1210            .as_remote()
1211            .map(|remote| remote.incomplete_buffer_ids())
1212            .unwrap_or_default();
1213        (buffers, incomplete_buffer_ids)
1214    }
1215
1216    pub fn disconnected_from_host(&mut self, cx: &mut App) {
1217        for open_buffer in self.opened_buffers.values_mut() {
1218            if let Some(buffer) = open_buffer.upgrade() {
1219                buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1220            }
1221        }
1222
1223        for buffer in self.buffers() {
1224            buffer.update(cx, |buffer, cx| {
1225                buffer.set_capability(Capability::ReadOnly, cx)
1226            });
1227        }
1228
1229        if let Some(remote) = self.as_remote_mut() {
1230            // Wake up all futures currently waiting on a buffer to get opened,
1231            // to give them a chance to fail now that we've disconnected.
1232            remote.remote_buffer_listeners.clear()
1233        }
1234    }
1235
1236    pub fn shared(&mut self, remote_id: u64, downstream_client: AnyProtoClient, _cx: &mut App) {
1237        self.downstream_client = Some((downstream_client, remote_id));
1238    }
1239
1240    pub fn unshared(&mut self, _cx: &mut Context<Self>) {
1241        self.downstream_client.take();
1242        self.forget_shared_buffers();
1243    }
1244
1245    pub fn discard_incomplete(&mut self) {
1246        self.opened_buffers
1247            .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
1248    }
1249
    /// Streams buffers that may match `query`: open unnamed buffers are sent
    /// immediately, then buffers are opened for each candidate path the
    /// worktree search produces, up to `limit` candidates in total.
    pub fn find_search_candidates(
        &mut self,
        query: &SearchQuery,
        mut limit: usize,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Receiver<Entity<Buffer>> {
        let (tx, rx) = smol::channel::unbounded();
        let mut open_buffers = HashSet::default();
        let mut unnamed_buffers = Vec::new();
        for handle in self.buffers() {
            let buffer = handle.read(cx);
            if let Some(entry_id) = buffer.entry_id(cx) {
                open_buffers.insert(entry_id);
            } else {
                // Unnamed buffers have no path for the worktree scan to find,
                // so send them directly and count them against the limit.
                limit = limit.saturating_sub(1);
                unnamed_buffers.push(handle)
            };
        }

        // Bound how many buffer opens are kicked off per chunk of results.
        const MAX_CONCURRENT_BUFFER_OPENS: usize = 64;
        let project_paths_rx = self
            .worktree_store
            .update(cx, |worktree_store, cx| {
                worktree_store.find_search_candidates(query.clone(), limit, open_buffers, fs, cx)
            })
            .chunks(MAX_CONCURRENT_BUFFER_OPENS);

        cx.spawn(async move |this, cx| {
            for buffer in unnamed_buffers {
                tx.send(buffer).await.ok();
            }

            let mut project_paths_rx = pin!(project_paths_rx);
            while let Some(project_paths) = project_paths_rx.next().await {
                // Start opening the whole chunk, then await the results in order.
                let buffers = this.update(cx, |this, cx| {
                    project_paths
                        .into_iter()
                        .map(|project_path| this.open_buffer(project_path, cx))
                        .collect::<Vec<_>>()
                })?;
                for buffer_task in buffers {
                    if let Some(buffer) = buffer_task.await.log_err() {
                        if tx.send(buffer).await.is_err() {
                            // Receiver dropped; stop searching.
                            return anyhow::Ok(());
                        }
                    }
                }
            }
            anyhow::Ok(())
        })
        .detach();
        rx
    }
1304
1305    fn on_buffer_event(
1306        &mut self,
1307        buffer: Entity<Buffer>,
1308        event: &BufferEvent,
1309        cx: &mut Context<Self>,
1310    ) {
1311        match event {
1312            BufferEvent::FileHandleChanged => {
1313                if let Some(local) = self.as_local_mut() {
1314                    local.buffer_changed_file(buffer, cx);
1315                }
1316            }
1317            BufferEvent::Reloaded => {
1318                let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else {
1319                    return;
1320                };
1321                let buffer = buffer.read(cx);
1322                downstream_client
1323                    .send(proto::BufferReloaded {
1324                        project_id: *project_id,
1325                        buffer_id: buffer.remote_id().to_proto(),
1326                        version: serialize_version(&buffer.version()),
1327                        mtime: buffer.saved_mtime().map(|t| t.into()),
1328                        line_ending: serialize_line_ending(buffer.line_ending()) as i32,
1329                    })
1330                    .log_err();
1331            }
1332            BufferEvent::LanguageChanged => {}
1333            _ => {}
1334        }
1335    }
1336
1337    pub async fn handle_update_buffer(
1338        this: Entity<Self>,
1339        envelope: TypedEnvelope<proto::UpdateBuffer>,
1340        mut cx: AsyncApp,
1341    ) -> Result<proto::Ack> {
1342        let payload = envelope.payload.clone();
1343        let buffer_id = BufferId::new(payload.buffer_id)?;
1344        let ops = payload
1345            .operations
1346            .into_iter()
1347            .map(language::proto::deserialize_operation)
1348            .collect::<Result<Vec<_>, _>>()?;
1349        this.update(&mut cx, |this, cx| {
1350            match this.opened_buffers.entry(buffer_id) {
1351                hash_map::Entry::Occupied(mut e) => match e.get_mut() {
1352                    OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
1353                    OpenBuffer::Complete { buffer, .. } => {
1354                        if let Some(buffer) = buffer.upgrade() {
1355                            buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx));
1356                        }
1357                    }
1358                },
1359                hash_map::Entry::Vacant(e) => {
1360                    e.insert(OpenBuffer::Operations(ops));
1361                }
1362            }
1363            Ok(proto::Ack {})
1364        })?
1365    }
1366
1367    pub fn register_shared_lsp_handle(
1368        &mut self,
1369        peer_id: proto::PeerId,
1370        buffer_id: BufferId,
1371        handle: OpenLspBufferHandle,
1372    ) {
1373        if let Some(shared_buffers) = self.shared_buffers.get_mut(&peer_id) {
1374            if let Some(buffer) = shared_buffers.get_mut(&buffer_id) {
1375                buffer.lsp_handle = Some(handle);
1376                return;
1377            }
1378        }
1379        debug_panic!("tried to register shared lsp handle, but buffer was not shared")
1380    }
1381
    /// Handles a `SynchronizeBuffers` RPC from a reconnecting guest: rebuilds
    /// the guest's shared-buffer set, reports each buffer's current version,
    /// and re-sends file metadata, reload state, and any operations the guest
    /// is missing relative to the versions it reported.
    pub fn handle_synchronize_buffers(
        &mut self,
        envelope: TypedEnvelope<proto::SynchronizeBuffers>,
        cx: &mut Context<Self>,
        client: Arc<Client>,
    ) -> Result<proto::SynchronizeBuffersResponse> {
        let project_id = envelope.payload.project_id;
        let mut response = proto::SynchronizeBuffersResponse {
            buffers: Default::default(),
        };
        let Some(guest_id) = envelope.original_sender_id else {
            anyhow::bail!("missing original_sender_id on SynchronizeBuffers request");
        };

        // Start the guest's shared set from scratch; only buffers it reports
        // (and that are still open here) are re-shared below.
        self.shared_buffers.entry(guest_id).or_default().clear();
        for buffer in envelope.payload.buffers {
            let buffer_id = BufferId::new(buffer.id)?;
            let remote_version = language::proto::deserialize_version(&buffer.version);
            if let Some(buffer) = self.get(buffer_id) {
                self.shared_buffers
                    .entry(guest_id)
                    .or_default()
                    .entry(buffer_id)
                    .or_insert_with(|| SharedBuffer {
                        buffer: buffer.clone(),
                        lsp_handle: None,
                    });

                let buffer = buffer.read(cx);
                // Tell the guest our current version of this buffer.
                response.buffers.push(proto::BufferVersion {
                    id: buffer_id.into(),
                    version: language::proto::serialize_version(&buffer.version),
                });

                // Serialize only the operations the guest is missing.
                let operations = buffer.serialize_ops(Some(remote_version), cx);
                let client = client.clone();
                if let Some(file) = buffer.file() {
                    client
                        .send(proto::UpdateBufferFile {
                            project_id,
                            buffer_id: buffer_id.into(),
                            file: Some(file.to_proto(cx)),
                        })
                        .log_err();
                }

                // TODO(max): do something
                // client
                //     .send(proto::UpdateStagedText {
                //         project_id,
                //         buffer_id: buffer_id.into(),
                //         diff_base: buffer.diff_base().map(ToString::to_string),
                //     })
                //     .log_err();

                client
                    .send(proto::BufferReloaded {
                        project_id,
                        buffer_id: buffer_id.into(),
                        version: language::proto::serialize_version(buffer.saved_version()),
                        mtime: buffer.saved_mtime().map(|time| time.into()),
                        line_ending: language::proto::serialize_line_ending(buffer.line_ending())
                            as i32,
                    })
                    .log_err();

                // Stream the missing operations in chunks off the main thread.
                cx.background_spawn(
                    async move {
                        let operations = operations.await;
                        for chunk in split_operations(operations) {
                            client
                                .request(proto::UpdateBuffer {
                                    project_id,
                                    buffer_id: buffer_id.into(),
                                    operations: chunk,
                                })
                                .await?;
                        }
                        anyhow::Ok(())
                    }
                    .log_err(),
                )
                .detach();
            }
        }
        Ok(response)
    }
1469
1470    pub fn handle_create_buffer_for_peer(
1471        &mut self,
1472        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
1473        replica_id: u16,
1474        capability: Capability,
1475        cx: &mut Context<Self>,
1476    ) -> Result<()> {
1477        let Some(remote) = self.as_remote_mut() else {
1478            return Err(anyhow!("buffer store is not a remote"));
1479        };
1480
1481        if let Some(buffer) =
1482            remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx)?
1483        {
1484            self.add_buffer(buffer, cx)?;
1485        }
1486
1487        Ok(())
1488    }
1489
    /// Handles an `UpdateBufferFile` RPC: replaces the buffer's file handle,
    /// emits `BufferChangedFilePath` when the path changed, and forwards the
    /// update to the downstream client, if any.
    pub async fn handle_update_buffer_file(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateBufferFile>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = envelope.payload.buffer_id;
        let buffer_id = BufferId::new(buffer_id)?;

        this.update(&mut cx, |this, cx| {
            let payload = envelope.payload.clone();
            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
                let file = payload.file.ok_or_else(|| anyhow!("invalid file"))?;
                let worktree = this
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
                    .ok_or_else(|| anyhow!("no such worktree"))?;
                let file = File::from_proto(file, worktree, cx)?;
                // Swap in the new file and capture the old one, but only treat
                // it as a path change when the path actually differs.
                let old_file = buffer.update(cx, |buffer, cx| {
                    let old_file = buffer.file().cloned();
                    let new_path = file.path.clone();
                    buffer.file_updated(Arc::new(file), cx);
                    if old_file
                        .as_ref()
                        .map_or(true, |old| *old.path() != new_path)
                    {
                        Some(old_file)
                    } else {
                        None
                    }
                });
                if let Some(old_file) = old_file {
                    cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
                }
            }
            // Relay the update further downstream when this store is shared.
            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
                downstream_client
                    .send(proto::UpdateBufferFile {
                        project_id: *project_id,
                        buffer_id: buffer_id.into(),
                        file: envelope.payload.file,
                    })
                    .log_err();
            }
            Ok(())
        })?
    }
1537
    /// Handles a `SaveBuffer` RPC: waits for the requested buffer version to
    /// arrive, saves the buffer (optionally under a new path), and replies
    /// with the saved version and mtime.
    pub async fn handle_save_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SaveBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BufferSaved> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let (buffer, project_id) = this.update(&mut cx, |this, _| {
            anyhow::Ok((
                this.get_existing(buffer_id)?,
                this.downstream_client
                    .as_ref()
                    .map(|(_, project_id)| *project_id)
                    .context("project is not shared")?,
            ))
        })??;
        // Don't save until our replica has caught up to the version the
        // requester is looking at.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(deserialize_version(&envelope.payload.version))
            })?
            .await?;
        let buffer_id = buffer.update(&mut cx, |buffer, _| buffer.remote_id())?;

        // A `new_path` means "save as"; otherwise save in place.
        if let Some(new_path) = envelope.payload.new_path {
            let new_path = ProjectPath::from_proto(new_path);
            this.update(&mut cx, |this, cx| {
                this.save_buffer_as(buffer.clone(), new_path, cx)
            })?
            .await?;
        } else {
            this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
                .await?;
        }

        buffer.update(&mut cx, |buffer, _| proto::BufferSaved {
            project_id,
            buffer_id: buffer_id.into(),
            version: serialize_version(buffer.saved_version()),
            mtime: buffer.saved_mtime().map(|time| time.into()),
        })
    }
1578
1579    pub async fn handle_close_buffer(
1580        this: Entity<Self>,
1581        envelope: TypedEnvelope<proto::CloseBuffer>,
1582        mut cx: AsyncApp,
1583    ) -> Result<()> {
1584        let peer_id = envelope.sender_id;
1585        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1586        this.update(&mut cx, |this, cx| {
1587            if let Some(shared) = this.shared_buffers.get_mut(&peer_id) {
1588                if shared.remove(&buffer_id).is_some() {
1589                    cx.emit(BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id));
1590                    if shared.is_empty() {
1591                        this.shared_buffers.remove(&peer_id);
1592                    }
1593                    return;
1594                }
1595            }
1596            debug_panic!(
1597                "peer_id {} closed buffer_id {} which was either not open or already closed",
1598                peer_id,
1599                buffer_id
1600            )
1601        })
1602    }
1603
1604    pub async fn handle_buffer_saved(
1605        this: Entity<Self>,
1606        envelope: TypedEnvelope<proto::BufferSaved>,
1607        mut cx: AsyncApp,
1608    ) -> Result<()> {
1609        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1610        let version = deserialize_version(&envelope.payload.version);
1611        let mtime = envelope.payload.mtime.clone().map(|time| time.into());
1612        this.update(&mut cx, move |this, cx| {
1613            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
1614                buffer.update(cx, |buffer, cx| {
1615                    buffer.did_save(version, mtime, cx);
1616                });
1617            }
1618
1619            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
1620                downstream_client
1621                    .send(proto::BufferSaved {
1622                        project_id: *project_id,
1623                        buffer_id: buffer_id.into(),
1624                        mtime: envelope.payload.mtime,
1625                        version: envelope.payload.version,
1626                    })
1627                    .log_err();
1628            }
1629        })
1630    }
1631
1632    pub async fn handle_buffer_reloaded(
1633        this: Entity<Self>,
1634        envelope: TypedEnvelope<proto::BufferReloaded>,
1635        mut cx: AsyncApp,
1636    ) -> Result<()> {
1637        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1638        let version = deserialize_version(&envelope.payload.version);
1639        let mtime = envelope.payload.mtime.clone().map(|time| time.into());
1640        let line_ending = deserialize_line_ending(
1641            proto::LineEnding::from_i32(envelope.payload.line_ending)
1642                .ok_or_else(|| anyhow!("missing line ending"))?,
1643        );
1644        this.update(&mut cx, |this, cx| {
1645            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
1646                buffer.update(cx, |buffer, cx| {
1647                    buffer.did_reload(version, line_ending, mtime, cx);
1648                });
1649            }
1650
1651            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
1652                downstream_client
1653                    .send(proto::BufferReloaded {
1654                        project_id: *project_id,
1655                        buffer_id: buffer_id.into(),
1656                        mtime: envelope.payload.mtime,
1657                        version: envelope.payload.version,
1658                        line_ending: envelope.payload.line_ending,
1659                    })
1660                    .log_err();
1661            }
1662        })
1663    }
1664
1665    pub async fn handle_blame_buffer(
1666        this: Entity<Self>,
1667        envelope: TypedEnvelope<proto::BlameBuffer>,
1668        mut cx: AsyncApp,
1669    ) -> Result<proto::BlameBufferResponse> {
1670        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1671        let version = deserialize_version(&envelope.payload.version);
1672        let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
1673        buffer
1674            .update(&mut cx, |buffer, _| {
1675                buffer.wait_for_version(version.clone())
1676            })?
1677            .await?;
1678        let blame = this
1679            .update(&mut cx, |this, cx| {
1680                this.blame_buffer(&buffer, Some(version), cx)
1681            })?
1682            .await?;
1683        Ok(serialize_blame_buffer_response(blame))
1684    }
1685
1686    pub async fn handle_get_permalink_to_line(
1687        this: Entity<Self>,
1688        envelope: TypedEnvelope<proto::GetPermalinkToLine>,
1689        mut cx: AsyncApp,
1690    ) -> Result<proto::GetPermalinkToLineResponse> {
1691        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1692        // let version = deserialize_version(&envelope.payload.version);
1693        let selection = {
1694            let proto_selection = envelope
1695                .payload
1696                .selection
1697                .context("no selection to get permalink for defined")?;
1698            proto_selection.start as u32..proto_selection.end as u32
1699        };
1700        let buffer = this.read_with(&cx, |this, _| this.get_existing(buffer_id))??;
1701        let permalink = this
1702            .update(&mut cx, |this, cx| {
1703                this.get_permalink_to_line(&buffer, selection, cx)
1704            })?
1705            .await?;
1706        Ok(proto::GetPermalinkToLineResponse {
1707            permalink: permalink.to_string(),
1708        })
1709    }
1710
1711    pub fn reload_buffers(
1712        &self,
1713        buffers: HashSet<Entity<Buffer>>,
1714        push_to_history: bool,
1715        cx: &mut Context<Self>,
1716    ) -> Task<Result<ProjectTransaction>> {
1717        if buffers.is_empty() {
1718            return Task::ready(Ok(ProjectTransaction::default()));
1719        }
1720        match &self.state {
1721            BufferStoreState::Local(this) => this.reload_buffers(buffers, push_to_history, cx),
1722            BufferStoreState::Remote(this) => this.reload_buffers(buffers, push_to_history, cx),
1723        }
1724    }
1725
1726    async fn handle_reload_buffers(
1727        this: Entity<Self>,
1728        envelope: TypedEnvelope<proto::ReloadBuffers>,
1729        mut cx: AsyncApp,
1730    ) -> Result<proto::ReloadBuffersResponse> {
1731        let sender_id = envelope.original_sender_id().unwrap_or_default();
1732        let reload = this.update(&mut cx, |this, cx| {
1733            let mut buffers = HashSet::default();
1734            for buffer_id in &envelope.payload.buffer_ids {
1735                let buffer_id = BufferId::new(*buffer_id)?;
1736                buffers.insert(this.get_existing(buffer_id)?);
1737            }
1738            Ok::<_, anyhow::Error>(this.reload_buffers(buffers, false, cx))
1739        })??;
1740
1741        let project_transaction = reload.await?;
1742        let project_transaction = this.update(&mut cx, |this, cx| {
1743            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
1744        })?;
1745        Ok(proto::ReloadBuffersResponse {
1746            transaction: Some(project_transaction),
1747        })
1748    }
1749
    /// Shares `buffer` with `peer_id`, if it isn't already shared with them.
    ///
    /// The buffer is registered in `shared_buffers` synchronously — so a
    /// repeat call for the same (peer, buffer) pair is a cheap no-op — and
    /// its state plus operation history are then streamed to the peer
    /// asynchronously.
    pub fn create_buffer_for_peer(
        &mut self,
        buffer: &Entity<Buffer>,
        peer_id: proto::PeerId,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let buffer_id = buffer.read(cx).remote_id();
        let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
        if shared_buffers.contains_key(&buffer_id) {
            // Already shared with this peer; nothing to send.
            return Task::ready(Ok(()));
        }
        shared_buffers.insert(
            buffer_id,
            SharedBuffer {
                buffer: buffer.clone(),
                lsp_handle: None,
            },
        );

        // Without a downstream client there is no connection to stream the
        // buffer over; the local registration above still stands.
        let Some((client, project_id)) = self.downstream_client.clone() else {
            return Task::ready(Ok(()));
        };

        cx.spawn(async move |this, cx| {
            // The buffer may have been dropped before this task ran.
            let Some(buffer) = this.update(cx, |this, _| this.get(buffer_id))? else {
                return anyhow::Ok(());
            };

            let operations = buffer.update(cx, |b, cx| b.serialize_ops(None, cx))?;
            let operations = operations.await;
            let state = buffer.update(cx, |buffer, cx| buffer.to_proto(cx))?;

            // First message carries the buffer's current state...
            let initial_state = proto::CreateBufferForPeer {
                project_id,
                peer_id: Some(peer_id),
                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
            };

            if client.send(initial_state).log_err().is_some() {
                let client = client.clone();
                // ...followed by the operation history, split into chunks by
                // `split_operations`. The final chunk is flagged so the peer
                // knows when the buffer is complete.
                cx.background_spawn(async move {
                    let mut chunks = split_operations(operations).peekable();
                    while let Some(chunk) = chunks.next() {
                        let is_last = chunks.peek().is_none();
                        client.send(proto::CreateBufferForPeer {
                            project_id,
                            peer_id: Some(peer_id),
                            variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
                                proto::BufferChunk {
                                    buffer_id: buffer_id.into(),
                                    operations: chunk,
                                    is_last,
                                },
                            )),
                        })?;
                    }
                    anyhow::Ok(())
                })
                .await
                .log_err();
            }
            Ok(())
        })
    }
1814
    /// Drops the shared-buffer bookkeeping for every peer.
    pub fn forget_shared_buffers(&mut self) {
        self.shared_buffers.clear();
    }
1818
    /// Drops the shared-buffer bookkeeping for a single peer.
    pub fn forget_shared_buffers_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_buffers.remove(peer_id);
    }
1822
1823    pub fn update_peer_id(&mut self, old_peer_id: &proto::PeerId, new_peer_id: proto::PeerId) {
1824        if let Some(buffers) = self.shared_buffers.remove(old_peer_id) {
1825            self.shared_buffers.insert(new_peer_id, buffers);
1826        }
1827    }
1828
    /// Whether any buffer is currently shared with any peer.
    pub fn has_shared_buffers(&self) -> bool {
        !self.shared_buffers.is_empty()
    }
1832
1833    pub fn create_local_buffer(
1834        &mut self,
1835        text: &str,
1836        language: Option<Arc<Language>>,
1837        cx: &mut Context<Self>,
1838    ) -> Entity<Buffer> {
1839        let buffer = cx.new(|cx| {
1840            Buffer::local(text, cx)
1841                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1842        });
1843
1844        self.add_buffer(buffer.clone(), cx).log_err();
1845        let buffer_id = buffer.read(cx).remote_id();
1846
1847        let this = self
1848            .as_local_mut()
1849            .expect("local-only method called in a non-local context");
1850        if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1851            this.local_buffer_ids_by_path.insert(
1852                ProjectPath {
1853                    worktree_id: file.worktree_id(cx),
1854                    path: file.path.clone(),
1855                },
1856                buffer_id,
1857            );
1858
1859            if let Some(entry_id) = file.entry_id {
1860                this.local_buffer_ids_by_entry_id
1861                    .insert(entry_id, buffer_id);
1862            }
1863        }
1864        buffer
1865    }
1866
1867    pub fn deserialize_project_transaction(
1868        &mut self,
1869        message: proto::ProjectTransaction,
1870        push_to_history: bool,
1871        cx: &mut Context<Self>,
1872    ) -> Task<Result<ProjectTransaction>> {
1873        if let Some(this) = self.as_remote_mut() {
1874            this.deserialize_project_transaction(message, push_to_history, cx)
1875        } else {
1876            debug_panic!("not a remote buffer store");
1877            Task::ready(Err(anyhow!("not a remote buffer store")))
1878        }
1879    }
1880
1881    pub fn wait_for_remote_buffer(
1882        &mut self,
1883        id: BufferId,
1884        cx: &mut Context<BufferStore>,
1885    ) -> Task<Result<Entity<Buffer>>> {
1886        if let Some(this) = self.as_remote_mut() {
1887            this.wait_for_remote_buffer(id, cx)
1888        } else {
1889            debug_panic!("not a remote buffer store");
1890            Task::ready(Err(anyhow!("not a remote buffer store")))
1891        }
1892    }
1893
1894    pub fn serialize_project_transaction_for_peer(
1895        &mut self,
1896        project_transaction: ProjectTransaction,
1897        peer_id: proto::PeerId,
1898        cx: &mut Context<Self>,
1899    ) -> proto::ProjectTransaction {
1900        let mut serialized_transaction = proto::ProjectTransaction {
1901            buffer_ids: Default::default(),
1902            transactions: Default::default(),
1903        };
1904        for (buffer, transaction) in project_transaction.0 {
1905            self.create_buffer_for_peer(&buffer, peer_id, cx)
1906                .detach_and_log_err(cx);
1907            serialized_transaction
1908                .buffer_ids
1909                .push(buffer.read(cx).remote_id().into());
1910            serialized_transaction
1911                .transactions
1912                .push(language::proto::serialize_transaction(&transaction));
1913        }
1914        serialized_transaction
1915    }
1916}
1917
1918impl OpenBuffer {
1919    fn upgrade(&self) -> Option<Entity<Buffer>> {
1920        match self {
1921            OpenBuffer::Complete { buffer, .. } => buffer.upgrade(),
1922            OpenBuffer::Operations(_) => None,
1923        }
1924    }
1925}
1926
1927fn is_not_found_error(error: &anyhow::Error) -> bool {
1928    error
1929        .root_cause()
1930        .downcast_ref::<io::Error>()
1931        .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
1932}
1933
1934fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
1935    let Some(blame) = blame else {
1936        return proto::BlameBufferResponse {
1937            blame_response: None,
1938        };
1939    };
1940
1941    let entries = blame
1942        .entries
1943        .into_iter()
1944        .map(|entry| proto::BlameEntry {
1945            sha: entry.sha.as_bytes().into(),
1946            start_line: entry.range.start,
1947            end_line: entry.range.end,
1948            original_line_number: entry.original_line_number,
1949            author: entry.author.clone(),
1950            author_mail: entry.author_mail.clone(),
1951            author_time: entry.author_time,
1952            author_tz: entry.author_tz.clone(),
1953            committer: entry.committer_name.clone(),
1954            committer_mail: entry.committer_email.clone(),
1955            committer_time: entry.committer_time,
1956            committer_tz: entry.committer_tz.clone(),
1957            summary: entry.summary.clone(),
1958            previous: entry.previous.clone(),
1959            filename: entry.filename.clone(),
1960        })
1961        .collect::<Vec<_>>();
1962
1963    let messages = blame
1964        .messages
1965        .into_iter()
1966        .map(|(oid, message)| proto::CommitMessage {
1967            oid: oid.as_bytes().into(),
1968            message,
1969        })
1970        .collect::<Vec<_>>();
1971
1972    proto::BlameBufferResponse {
1973        blame_response: Some(proto::blame_buffer_response::BlameResponse {
1974            entries,
1975            messages,
1976            remote_url: blame.remote_url,
1977        }),
1978    }
1979}
1980
1981fn deserialize_blame_buffer_response(
1982    response: proto::BlameBufferResponse,
1983) -> Option<git::blame::Blame> {
1984    let response = response.blame_response?;
1985    let entries = response
1986        .entries
1987        .into_iter()
1988        .filter_map(|entry| {
1989            Some(git::blame::BlameEntry {
1990                sha: git::Oid::from_bytes(&entry.sha).ok()?,
1991                range: entry.start_line..entry.end_line,
1992                original_line_number: entry.original_line_number,
1993                committer_name: entry.committer,
1994                committer_time: entry.committer_time,
1995                committer_tz: entry.committer_tz,
1996                committer_email: entry.committer_mail,
1997                author: entry.author,
1998                author_mail: entry.author_mail,
1999                author_time: entry.author_time,
2000                author_tz: entry.author_tz,
2001                summary: entry.summary,
2002                previous: entry.previous,
2003                filename: entry.filename,
2004            })
2005        })
2006        .collect::<Vec<_>>();
2007
2008    let messages = response
2009        .messages
2010        .into_iter()
2011        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
2012        .collect::<HashMap<_, _>>();
2013
2014    Some(Blame {
2015        entries,
2016        messages,
2017        remote_url: response.remote_url,
2018    })
2019}
2020
2021fn get_permalink_in_rust_registry_src(
2022    provider_registry: Arc<GitHostingProviderRegistry>,
2023    path: PathBuf,
2024    selection: Range<u32>,
2025) -> Result<url::Url> {
2026    #[derive(Deserialize)]
2027    struct CargoVcsGit {
2028        sha1: String,
2029    }
2030
2031    #[derive(Deserialize)]
2032    struct CargoVcsInfo {
2033        git: CargoVcsGit,
2034        path_in_vcs: String,
2035    }
2036
2037    #[derive(Deserialize)]
2038    struct CargoPackage {
2039        repository: String,
2040    }
2041
2042    #[derive(Deserialize)]
2043    struct CargoToml {
2044        package: CargoPackage,
2045    }
2046
2047    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
2048        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
2049        Some((dir, json))
2050    }) else {
2051        bail!("No .cargo_vcs_info.json found in parent directories")
2052    };
2053    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
2054    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
2055    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
2056    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
2057        .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
2058    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
2059    let permalink = provider.build_permalink(
2060        remote,
2061        BuildPermalinkParams {
2062            sha: &cargo_vcs_info.git.sha1,
2063            path: &path.to_string_lossy(),
2064            selection: Some(selection),
2065        },
2066    );
2067    Ok(permalink)
2068}