buffer_store.rs

   1use crate::{
   2    ProjectItem as _, ProjectPath,
   3    lsp_store::OpenLspBufferHandle,
   4    search::SearchQuery,
   5    worktree_store::{WorktreeStore, WorktreeStoreEvent},
   6};
   7use anyhow::{Context as _, Result, anyhow};
   8use client::Client;
   9use collections::{HashMap, HashSet, hash_map};
  10use encodings::EncodingOptions;
  11use fs::Fs;
  12use futures::StreamExt;
  13use futures::{Future, FutureExt as _, channel::oneshot, future::Shared};
  14use gpui::{
  15    App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
  16};
  17use language::{
  18    Buffer, BufferEvent, Capability, DiskState, File as _, Language, Operation,
  19    proto::{
  20        deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version,
  21        split_operations,
  22    },
  23};
  24use rpc::{
  25    AnyProtoClient, ErrorCode, ErrorExt as _, TypedEnvelope,
  26    proto::{self},
  27};
  28use smol::channel::Receiver;
  29use std::{io, pin::pin, sync::Arc, time::Instant};
  30use text::{BufferId, ReplicaId};
  31use util::{ResultExt as _, TryFutureExt, debug_panic, maybe, rel_path::RelPath};
  32use worktree::{File, PathChange, ProjectEntryId, Worktree, WorktreeId};
  33
  34/// A set of open buffers.
  35pub struct BufferStore {
  36    state: BufferStoreState,
  37    #[allow(clippy::type_complexity)]
  38    loading_buffers: HashMap<ProjectPath, Shared<Task<Result<Entity<Buffer>, Arc<anyhow::Error>>>>>,
  39    worktree_store: Entity<WorktreeStore>,
  40    opened_buffers: HashMap<BufferId, OpenBuffer>,
  41    path_to_buffer_id: HashMap<ProjectPath, BufferId>,
  42    downstream_client: Option<(AnyProtoClient, u64)>,
  43    shared_buffers: HashMap<proto::PeerId, HashMap<BufferId, SharedBuffer>>,
  44    non_searchable_buffers: HashSet<BufferId>,
  45}
  46
  47#[derive(Hash, Eq, PartialEq, Clone)]
  48struct SharedBuffer {
  49    buffer: Entity<Buffer>,
  50    lsp_handle: Option<OpenLspBufferHandle>,
  51}
  52
  53enum BufferStoreState {
  54    Local(LocalBufferStore),
  55    Remote(RemoteBufferStore),
  56}
  57
  58struct RemoteBufferStore {
  59    shared_with_me: HashSet<Entity<Buffer>>,
  60    upstream_client: AnyProtoClient,
  61    project_id: u64,
  62    loading_remote_buffers_by_id: HashMap<BufferId, Entity<Buffer>>,
  63    remote_buffer_listeners:
  64        HashMap<BufferId, Vec<oneshot::Sender<anyhow::Result<Entity<Buffer>>>>>,
  65    worktree_store: Entity<WorktreeStore>,
  66}
  67
  68struct LocalBufferStore {
  69    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
  70    worktree_store: Entity<WorktreeStore>,
  71    _subscription: Subscription,
  72}
  73
  74enum OpenBuffer {
  75    Complete { buffer: WeakEntity<Buffer> },
  76    Operations(Vec<Operation>),
  77}
  78
  79pub enum BufferStoreEvent {
  80    BufferAdded(Entity<Buffer>),
  81    BufferOpened {
  82        buffer: Entity<Buffer>,
  83        project_path: ProjectPath,
  84    },
  85    SharedBufferClosed(proto::PeerId, BufferId),
  86    BufferDropped(BufferId),
  87    BufferChangedFilePath {
  88        buffer: Entity<Buffer>,
  89        old_file: Option<Arc<dyn language::File>>,
  90    },
  91}
  92
  93#[derive(Default, Debug, Clone)]
  94pub struct ProjectTransaction(pub HashMap<Entity<Buffer>, language::Transaction>);
  95
  96impl PartialEq for ProjectTransaction {
  97    fn eq(&self, other: &Self) -> bool {
  98        self.0.len() == other.0.len()
  99            && self.0.iter().all(|(buffer, transaction)| {
 100                other.0.get(buffer).is_some_and(|t| t.id == transaction.id)
 101            })
 102    }
 103}
 104
 105impl EventEmitter<BufferStoreEvent> for BufferStore {}
 106
 107impl RemoteBufferStore {
 108    pub fn wait_for_remote_buffer(
 109        &mut self,
 110        id: BufferId,
 111        cx: &mut Context<BufferStore>,
 112    ) -> Task<Result<Entity<Buffer>>> {
 113        let (tx, rx) = oneshot::channel();
 114        self.remote_buffer_listeners.entry(id).or_default().push(tx);
 115
 116        cx.spawn(async move |this, cx| {
 117            if let Some(buffer) = this
 118                .read_with(cx, |buffer_store, _| buffer_store.get(id))
 119                .ok()
 120                .flatten()
 121            {
 122                return Ok(buffer);
 123            }
 124
 125            cx.background_spawn(async move { rx.await? }).await
 126        })
 127    }
 128
 129    fn save_remote_buffer(
 130        &self,
 131        buffer_handle: Entity<Buffer>,
 132        new_path: Option<proto::ProjectPath>,
 133        cx: &Context<BufferStore>,
 134    ) -> Task<Result<()>> {
 135        let buffer = buffer_handle.read(cx);
 136        let buffer_id = buffer.remote_id().into();
 137        let version = buffer.version();
 138        let rpc = self.upstream_client.clone();
 139        let project_id = self.project_id;
 140        cx.spawn(async move |_, cx| {
 141            let response = rpc
 142                .request(proto::SaveBuffer {
 143                    project_id,
 144                    buffer_id,
 145                    new_path,
 146                    version: serialize_version(&version),
 147                })
 148                .await?;
 149            let version = deserialize_version(&response.version);
 150            let mtime = response.mtime.map(|mtime| mtime.into());
 151
 152            buffer_handle.update(cx, |buffer, cx| {
 153                buffer.did_save(version.clone(), mtime, cx);
 154            })?;
 155
 156            Ok(())
 157        })
 158    }
 159
 160    pub fn handle_create_buffer_for_peer(
 161        &mut self,
 162        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
 163        replica_id: ReplicaId,
 164        capability: Capability,
 165        cx: &mut Context<BufferStore>,
 166    ) -> Result<Option<Entity<Buffer>>> {
 167        match envelope.payload.variant.context("missing variant")? {
 168            proto::create_buffer_for_peer::Variant::State(mut state) => {
 169                let buffer_id = BufferId::new(state.id)?;
 170
 171                let buffer_result = maybe!({
 172                    let mut buffer_file = None;
 173                    if let Some(file) = state.file.take() {
 174                        let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id);
 175                        let worktree = self
 176                            .worktree_store
 177                            .read(cx)
 178                            .worktree_for_id(worktree_id, cx)
 179                            .with_context(|| {
 180                                format!("no worktree found for id {}", file.worktree_id)
 181                            })?;
 182                        buffer_file = Some(Arc::new(File::from_proto(file, worktree, cx)?)
 183                            as Arc<dyn language::File>);
 184                    }
 185                    Buffer::from_proto(
 186                        replica_id,
 187                        capability,
 188                        state,
 189                        buffer_file,
 190                        cx.background_executor(),
 191                    )
 192                });
 193
 194                match buffer_result {
 195                    Ok(buffer) => {
 196                        let buffer = cx.new(|_| buffer);
 197                        self.loading_remote_buffers_by_id.insert(buffer_id, buffer);
 198                    }
 199                    Err(error) => {
 200                        if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
 201                            for listener in listeners {
 202                                listener.send(Err(anyhow!(error.cloned()))).ok();
 203                            }
 204                        }
 205                    }
 206                }
 207            }
 208            proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
 209                let buffer_id = BufferId::new(chunk.buffer_id)?;
 210                let buffer = self
 211                    .loading_remote_buffers_by_id
 212                    .get(&buffer_id)
 213                    .cloned()
 214                    .with_context(|| {
 215                        format!(
 216                            "received chunk for buffer {} without initial state",
 217                            chunk.buffer_id
 218                        )
 219                    })?;
 220
 221                let result = maybe!({
 222                    let operations = chunk
 223                        .operations
 224                        .into_iter()
 225                        .map(language::proto::deserialize_operation)
 226                        .collect::<Result<Vec<_>>>()?;
 227                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx));
 228                    anyhow::Ok(())
 229                });
 230
 231                if let Err(error) = result {
 232                    self.loading_remote_buffers_by_id.remove(&buffer_id);
 233                    if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
 234                        for listener in listeners {
 235                            listener.send(Err(error.cloned())).ok();
 236                        }
 237                    }
 238                } else if chunk.is_last {
 239                    self.loading_remote_buffers_by_id.remove(&buffer_id);
 240                    if self.upstream_client.is_via_collab() {
 241                        // retain buffers sent by peers to avoid races.
 242                        self.shared_with_me.insert(buffer.clone());
 243                    }
 244
 245                    if let Some(senders) = self.remote_buffer_listeners.remove(&buffer_id) {
 246                        for sender in senders {
 247                            sender.send(Ok(buffer.clone())).ok();
 248                        }
 249                    }
 250                    return Ok(Some(buffer));
 251                }
 252            }
 253        }
 254        Ok(None)
 255    }
 256
 257    pub fn incomplete_buffer_ids(&self) -> Vec<BufferId> {
 258        self.loading_remote_buffers_by_id
 259            .keys()
 260            .copied()
 261            .collect::<Vec<_>>()
 262    }
 263
 264    pub fn deserialize_project_transaction(
 265        &self,
 266        message: proto::ProjectTransaction,
 267        push_to_history: bool,
 268        cx: &mut Context<BufferStore>,
 269    ) -> Task<Result<ProjectTransaction>> {
 270        cx.spawn(async move |this, cx| {
 271            let mut project_transaction = ProjectTransaction::default();
 272            for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
 273            {
 274                let buffer_id = BufferId::new(buffer_id)?;
 275                let buffer = this
 276                    .update(cx, |this, cx| this.wait_for_remote_buffer(buffer_id, cx))?
 277                    .await?;
 278                let transaction = language::proto::deserialize_transaction(transaction)?;
 279                project_transaction.0.insert(buffer, transaction);
 280            }
 281
 282            for (buffer, transaction) in &project_transaction.0 {
 283                buffer
 284                    .update(cx, |buffer, _| {
 285                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
 286                    })?
 287                    .await?;
 288
 289                if push_to_history {
 290                    buffer.update(cx, |buffer, _| {
 291                        buffer.push_transaction(transaction.clone(), Instant::now());
 292                        buffer.finalize_last_transaction();
 293                    })?;
 294                }
 295            }
 296
 297            Ok(project_transaction)
 298        })
 299    }
 300
 301    fn open_buffer(
 302        &self,
 303        path: Arc<RelPath>,
 304        worktree: Entity<Worktree>,
 305        cx: &mut Context<BufferStore>,
 306    ) -> Task<Result<Entity<Buffer>>> {
 307        let worktree_id = worktree.read(cx).id().to_proto();
 308        let project_id = self.project_id;
 309        let client = self.upstream_client.clone();
 310        cx.spawn(async move |this, cx| {
 311            let response = client
 312                .request(proto::OpenBufferByPath {
 313                    project_id,
 314                    worktree_id,
 315                    path: path.to_proto(),
 316                })
 317                .await?;
 318            let buffer_id = BufferId::new(response.buffer_id)?;
 319
 320            let buffer = this
 321                .update(cx, {
 322                    |this, cx| this.wait_for_remote_buffer(buffer_id, cx)
 323                })?
 324                .await?;
 325
 326            Ok(buffer)
 327        })
 328    }
 329
 330    fn create_buffer(
 331        &self,
 332        project_searchable: bool,
 333        cx: &mut Context<BufferStore>,
 334    ) -> Task<Result<Entity<Buffer>>> {
 335        let create = self.upstream_client.request(proto::OpenNewBuffer {
 336            project_id: self.project_id,
 337        });
 338        cx.spawn(async move |this, cx| {
 339            let response = create.await?;
 340            let buffer_id = BufferId::new(response.buffer_id)?;
 341
 342            this.update(cx, |this, cx| {
 343                if !project_searchable {
 344                    this.non_searchable_buffers.insert(buffer_id);
 345                }
 346                this.wait_for_remote_buffer(buffer_id, cx)
 347            })?
 348            .await
 349        })
 350    }
 351
 352    fn reload_buffers(
 353        &self,
 354        buffers: HashSet<Entity<Buffer>>,
 355        push_to_history: bool,
 356        cx: &mut Context<BufferStore>,
 357    ) -> Task<Result<ProjectTransaction>> {
 358        let request = self.upstream_client.request(proto::ReloadBuffers {
 359            project_id: self.project_id,
 360            buffer_ids: buffers
 361                .iter()
 362                .map(|buffer| buffer.read(cx).remote_id().to_proto())
 363                .collect(),
 364        });
 365
 366        cx.spawn(async move |this, cx| {
 367            let response = request.await?.transaction.context("missing transaction")?;
 368            this.update(cx, |this, cx| {
 369                this.deserialize_project_transaction(response, push_to_history, cx)
 370            })?
 371            .await
 372        })
 373    }
 374}
 375
 376impl LocalBufferStore {
 377    fn save_local_buffer(
 378        &self,
 379        buffer_handle: Entity<Buffer>,
 380        worktree: Entity<Worktree>,
 381        path: Arc<RelPath>,
 382        mut has_changed_file: bool,
 383        cx: &mut Context<BufferStore>,
 384    ) -> Task<Result<()>> {
 385        let buffer = buffer_handle.read(cx);
 386
 387        let text = buffer.as_rope().clone();
 388        let line_ending = buffer.line_ending();
 389        let version = buffer.version();
 390        let buffer_id = buffer.remote_id();
 391        let file = buffer.file().cloned();
 392        let encoding = buffer.encoding().clone();
 393
 394        if file
 395            .as_ref()
 396            .is_some_and(|file| file.disk_state() == DiskState::New)
 397        {
 398            has_changed_file = true;
 399        }
 400
 401        let save = worktree.update(cx, |worktree, cx| {
 402            worktree.write_file(path.clone(), text, line_ending, encoding, cx)
 403        });
 404
 405        cx.spawn(async move |this, cx| {
 406            let new_file = save.await?;
 407            let mtime = new_file.disk_state().mtime();
 408            this.update(cx, |this, cx| {
 409                if let Some((downstream_client, project_id)) = this.downstream_client.clone() {
 410                    if has_changed_file {
 411                        downstream_client
 412                            .send(proto::UpdateBufferFile {
 413                                project_id,
 414                                buffer_id: buffer_id.to_proto(),
 415                                file: Some(language::File::to_proto(&*new_file, cx)),
 416                            })
 417                            .log_err();
 418                    }
 419                    downstream_client
 420                        .send(proto::BufferSaved {
 421                            project_id,
 422                            buffer_id: buffer_id.to_proto(),
 423                            version: serialize_version(&version),
 424                            mtime: mtime.map(|time| time.into()),
 425                        })
 426                        .log_err();
 427                }
 428            })?;
 429            buffer_handle.update(cx, |buffer, cx| {
 430                if has_changed_file {
 431                    buffer.file_updated(new_file, cx);
 432                }
 433                buffer.did_save(version.clone(), mtime, cx);
 434            })
 435        })
 436    }
 437
 438    fn subscribe_to_worktree(
 439        &mut self,
 440        worktree: &Entity<Worktree>,
 441        cx: &mut Context<BufferStore>,
 442    ) {
 443        cx.subscribe(worktree, |this, worktree, event, cx| {
 444            if worktree.read(cx).is_local()
 445                && let worktree::Event::UpdatedEntries(changes) = event
 446            {
 447                Self::local_worktree_entries_changed(this, &worktree, changes, cx);
 448            }
 449        })
 450        .detach();
 451    }
 452
 453    fn local_worktree_entries_changed(
 454        this: &mut BufferStore,
 455        worktree_handle: &Entity<Worktree>,
 456        changes: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
 457        cx: &mut Context<BufferStore>,
 458    ) {
 459        let snapshot = worktree_handle.read(cx).snapshot();
 460        for (path, entry_id, _) in changes {
 461            Self::local_worktree_entry_changed(
 462                this,
 463                *entry_id,
 464                path,
 465                worktree_handle,
 466                &snapshot,
 467                cx,
 468            );
 469        }
 470    }
 471
 472    fn local_worktree_entry_changed(
 473        this: &mut BufferStore,
 474        entry_id: ProjectEntryId,
 475        path: &Arc<RelPath>,
 476        worktree: &Entity<worktree::Worktree>,
 477        snapshot: &worktree::Snapshot,
 478        cx: &mut Context<BufferStore>,
 479    ) -> Option<()> {
 480        let project_path = ProjectPath {
 481            worktree_id: snapshot.id(),
 482            path: path.clone(),
 483        };
 484
 485        let buffer_id = this
 486            .as_local_mut()
 487            .and_then(|local| local.local_buffer_ids_by_entry_id.get(&entry_id))
 488            .copied()
 489            .or_else(|| this.path_to_buffer_id.get(&project_path).copied())?;
 490
 491        let buffer = if let Some(buffer) = this.get(buffer_id) {
 492            Some(buffer)
 493        } else {
 494            this.opened_buffers.remove(&buffer_id);
 495            this.non_searchable_buffers.remove(&buffer_id);
 496            None
 497        };
 498
 499        let buffer = if let Some(buffer) = buffer {
 500            buffer
 501        } else {
 502            this.path_to_buffer_id.remove(&project_path);
 503            let this = this.as_local_mut()?;
 504            this.local_buffer_ids_by_entry_id.remove(&entry_id);
 505            return None;
 506        };
 507
 508        let events = buffer.update(cx, |buffer, cx| {
 509            let file = buffer.file()?;
 510            let old_file = File::from_dyn(Some(file))?;
 511            if old_file.worktree != *worktree {
 512                return None;
 513            }
 514
 515            let snapshot_entry = old_file
 516                .entry_id
 517                .and_then(|entry_id| snapshot.entry_for_id(entry_id))
 518                .or_else(|| snapshot.entry_for_path(old_file.path.as_ref()));
 519
 520            let new_file = if let Some(entry) = snapshot_entry {
 521                File {
 522                    disk_state: match entry.mtime {
 523                        Some(mtime) => DiskState::Present { mtime },
 524                        None => old_file.disk_state,
 525                    },
 526                    is_local: true,
 527                    entry_id: Some(entry.id),
 528                    path: entry.path.clone(),
 529                    worktree: worktree.clone(),
 530                    is_private: entry.is_private,
 531                }
 532            } else {
 533                File {
 534                    disk_state: DiskState::Deleted,
 535                    is_local: true,
 536                    entry_id: old_file.entry_id,
 537                    path: old_file.path.clone(),
 538                    worktree: worktree.clone(),
 539                    is_private: old_file.is_private,
 540                }
 541            };
 542
 543            if new_file == *old_file {
 544                return None;
 545            }
 546
 547            let mut events = Vec::new();
 548            if new_file.path != old_file.path {
 549                this.path_to_buffer_id.remove(&ProjectPath {
 550                    path: old_file.path.clone(),
 551                    worktree_id: old_file.worktree_id(cx),
 552                });
 553                this.path_to_buffer_id.insert(
 554                    ProjectPath {
 555                        worktree_id: new_file.worktree_id(cx),
 556                        path: new_file.path.clone(),
 557                    },
 558                    buffer_id,
 559                );
 560                events.push(BufferStoreEvent::BufferChangedFilePath {
 561                    buffer: cx.entity(),
 562                    old_file: buffer.file().cloned(),
 563                });
 564            }
 565            let local = this.as_local_mut()?;
 566            if new_file.entry_id != old_file.entry_id {
 567                if let Some(entry_id) = old_file.entry_id {
 568                    local.local_buffer_ids_by_entry_id.remove(&entry_id);
 569                }
 570                if let Some(entry_id) = new_file.entry_id {
 571                    local
 572                        .local_buffer_ids_by_entry_id
 573                        .insert(entry_id, buffer_id);
 574                }
 575            }
 576
 577            if let Some((client, project_id)) = &this.downstream_client {
 578                client
 579                    .send(proto::UpdateBufferFile {
 580                        project_id: *project_id,
 581                        buffer_id: buffer_id.to_proto(),
 582                        file: Some(new_file.to_proto(cx)),
 583                    })
 584                    .ok();
 585            }
 586
 587            buffer.file_updated(Arc::new(new_file), cx);
 588            Some(events)
 589        })?;
 590
 591        for event in events {
 592            cx.emit(event);
 593        }
 594
 595        None
 596    }
 597
 598    fn save_buffer(
 599        &self,
 600        buffer: Entity<Buffer>,
 601        cx: &mut Context<BufferStore>,
 602    ) -> Task<Result<()>> {
 603        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
 604            return Task::ready(Err(anyhow!("buffer doesn't have a file")));
 605        };
 606        let worktree = file.worktree.clone();
 607        self.save_local_buffer(buffer, worktree, file.path.clone(), false, cx)
 608    }
 609
 610    fn save_buffer_as(
 611        &self,
 612        buffer: Entity<Buffer>,
 613        path: ProjectPath,
 614        cx: &mut Context<BufferStore>,
 615    ) -> Task<Result<()>> {
 616        let Some(worktree) = self
 617            .worktree_store
 618            .read(cx)
 619            .worktree_for_id(path.worktree_id, cx)
 620        else {
 621            return Task::ready(Err(anyhow!("no such worktree")));
 622        };
 623        self.save_local_buffer(buffer, worktree, path.path, true, cx)
 624    }
 625
 626    fn open_buffer(
 627        &self,
 628        path: Arc<RelPath>,
 629        worktree: Entity<Worktree>,
 630        options: &EncodingOptions,
 631        cx: &mut Context<BufferStore>,
 632    ) -> Task<Result<Entity<Buffer>>> {
 633        let options = options.clone();
 634
 635        let load_buffer = worktree.update(cx, |worktree, cx| {
 636            let reservation = cx.reserve_entity();
 637            let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
 638
 639            let load_file_task = worktree.load_file(path.as_ref(), &options, cx);
 640
 641            cx.spawn(async move |_, cx| {
 642                let loaded_file = load_file_task.await?;
 643                let background_executor = cx.background_executor().clone();
 644
 645                let buffer = cx.insert_entity(reservation, |cx| {
 646                    let mut buffer = Buffer::build(
 647                        text::Buffer::new(
 648                            ReplicaId::LOCAL,
 649                            buffer_id,
 650                            loaded_file.text,
 651                            &background_executor,
 652                        ),
 653                        Some(loaded_file.file),
 654                        Capability::ReadWrite,
 655                    );
 656                    buffer.set_encoding(loaded_file.encoding, cx);
 657                    buffer
 658                })?;
 659
 660                Ok(buffer)
 661            })
 662        });
 663
 664        cx.spawn(async move |this, cx| {
 665            let buffer = match load_buffer.await {
 666                Ok(buffer) => buffer,
 667                Err(error) if is_not_found_error(&error) => cx.new(|cx| {
 668                    let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
 669                    let text_buffer = text::Buffer::new(
 670                        ReplicaId::LOCAL,
 671                        buffer_id,
 672                        "",
 673                        cx.background_executor(),
 674                    );
 675                    Buffer::build(
 676                        text_buffer,
 677                        Some(Arc::new(File {
 678                            worktree,
 679                            path,
 680                            disk_state: DiskState::New,
 681                            entry_id: None,
 682                            is_local: true,
 683                            is_private: false,
 684                        })),
 685                        Capability::ReadWrite,
 686                    )
 687                })?,
 688                Err(e) => return Err(e),
 689            };
 690            this.update(cx, |this, cx| {
 691                this.add_buffer(buffer.clone(), cx)?;
 692                let buffer_id = buffer.read(cx).remote_id();
 693                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
 694                    this.path_to_buffer_id.insert(
 695                        ProjectPath {
 696                            worktree_id: file.worktree_id(cx),
 697                            path: file.path.clone(),
 698                        },
 699                        buffer_id,
 700                    );
 701                    let this = this.as_local_mut().unwrap();
 702                    if let Some(entry_id) = file.entry_id {
 703                        this.local_buffer_ids_by_entry_id
 704                            .insert(entry_id, buffer_id);
 705                    }
 706                }
 707
 708                anyhow::Ok(())
 709            })??;
 710
 711            Ok(buffer)
 712        })
 713    }
 714
 715    fn create_buffer(
 716        &self,
 717        project_searchable: bool,
 718        cx: &mut Context<BufferStore>,
 719    ) -> Task<Result<Entity<Buffer>>> {
 720        cx.spawn(async move |buffer_store, cx| {
 721            let buffer =
 722                cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx))?;
 723            buffer_store.update(cx, |buffer_store, cx| {
 724                buffer_store.add_buffer(buffer.clone(), cx).log_err();
 725                if !project_searchable {
 726                    buffer_store
 727                        .non_searchable_buffers
 728                        .insert(buffer.read(cx).remote_id());
 729                }
 730            })?;
 731            Ok(buffer)
 732        })
 733    }
 734
 735    fn reload_buffers(
 736        &self,
 737        buffers: HashSet<Entity<Buffer>>,
 738        push_to_history: bool,
 739        cx: &mut Context<BufferStore>,
 740    ) -> Task<Result<ProjectTransaction>> {
 741        cx.spawn(async move |_, cx| {
 742            let mut project_transaction = ProjectTransaction::default();
 743            for buffer in buffers {
 744                let transaction = buffer.update(cx, |buffer, cx| buffer.reload(cx))?.await?;
 745                buffer.update(cx, |buffer, cx| {
 746                    if let Some(transaction) = transaction {
 747                        if !push_to_history {
 748                            buffer.forget_transaction(transaction.id);
 749                        }
 750                        project_transaction.0.insert(cx.entity(), transaction);
 751                    }
 752                })?;
 753            }
 754
 755            Ok(project_transaction)
 756        })
 757    }
 758}
 759
 760impl BufferStore {
 761    pub fn init(client: &AnyProtoClient) {
 762        client.add_entity_message_handler(Self::handle_buffer_reloaded);
 763        client.add_entity_message_handler(Self::handle_buffer_saved);
 764        client.add_entity_message_handler(Self::handle_update_buffer_file);
 765        client.add_entity_request_handler(Self::handle_save_buffer);
 766        client.add_entity_request_handler(Self::handle_reload_buffers);
 767    }
 768
 769    /// Creates a buffer store, optionally retaining its buffers.
 770    pub fn local(worktree_store: Entity<WorktreeStore>, cx: &mut Context<Self>) -> Self {
 771        Self {
 772            state: BufferStoreState::Local(LocalBufferStore {
 773                local_buffer_ids_by_entry_id: Default::default(),
 774                worktree_store: worktree_store.clone(),
 775                _subscription: cx.subscribe(&worktree_store, |this, _, event, cx| {
 776                    if let WorktreeStoreEvent::WorktreeAdded(worktree) = event {
 777                        let this = this.as_local_mut().unwrap();
 778                        this.subscribe_to_worktree(worktree, cx);
 779                    }
 780                }),
 781            }),
 782            downstream_client: None,
 783            opened_buffers: Default::default(),
 784            path_to_buffer_id: Default::default(),
 785            shared_buffers: Default::default(),
 786            loading_buffers: Default::default(),
 787            non_searchable_buffers: Default::default(),
 788            worktree_store,
 789        }
 790    }
 791
 792    pub fn remote(
 793        worktree_store: Entity<WorktreeStore>,
 794        upstream_client: AnyProtoClient,
 795        remote_id: u64,
 796        _cx: &mut Context<Self>,
 797    ) -> Self {
 798        Self {
 799            state: BufferStoreState::Remote(RemoteBufferStore {
 800                shared_with_me: Default::default(),
 801                loading_remote_buffers_by_id: Default::default(),
 802                remote_buffer_listeners: Default::default(),
 803                project_id: remote_id,
 804                upstream_client,
 805                worktree_store: worktree_store.clone(),
 806            }),
 807            downstream_client: None,
 808            opened_buffers: Default::default(),
 809            path_to_buffer_id: Default::default(),
 810            loading_buffers: Default::default(),
 811            shared_buffers: Default::default(),
 812            non_searchable_buffers: Default::default(),
 813            worktree_store,
 814        }
 815    }
 816
 817    fn as_local_mut(&mut self) -> Option<&mut LocalBufferStore> {
 818        match &mut self.state {
 819            BufferStoreState::Local(state) => Some(state),
 820            _ => None,
 821        }
 822    }
 823
 824    fn as_remote_mut(&mut self) -> Option<&mut RemoteBufferStore> {
 825        match &mut self.state {
 826            BufferStoreState::Remote(state) => Some(state),
 827            _ => None,
 828        }
 829    }
 830
 831    fn as_remote(&self) -> Option<&RemoteBufferStore> {
 832        match &self.state {
 833            BufferStoreState::Remote(state) => Some(state),
 834            _ => None,
 835        }
 836    }
 837
 838    pub fn open_buffer(
 839        &mut self,
 840        project_path: ProjectPath,
 841        options: &EncodingOptions,
 842        cx: &mut Context<Self>,
 843    ) -> Task<Result<Entity<Buffer>>> {
 844        if let Some(buffer) = self.get_by_path(&project_path) {
 845            cx.emit(BufferStoreEvent::BufferOpened {
 846                buffer: buffer.clone(),
 847                project_path,
 848            });
 849
 850            return Task::ready(Ok(buffer));
 851        }
 852
 853        let task = match self.loading_buffers.entry(project_path.clone()) {
 854            hash_map::Entry::Occupied(e) => e.get().clone(),
 855            hash_map::Entry::Vacant(entry) => {
 856                let path = project_path.path.clone();
 857                let Some(worktree) = self
 858                    .worktree_store
 859                    .read(cx)
 860                    .worktree_for_id(project_path.worktree_id, cx)
 861                else {
 862                    return Task::ready(Err(anyhow!("no such worktree")));
 863                };
 864                let load_buffer = match &self.state {
 865                    BufferStoreState::Local(this) => this.open_buffer(path, worktree, options, cx),
 866                    BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx),
 867                };
 868
 869                entry
 870                    .insert(
 871                        // todo(lw): hot foreground spawn
 872                        cx.spawn(async move |this, cx| {
 873                            let load_result = load_buffer.await;
 874                            this.update(cx, |this, cx| {
 875                                // Record the fact that the buffer is no longer loading.
 876                                this.loading_buffers.remove(&project_path);
 877
 878                                let buffer = load_result.map_err(Arc::new)?;
 879                                cx.emit(BufferStoreEvent::BufferOpened {
 880                                    buffer: buffer.clone(),
 881                                    project_path,
 882                                });
 883
 884                                Ok(buffer)
 885                            })?
 886                        })
 887                        .shared(),
 888                    )
 889                    .clone()
 890            }
 891        };
 892
 893        cx.background_spawn(async move {
 894            task.await.map_err(|e| {
 895                if e.error_code() != ErrorCode::Internal {
 896                    anyhow!(e.error_code())
 897                } else {
 898                    anyhow!("{e}")
 899                }
 900            })
 901        })
 902    }
 903
 904    pub fn create_buffer(
 905        &mut self,
 906        project_searchable: bool,
 907        cx: &mut Context<Self>,
 908    ) -> Task<Result<Entity<Buffer>>> {
 909        match &self.state {
 910            BufferStoreState::Local(this) => this.create_buffer(project_searchable, cx),
 911            BufferStoreState::Remote(this) => this.create_buffer(project_searchable, cx),
 912        }
 913    }
 914
 915    pub fn save_buffer(
 916        &mut self,
 917        buffer: Entity<Buffer>,
 918        cx: &mut Context<Self>,
 919    ) -> Task<Result<()>> {
 920        match &mut self.state {
 921            BufferStoreState::Local(this) => this.save_buffer(buffer, cx),
 922            BufferStoreState::Remote(this) => this.save_remote_buffer(buffer, None, cx),
 923        }
 924    }
 925
 926    pub fn save_buffer_as(
 927        &mut self,
 928        buffer: Entity<Buffer>,
 929        path: ProjectPath,
 930        cx: &mut Context<Self>,
 931    ) -> Task<Result<()>> {
 932        let old_file = buffer.read(cx).file().cloned();
 933        let task = match &self.state {
 934            BufferStoreState::Local(this) => this.save_buffer_as(buffer.clone(), path, cx),
 935            BufferStoreState::Remote(this) => {
 936                this.save_remote_buffer(buffer.clone(), Some(path.to_proto()), cx)
 937            }
 938        };
 939        cx.spawn(async move |this, cx| {
 940            task.await?;
 941            this.update(cx, |this, cx| {
 942                old_file.clone().and_then(|file| {
 943                    this.path_to_buffer_id.remove(&ProjectPath {
 944                        worktree_id: file.worktree_id(cx),
 945                        path: file.path().clone(),
 946                    })
 947                });
 948
 949                cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
 950            })
 951        })
 952    }
 953
 954    fn add_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) -> Result<()> {
 955        let buffer = buffer_entity.read(cx);
 956        let remote_id = buffer.remote_id();
 957        let path = File::from_dyn(buffer.file()).map(|file| ProjectPath {
 958            path: file.path.clone(),
 959            worktree_id: file.worktree_id(cx),
 960        });
 961        let is_remote = buffer.replica_id().is_remote();
 962        let open_buffer = OpenBuffer::Complete {
 963            buffer: buffer_entity.downgrade(),
 964        };
 965
 966        let handle = cx.entity().downgrade();
 967        buffer_entity.update(cx, move |_, cx| {
 968            cx.on_release(move |buffer, cx| {
 969                handle
 970                    .update(cx, |_, cx| {
 971                        cx.emit(BufferStoreEvent::BufferDropped(buffer.remote_id()))
 972                    })
 973                    .ok();
 974            })
 975            .detach()
 976        });
 977        let _expect_path_to_exist;
 978        match self.opened_buffers.entry(remote_id) {
 979            hash_map::Entry::Vacant(entry) => {
 980                entry.insert(open_buffer);
 981                _expect_path_to_exist = false;
 982            }
 983            hash_map::Entry::Occupied(mut entry) => {
 984                if let OpenBuffer::Operations(operations) = entry.get_mut() {
 985                    buffer_entity.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx));
 986                } else if entry.get().upgrade().is_some() {
 987                    if is_remote {
 988                        return Ok(());
 989                    } else {
 990                        debug_panic!("buffer {remote_id} was already registered");
 991                        anyhow::bail!("buffer {remote_id} was already registered");
 992                    }
 993                }
 994                entry.insert(open_buffer);
 995                _expect_path_to_exist = true;
 996            }
 997        }
 998
 999        if let Some(path) = path {
1000            self.path_to_buffer_id.insert(path, remote_id);
1001        }
1002
1003        cx.subscribe(&buffer_entity, Self::on_buffer_event).detach();
1004        cx.emit(BufferStoreEvent::BufferAdded(buffer_entity));
1005        Ok(())
1006    }
1007
1008    pub fn buffers(&self) -> impl '_ + Iterator<Item = Entity<Buffer>> {
1009        self.opened_buffers
1010            .values()
1011            .filter_map(|buffer| buffer.upgrade())
1012    }
1013
1014    pub fn loading_buffers(
1015        &self,
1016    ) -> impl Iterator<Item = (&ProjectPath, impl Future<Output = Result<Entity<Buffer>>>)> {
1017        self.loading_buffers.iter().map(|(path, task)| {
1018            let task = task.clone();
1019            (path, async move {
1020                task.await.map_err(|e| {
1021                    if e.error_code() != ErrorCode::Internal {
1022                        anyhow!(e.error_code())
1023                    } else {
1024                        anyhow!("{e}")
1025                    }
1026                })
1027            })
1028        })
1029    }
1030
1031    pub fn buffer_id_for_project_path(&self, project_path: &ProjectPath) -> Option<&BufferId> {
1032        self.path_to_buffer_id.get(project_path)
1033    }
1034
1035    pub fn get_by_path(&self, path: &ProjectPath) -> Option<Entity<Buffer>> {
1036        self.path_to_buffer_id
1037            .get(path)
1038            .and_then(|buffer_id| self.get(*buffer_id))
1039    }
1040
1041    pub fn get(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1042        self.opened_buffers.get(&buffer_id)?.upgrade()
1043    }
1044
1045    pub fn get_existing(&self, buffer_id: BufferId) -> Result<Entity<Buffer>> {
1046        self.get(buffer_id)
1047            .with_context(|| format!("unknown buffer id {buffer_id}"))
1048    }
1049
1050    pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1051        self.get(buffer_id).or_else(|| {
1052            self.as_remote()
1053                .and_then(|remote| remote.loading_remote_buffers_by_id.get(&buffer_id).cloned())
1054        })
1055    }
1056
1057    pub fn buffer_version_info(&self, cx: &App) -> (Vec<proto::BufferVersion>, Vec<BufferId>) {
1058        let buffers = self
1059            .buffers()
1060            .map(|buffer| {
1061                let buffer = buffer.read(cx);
1062                proto::BufferVersion {
1063                    id: buffer.remote_id().into(),
1064                    version: language::proto::serialize_version(&buffer.version),
1065                }
1066            })
1067            .collect();
1068        let incomplete_buffer_ids = self
1069            .as_remote()
1070            .map(|remote| remote.incomplete_buffer_ids())
1071            .unwrap_or_default();
1072        (buffers, incomplete_buffer_ids)
1073    }
1074
1075    pub fn disconnected_from_host(&mut self, cx: &mut App) {
1076        for open_buffer in self.opened_buffers.values_mut() {
1077            if let Some(buffer) = open_buffer.upgrade() {
1078                buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1079            }
1080        }
1081
1082        for buffer in self.buffers() {
1083            buffer.update(cx, |buffer, cx| {
1084                buffer.set_capability(Capability::ReadOnly, cx)
1085            });
1086        }
1087
1088        if let Some(remote) = self.as_remote_mut() {
1089            // Wake up all futures currently waiting on a buffer to get opened,
1090            // to give them a chance to fail now that we've disconnected.
1091            remote.remote_buffer_listeners.clear()
1092        }
1093    }
1094
1095    pub fn shared(&mut self, remote_id: u64, downstream_client: AnyProtoClient, _cx: &mut App) {
1096        self.downstream_client = Some((downstream_client, remote_id));
1097    }
1098
1099    pub fn unshared(&mut self, _cx: &mut Context<Self>) {
1100        self.downstream_client.take();
1101        self.forget_shared_buffers();
1102    }
1103
1104    pub fn discard_incomplete(&mut self) {
1105        self.opened_buffers
1106            .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
1107    }
1108
1109    fn buffer_changed_file(&mut self, buffer: Entity<Buffer>, cx: &mut App) -> Option<()> {
1110        let file = File::from_dyn(buffer.read(cx).file())?;
1111
1112        let remote_id = buffer.read(cx).remote_id();
1113        if let Some(entry_id) = file.entry_id {
1114            if let Some(local) = self.as_local_mut() {
1115                match local.local_buffer_ids_by_entry_id.get(&entry_id) {
1116                    Some(_) => {
1117                        return None;
1118                    }
1119                    None => {
1120                        local
1121                            .local_buffer_ids_by_entry_id
1122                            .insert(entry_id, remote_id);
1123                    }
1124                }
1125            }
1126            self.path_to_buffer_id.insert(
1127                ProjectPath {
1128                    worktree_id: file.worktree_id(cx),
1129                    path: file.path.clone(),
1130                },
1131                remote_id,
1132            );
1133        };
1134
1135        Some(())
1136    }
1137
1138    pub fn find_search_candidates(
1139        &mut self,
1140        query: &SearchQuery,
1141        mut limit: usize,
1142        fs: Arc<dyn Fs>,
1143        cx: &mut Context<Self>,
1144    ) -> Receiver<Entity<Buffer>> {
1145        let (tx, rx) = smol::channel::unbounded();
1146        let mut open_buffers = HashSet::default();
1147        let mut unnamed_buffers = Vec::new();
1148        for handle in self.buffers() {
1149            let buffer = handle.read(cx);
1150            if self.non_searchable_buffers.contains(&buffer.remote_id()) {
1151                continue;
1152            } else if let Some(entry_id) = buffer.entry_id(cx) {
1153                open_buffers.insert(entry_id);
1154            } else {
1155                limit = limit.saturating_sub(1);
1156                unnamed_buffers.push(handle)
1157            };
1158        }
1159
1160        const MAX_CONCURRENT_BUFFER_OPENS: usize = 64;
1161        let project_paths_rx = self
1162            .worktree_store
1163            .update(cx, |worktree_store, cx| {
1164                worktree_store.find_search_candidates(query.clone(), limit, open_buffers, fs, cx)
1165            })
1166            .chunks(MAX_CONCURRENT_BUFFER_OPENS);
1167
1168        cx.spawn(async move |this, cx| {
1169            for buffer in unnamed_buffers {
1170                tx.send(buffer).await.ok();
1171            }
1172
1173            let mut project_paths_rx = pin!(project_paths_rx);
1174            while let Some(project_paths) = project_paths_rx.next().await {
1175                let buffers = this.update(cx, |this, cx| {
1176                    project_paths
1177                        .into_iter()
1178                        .map(|project_path| this.open_buffer(project_path, &Default::default(), cx))
1179                        .collect::<Vec<_>>()
1180                })?;
1181                for buffer_task in buffers {
1182                    if let Some(buffer) = buffer_task.await.log_err()
1183                        && tx.send(buffer).await.is_err()
1184                    {
1185                        return anyhow::Ok(());
1186                    }
1187                }
1188            }
1189            anyhow::Ok(())
1190        })
1191        .detach();
1192        rx
1193    }
1194
1195    fn on_buffer_event(
1196        &mut self,
1197        buffer: Entity<Buffer>,
1198        event: &BufferEvent,
1199        cx: &mut Context<Self>,
1200    ) {
1201        match event {
1202            BufferEvent::FileHandleChanged => {
1203                self.buffer_changed_file(buffer, cx);
1204            }
1205            BufferEvent::Reloaded => {
1206                let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else {
1207                    return;
1208                };
1209                let buffer = buffer.read(cx);
1210                downstream_client
1211                    .send(proto::BufferReloaded {
1212                        project_id: *project_id,
1213                        buffer_id: buffer.remote_id().to_proto(),
1214                        version: serialize_version(&buffer.version()),
1215                        mtime: buffer.saved_mtime().map(|t| t.into()),
1216                        line_ending: serialize_line_ending(buffer.line_ending()) as i32,
1217                    })
1218                    .log_err();
1219            }
1220            BufferEvent::LanguageChanged => {}
1221            _ => {}
1222        }
1223    }
1224
1225    pub async fn handle_update_buffer(
1226        this: Entity<Self>,
1227        envelope: TypedEnvelope<proto::UpdateBuffer>,
1228        mut cx: AsyncApp,
1229    ) -> Result<proto::Ack> {
1230        let payload = envelope.payload;
1231        let buffer_id = BufferId::new(payload.buffer_id)?;
1232        let ops = payload
1233            .operations
1234            .into_iter()
1235            .map(language::proto::deserialize_operation)
1236            .collect::<Result<Vec<_>, _>>()?;
1237        this.update(&mut cx, |this, cx| {
1238            match this.opened_buffers.entry(buffer_id) {
1239                hash_map::Entry::Occupied(mut e) => match e.get_mut() {
1240                    OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
1241                    OpenBuffer::Complete { buffer, .. } => {
1242                        if let Some(buffer) = buffer.upgrade() {
1243                            buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx));
1244                        }
1245                    }
1246                },
1247                hash_map::Entry::Vacant(e) => {
1248                    e.insert(OpenBuffer::Operations(ops));
1249                }
1250            }
1251            Ok(proto::Ack {})
1252        })?
1253    }
1254
1255    pub fn register_shared_lsp_handle(
1256        &mut self,
1257        peer_id: proto::PeerId,
1258        buffer_id: BufferId,
1259        handle: OpenLspBufferHandle,
1260    ) {
1261        if let Some(shared_buffers) = self.shared_buffers.get_mut(&peer_id)
1262            && let Some(buffer) = shared_buffers.get_mut(&buffer_id)
1263        {
1264            buffer.lsp_handle = Some(handle);
1265            return;
1266        }
1267        debug_panic!("tried to register shared lsp handle, but buffer was not shared")
1268    }
1269
1270    pub fn handle_synchronize_buffers(
1271        &mut self,
1272        envelope: TypedEnvelope<proto::SynchronizeBuffers>,
1273        cx: &mut Context<Self>,
1274        client: Arc<Client>,
1275    ) -> Result<proto::SynchronizeBuffersResponse> {
1276        let project_id = envelope.payload.project_id;
1277        let mut response = proto::SynchronizeBuffersResponse {
1278            buffers: Default::default(),
1279        };
1280        let Some(guest_id) = envelope.original_sender_id else {
1281            anyhow::bail!("missing original_sender_id on SynchronizeBuffers request");
1282        };
1283
1284        self.shared_buffers.entry(guest_id).or_default().clear();
1285        for buffer in envelope.payload.buffers {
1286            let buffer_id = BufferId::new(buffer.id)?;
1287            let remote_version = language::proto::deserialize_version(&buffer.version);
1288            if let Some(buffer) = self.get(buffer_id) {
1289                self.shared_buffers
1290                    .entry(guest_id)
1291                    .or_default()
1292                    .entry(buffer_id)
1293                    .or_insert_with(|| SharedBuffer {
1294                        buffer: buffer.clone(),
1295                        lsp_handle: None,
1296                    });
1297
1298                let buffer = buffer.read(cx);
1299                response.buffers.push(proto::BufferVersion {
1300                    id: buffer_id.into(),
1301                    version: language::proto::serialize_version(&buffer.version),
1302                });
1303
1304                let operations = buffer.serialize_ops(Some(remote_version), cx);
1305                let client = client.clone();
1306                if let Some(file) = buffer.file() {
1307                    client
1308                        .send(proto::UpdateBufferFile {
1309                            project_id,
1310                            buffer_id: buffer_id.into(),
1311                            file: Some(file.to_proto(cx)),
1312                        })
1313                        .log_err();
1314                }
1315
1316                // TODO(max): do something
1317                // client
1318                //     .send(proto::UpdateStagedText {
1319                //         project_id,
1320                //         buffer_id: buffer_id.into(),
1321                //         diff_base: buffer.diff_base().map(ToString::to_string),
1322                //     })
1323                //     .log_err();
1324
1325                client
1326                    .send(proto::BufferReloaded {
1327                        project_id,
1328                        buffer_id: buffer_id.into(),
1329                        version: language::proto::serialize_version(buffer.saved_version()),
1330                        mtime: buffer.saved_mtime().map(|time| time.into()),
1331                        line_ending: language::proto::serialize_line_ending(buffer.line_ending())
1332                            as i32,
1333                    })
1334                    .log_err();
1335
1336                cx.background_spawn(
1337                    async move {
1338                        let operations = operations.await;
1339                        for chunk in split_operations(operations) {
1340                            client
1341                                .request(proto::UpdateBuffer {
1342                                    project_id,
1343                                    buffer_id: buffer_id.into(),
1344                                    operations: chunk,
1345                                })
1346                                .await?;
1347                        }
1348                        anyhow::Ok(())
1349                    }
1350                    .log_err(),
1351                )
1352                .detach();
1353            }
1354        }
1355        Ok(response)
1356    }
1357
1358    pub fn handle_create_buffer_for_peer(
1359        &mut self,
1360        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
1361        replica_id: ReplicaId,
1362        capability: Capability,
1363        cx: &mut Context<Self>,
1364    ) -> Result<()> {
1365        let remote = self
1366            .as_remote_mut()
1367            .context("buffer store is not a remote")?;
1368
1369        if let Some(buffer) =
1370            remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx)?
1371        {
1372            self.add_buffer(buffer, cx)?;
1373        }
1374
1375        Ok(())
1376    }
1377
1378    pub async fn handle_update_buffer_file(
1379        this: Entity<Self>,
1380        envelope: TypedEnvelope<proto::UpdateBufferFile>,
1381        mut cx: AsyncApp,
1382    ) -> Result<()> {
1383        let buffer_id = envelope.payload.buffer_id;
1384        let buffer_id = BufferId::new(buffer_id)?;
1385
1386        this.update(&mut cx, |this, cx| {
1387            let payload = envelope.payload.clone();
1388            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
1389                let file = payload.file.context("invalid file")?;
1390                let worktree = this
1391                    .worktree_store
1392                    .read(cx)
1393                    .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
1394                    .context("no such worktree")?;
1395                let file = File::from_proto(file, worktree, cx)?;
1396                let old_file = buffer.update(cx, |buffer, cx| {
1397                    let old_file = buffer.file().cloned();
1398                    let new_path = file.path.clone();
1399
1400                    buffer.file_updated(Arc::new(file), cx);
1401                    if old_file.as_ref().is_none_or(|old| *old.path() != new_path) {
1402                        Some(old_file)
1403                    } else {
1404                        None
1405                    }
1406                });
1407                if let Some(old_file) = old_file {
1408                    cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
1409                }
1410            }
1411            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
1412                downstream_client
1413                    .send(proto::UpdateBufferFile {
1414                        project_id: *project_id,
1415                        buffer_id: buffer_id.into(),
1416                        file: envelope.payload.file,
1417                    })
1418                    .log_err();
1419            }
1420            Ok(())
1421        })?
1422    }
1423
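        /// Handles a `SaveBuffer` request: waits for the requested buffer version,
        /// saves the buffer (under a new path if one was provided), and replies with the
        /// saved version and mtime.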
1424    pub async fn handle_save_buffer(
1425        this: Entity<Self>,
1426        envelope: TypedEnvelope<proto::SaveBuffer>,
1427        mut cx: AsyncApp,
1428    ) -> Result<proto::BufferSaved> {
1429        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1430        let (buffer, project_id) = this.read_with(&cx, |this, _| {
1431            anyhow::Ok((
1432                this.get_existing(buffer_id)?,
1433                this.downstream_client
1434                    .as_ref()
1435                    .map(|(_, project_id)| *project_id)
1436                    .context("project is not shared")?,
1437            ))
1438        })??;
1439        buffer
1440            .update(&mut cx, |buffer, _| {
1441                buffer.wait_for_version(deserialize_version(&envelope.payload.version))
1442            })?
1443            .await?;
1444        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1445
1446        if let Some(new_path) = envelope.payload.new_path
1447            && let Some(new_path) = ProjectPath::from_proto(new_path)
1448        {
1449            this.update(&mut cx, |this, cx| {
1450                this.save_buffer_as(buffer.clone(), new_path, cx)
1451            })?
1452            .await?;
1453        } else {
1454            this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
1455                .await?;
1456        }
1457
1458        buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
1459            project_id,
1460            buffer_id: buffer_id.into(),
1461            version: serialize_version(buffer.saved_version()),
1462            mtime: buffer.saved_mtime().map(|time| time.into()),
1463        })
1464    }
1465
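        /// Handles a `CloseBuffer` message by dropping the sending peer's handle to the
        /// shared buffer and emitting `SharedBufferClosed`.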
1466    pub async fn handle_close_buffer(
1467        this: Entity<Self>,
1468        envelope: TypedEnvelope<proto::CloseBuffer>,
1469        mut cx: AsyncApp,
1470    ) -> Result<()> {
1471        let peer_id = envelope.sender_id;
1472        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1473        this.update(&mut cx, |this, cx| {
1474            if let Some(shared) = this.shared_buffers.get_mut(&peer_id)
1475                && shared.remove(&buffer_id).is_some()
1476            {
1477                cx.emit(BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id));
1478                if shared.is_empty() {
1479                    this.shared_buffers.remove(&peer_id);
1480                }
1481                return;
1482            }
1483            debug_panic!(
1484                "peer_id {} closed buffer_id {} which was either not open or already closed",
1485                peer_id,
1486                buffer_id
1487            )
1488        })
1489    }
1490
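        /// Handles a `BufferSaved` message, recording the saved version and mtime on the
        /// buffer and forwarding the message to any downstream client.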
1491    pub async fn handle_buffer_saved(
1492        this: Entity<Self>,
1493        envelope: TypedEnvelope<proto::BufferSaved>,
1494        mut cx: AsyncApp,
1495    ) -> Result<()> {
1496        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1497        let version = deserialize_version(&envelope.payload.version);
1498        let mtime = envelope.payload.mtime.clone().map(|time| time.into());
1499        this.update(&mut cx, move |this, cx| {
1500            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
1501                buffer.update(cx, |buffer, cx| {
1502                    buffer.did_save(version, mtime, cx);
1503                });
1504            }
1505
1506            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
1507                downstream_client
1508                    .send(proto::BufferSaved {
1509                        project_id: *project_id,
1510                        buffer_id: buffer_id.into(),
1511                        mtime: envelope.payload.mtime,
1512                        version: envelope.payload.version,
1513                    })
1514                    .log_err();
1515            }
1516        })
1517    }
1518
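        /// Handles a `BufferReloaded` message, applying the reloaded version, line
        /// ending, and mtime to the buffer and forwarding the message to any downstream
        /// client.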
1519    pub async fn handle_buffer_reloaded(
1520        this: Entity<Self>,
1521        envelope: TypedEnvelope<proto::BufferReloaded>,
1522        mut cx: AsyncApp,
1523    ) -> Result<()> {
1524        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1525        let version = deserialize_version(&envelope.payload.version);
1526        let mtime = envelope.payload.mtime.clone().map(|time| time.into());
1527        let line_ending = deserialize_line_ending(
1528            proto::LineEnding::from_i32(envelope.payload.line_ending)
1529                .context("missing line ending")?,
1530        );
1531        this.update(&mut cx, |this, cx| {
1532            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
1533                buffer.update(cx, |buffer, cx| {
1534                    buffer.did_reload(version, line_ending, mtime, cx);
1535                });
1536            }
1537
1538            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
1539                downstream_client
1540                    .send(proto::BufferReloaded {
1541                        project_id: *project_id,
1542                        buffer_id: buffer_id.into(),
1543                        mtime: envelope.payload.mtime,
1544                        version: envelope.payload.version,
1545                        line_ending: envelope.payload.line_ending,
1546                    })
1547                    .log_err();
1548            }
1549        })
1550    }
1551
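        /// Reloads the given buffers, delegating to the local or remote implementation,
        /// and returns the resulting `ProjectTransaction`.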
1552    pub fn reload_buffers(
1553        &self,
1554        buffers: HashSet<Entity<Buffer>>,
1555        push_to_history: bool,
1556        cx: &mut Context<Self>,
1557    ) -> Task<Result<ProjectTransaction>> {
1558        if buffers.is_empty() {
1559            return Task::ready(Ok(ProjectTransaction::default()));
1560        }
1561        match &self.state {
1562            BufferStoreState::Local(this) => this.reload_buffers(buffers, push_to_history, cx),
1563            BufferStoreState::Remote(this) => this.reload_buffers(buffers, push_to_history, cx),
1564        }
1565    }
1566
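        /// Handles a `ReloadBuffers` request by reloading the requested buffers and
        /// replying with the serialized project transaction.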
1567    async fn handle_reload_buffers(
1568        this: Entity<Self>,
1569        envelope: TypedEnvelope<proto::ReloadBuffers>,
1570        mut cx: AsyncApp,
1571    ) -> Result<proto::ReloadBuffersResponse> {
1572        let sender_id = envelope.original_sender_id().unwrap_or_default();
1573        let reload = this.update(&mut cx, |this, cx| {
1574            let mut buffers = HashSet::default();
1575            for buffer_id in &envelope.payload.buffer_ids {
1576                let buffer_id = BufferId::new(*buffer_id)?;
1577                buffers.insert(this.get_existing(buffer_id)?);
1578            }
1579            anyhow::Ok(this.reload_buffers(buffers, false, cx))
1580        })??;
1581
1582        let project_transaction = reload.await?;
1583        let project_transaction = this.update(&mut cx, |this, cx| {
1584            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
1585        })?;
1586        Ok(proto::ReloadBuffersResponse {
1587            transaction: Some(project_transaction),
1588        })
1589    }
1590
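        /// Shares a buffer with the given peer, sending its full state followed by its
        /// serialized operations in chunks. Does nothing if the buffer is already shared
        /// with that peer or if there is no downstream client.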
1591    pub fn create_buffer_for_peer(
1592        &mut self,
1593        buffer: &Entity<Buffer>,
1594        peer_id: proto::PeerId,
1595        cx: &mut Context<Self>,
1596    ) -> Task<Result<()>> {
1597        let buffer_id = buffer.read(cx).remote_id();
1598        let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
1599        if shared_buffers.contains_key(&buffer_id) {
1600            return Task::ready(Ok(()));
1601        }
1602        shared_buffers.insert(
1603            buffer_id,
1604            SharedBuffer {
1605                buffer: buffer.clone(),
1606                lsp_handle: None,
1607            },
1608        );
1609
1610        let Some((client, project_id)) = self.downstream_client.clone() else {
1611            return Task::ready(Ok(()));
1612        };
1613
1614        cx.spawn(async move |this, cx| {
1615            let Some(buffer) = this.read_with(cx, |this, _| this.get(buffer_id))? else {
1616                return anyhow::Ok(());
1617            };
1618
1619            let operations = buffer.update(cx, |b, cx| b.serialize_ops(None, cx))?;
1620            let operations = operations.await;
1621            let state = buffer.update(cx, |buffer, cx| buffer.to_proto(cx))?;
1622
1623            let initial_state = proto::CreateBufferForPeer {
1624                project_id,
1625                peer_id: Some(peer_id),
1626                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1627            };
1628
1629            if client.send(initial_state).log_err().is_some() {
1630                let client = client.clone();
1631                cx.background_spawn(async move {
1632                    let mut chunks = split_operations(operations).peekable();
1633                    while let Some(chunk) = chunks.next() {
1634                        let is_last = chunks.peek().is_none();
1635                        client.send(proto::CreateBufferForPeer {
1636                            project_id,
1637                            peer_id: Some(peer_id),
1638                            variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
1639                                proto::BufferChunk {
1640                                    buffer_id: buffer_id.into(),
1641                                    operations: chunk,
1642                                    is_last,
1643                                },
1644                            )),
1645                        })?;
1646                    }
1647                    anyhow::Ok(())
1648                })
1649                .await
1650                .log_err();
1651            }
1652            Ok(())
1653        })
1654    }
1655
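        /// Clears all record of buffers shared with remote peers.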
1656    pub fn forget_shared_buffers(&mut self) {
1657        self.shared_buffers.clear();
1658    }
1659
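        /// Clears the record of buffers shared with the given peer.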
1660    pub fn forget_shared_buffers_for(&mut self, peer_id: &proto::PeerId) {
1661        self.shared_buffers.remove(peer_id);
1662    }
1663
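        /// Moves any shared-buffer state recorded for `old_peer_id` over to `new_peer_id`.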
1664    pub fn update_peer_id(&mut self, old_peer_id: &proto::PeerId, new_peer_id: proto::PeerId) {
1665        if let Some(buffers) = self.shared_buffers.remove(old_peer_id) {
1666            self.shared_buffers.insert(new_peer_id, buffers);
1667        }
1668    }
1669
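        /// Returns whether any buffers are currently shared with remote peers.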
1670    pub fn has_shared_buffers(&self) -> bool {
1671        !self.shared_buffers.is_empty()
1672    }
1673
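        /// Creates a new local buffer with the given text and language and registers it
        /// with the store. When `project_searchable` is false, the buffer is marked as
        /// non-searchable.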
1674    pub fn create_local_buffer(
1675        &mut self,
1676        text: &str,
1677        language: Option<Arc<Language>>,
1678        project_searchable: bool,
1679        cx: &mut Context<Self>,
1680    ) -> Entity<Buffer> {
1681        let buffer = cx.new(|cx| {
1682            Buffer::local(text, cx)
1683                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1684        });
1685
1686        self.add_buffer(buffer.clone(), cx).log_err();
1687        let buffer_id = buffer.read(cx).remote_id();
1688        if !project_searchable {
1689            self.non_searchable_buffers.insert(buffer_id);
1690        }
1691
1692        if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1693            self.path_to_buffer_id.insert(
1694                ProjectPath {
1695                    worktree_id: file.worktree_id(cx),
1696                    path: file.path.clone(),
1697                },
1698                buffer_id,
1699            );
1700            let this = self
1701                .as_local_mut()
1702                .expect("local-only method called in a non-local context");
1703            if let Some(entry_id) = file.entry_id {
1704                this.local_buffer_ids_by_entry_id
1705                    .insert(entry_id, buffer_id);
1706            }
1707        }
1708        buffer
1709    }
1710
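        /// Applies a serialized project transaction received over RPC. Only valid on a
        /// remote buffer store.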
1711    pub fn deserialize_project_transaction(
1712        &mut self,
1713        message: proto::ProjectTransaction,
1714        push_to_history: bool,
1715        cx: &mut Context<Self>,
1716    ) -> Task<Result<ProjectTransaction>> {
1717        if let Some(this) = self.as_remote_mut() {
1718            this.deserialize_project_transaction(message, push_to_history, cx)
1719        } else {
1720            debug_panic!("not a remote buffer store");
1721            Task::ready(Err(anyhow!("not a remote buffer store")))
1722        }
1723    }
1724
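        /// Waits for the buffer with the given id to arrive from the upstream peer.
        /// Only valid on a remote buffer store.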
1725    pub fn wait_for_remote_buffer(
1726        &mut self,
1727        id: BufferId,
1728        cx: &mut Context<BufferStore>,
1729    ) -> Task<Result<Entity<Buffer>>> {
1730        if let Some(this) = self.as_remote_mut() {
1731            this.wait_for_remote_buffer(id, cx)
1732        } else {
1733            debug_panic!("not a remote buffer store");
1734            Task::ready(Err(anyhow!("not a remote buffer store")))
1735        }
1736    }
1737
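        /// Serializes a project transaction for the given peer, first sharing each
        /// affected buffer with that peer so the buffer ids in the message can be
        /// resolved on the other side.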
1738    pub fn serialize_project_transaction_for_peer(
1739        &mut self,
1740        project_transaction: ProjectTransaction,
1741        peer_id: proto::PeerId,
1742        cx: &mut Context<Self>,
1743    ) -> proto::ProjectTransaction {
1744        let mut serialized_transaction = proto::ProjectTransaction {
1745            buffer_ids: Default::default(),
1746            transactions: Default::default(),
1747        };
1748        for (buffer, transaction) in project_transaction.0 {
1749            self.create_buffer_for_peer(&buffer, peer_id, cx)
1750                .detach_and_log_err(cx);
1751            serialized_transaction
1752                .buffer_ids
1753                .push(buffer.read(cx).remote_id().into());
1754            serialized_transaction
1755                .transactions
1756                .push(language::proto::serialize_transaction(&transaction));
1757        }
1758        serialized_transaction
1759    }
1760}
1761
1762impl OpenBuffer {
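        /// Returns the buffer entity if it is complete and still alive.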
1763    fn upgrade(&self) -> Option<Entity<Buffer>> {
1764        match self {
1765            OpenBuffer::Complete { buffer, .. } => buffer.upgrade(),
1766            OpenBuffer::Operations(_) => None,
1767        }
1768    }
1769}
1770
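    /// Returns true if the error's root cause is an `io::Error` with kind `NotFound`.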
1771fn is_not_found_error(error: &anyhow::Error) -> bool {
1772    error
1773        .root_cause()
1774        .downcast_ref::<io::Error>()
1775        .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
1776}