buffer_store.rs

   1use crate::{
   2    ProjectPath,
   3    lsp_store::OpenLspBufferHandle,
   4    worktree_store::{WorktreeStore, WorktreeStoreEvent},
   5};
   6use anyhow::{Context as _, Result, anyhow};
   7use client::Client;
   8use collections::{HashMap, HashSet, hash_map};
   9use futures::{Future, FutureExt as _, channel::oneshot, future::Shared};
  10use gpui::{
  11    App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
  12};
  13use language::{
  14    Buffer, BufferEvent, Capability, DiskState, File as _, Language, Operation,
  15    proto::{
  16        deserialize_line_ending, deserialize_version, serialize_line_ending, serialize_version,
  17        split_operations,
  18    },
  19};
  20use rpc::{
  21    AnyProtoClient, ErrorCode, ErrorExt as _, TypedEnvelope,
  22    proto::{self},
  23};
  24
  25use std::{io, sync::Arc, time::Instant};
  26use text::{BufferId, ReplicaId};
  27use util::{ResultExt as _, TryFutureExt, debug_panic, maybe, rel_path::RelPath};
  28use worktree::{File, PathChange, ProjectEntryId, Worktree, WorktreeId};
  29
  30/// A set of open buffers.
  31pub struct BufferStore {
  32    state: BufferStoreState,
  33    #[allow(clippy::type_complexity)]
  34    loading_buffers: HashMap<ProjectPath, Shared<Task<Result<Entity<Buffer>, Arc<anyhow::Error>>>>>,
  35    worktree_store: Entity<WorktreeStore>,
  36    opened_buffers: HashMap<BufferId, OpenBuffer>,
  37    path_to_buffer_id: HashMap<ProjectPath, BufferId>,
  38    downstream_client: Option<(AnyProtoClient, u64)>,
  39    shared_buffers: HashMap<proto::PeerId, HashMap<BufferId, SharedBuffer>>,
  40    non_searchable_buffers: HashSet<BufferId>,
  41}
  42
  43#[derive(Hash, Eq, PartialEq, Clone)]
  44struct SharedBuffer {
  45    buffer: Entity<Buffer>,
  46    lsp_handle: Option<OpenLspBufferHandle>,
  47}
  48
  49enum BufferStoreState {
  50    Local(LocalBufferStore),
  51    Remote(RemoteBufferStore),
  52}
  53
  54struct RemoteBufferStore {
  55    shared_with_me: HashSet<Entity<Buffer>>,
  56    upstream_client: AnyProtoClient,
  57    project_id: u64,
  58    loading_remote_buffers_by_id: HashMap<BufferId, Entity<Buffer>>,
  59    remote_buffer_listeners:
  60        HashMap<BufferId, Vec<oneshot::Sender<anyhow::Result<Entity<Buffer>>>>>,
  61    worktree_store: Entity<WorktreeStore>,
  62}
  63
  64struct LocalBufferStore {
  65    local_buffer_ids_by_entry_id: HashMap<ProjectEntryId, BufferId>,
  66    worktree_store: Entity<WorktreeStore>,
  67    _subscription: Subscription,
  68}
  69
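    /// An entry in `opened_buffers`. The `Operations` variant buffers edits that
    /// arrive for a buffer before the buffer itself has been created; they are
    /// drained and applied once the buffer is registered in `add_buffer`.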
  70enum OpenBuffer {
  71    Complete { buffer: WeakEntity<Buffer> },
  72    Operations(Vec<Operation>),
  73}
  74
  75pub enum BufferStoreEvent {
  76    BufferAdded(Entity<Buffer>),
  77    BufferOpened {
  78        buffer: Entity<Buffer>,
  79        project_path: ProjectPath,
  80    },
  81    SharedBufferClosed(proto::PeerId, BufferId),
  82    BufferDropped(BufferId),
  83    BufferChangedFilePath {
  84        buffer: Entity<Buffer>,
  85        old_file: Option<Arc<dyn language::File>>,
  86    },
  87}
  88
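    /// A set of buffer edits produced by a project-wide operation, keyed by buffer.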
  89#[derive(Default, Debug, Clone)]
  90pub struct ProjectTransaction(pub HashMap<Entity<Buffer>, language::Transaction>);
  91
  92impl PartialEq for ProjectTransaction {
  93    fn eq(&self, other: &Self) -> bool {
  94        self.0.len() == other.0.len()
  95            && self.0.iter().all(|(buffer, transaction)| {
  96                other.0.get(buffer).is_some_and(|t| t.id == transaction.id)
  97            })
  98    }
  99}
 100
 101impl EventEmitter<BufferStoreEvent> for BufferStore {}
 102
 103impl RemoteBufferStore {
 104    pub fn wait_for_remote_buffer(
 105        &mut self,
 106        id: BufferId,
 107        cx: &mut Context<BufferStore>,
 108    ) -> Task<Result<Entity<Buffer>>> {
 109        let (tx, rx) = oneshot::channel();
 110        self.remote_buffer_listeners.entry(id).or_default().push(tx);
 111
 112        cx.spawn(async move |this, cx| {
 113            if let Some(buffer) = this
 114                .read_with(cx, |buffer_store, _| buffer_store.get(id))
 115                .ok()
 116                .flatten()
 117            {
 118                return Ok(buffer);
 119            }
 120
 121            cx.background_spawn(async move { rx.await? }).await
 122        })
 123    }
 124
 125    fn save_remote_buffer(
 126        &self,
 127        buffer_handle: Entity<Buffer>,
 128        new_path: Option<proto::ProjectPath>,
 129        cx: &Context<BufferStore>,
 130    ) -> Task<Result<()>> {
 131        let buffer = buffer_handle.read(cx);
 132        let buffer_id = buffer.remote_id().into();
 133        let version = buffer.version();
 134        let rpc = self.upstream_client.clone();
 135        let project_id = self.project_id;
 136        cx.spawn(async move |_, cx| {
 137            let response = rpc
 138                .request(proto::SaveBuffer {
 139                    project_id,
 140                    buffer_id,
 141                    new_path,
 142                    version: serialize_version(&version),
 143                })
 144                .await?;
 145            let version = deserialize_version(&response.version);
 146            let mtime = response.mtime.map(|mtime| mtime.into());
 147
 148            buffer_handle.update(cx, |buffer, cx| {
 149                buffer.did_save(version.clone(), mtime, cx);
 150            })?;
 151
 152            Ok(())
 153        })
 154    }
 155
 156    pub fn handle_create_buffer_for_peer(
 157        &mut self,
 158        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
 159        replica_id: ReplicaId,
 160        capability: Capability,
 161        cx: &mut Context<BufferStore>,
 162    ) -> Result<Option<Entity<Buffer>>> {
 163        match envelope.payload.variant.context("missing variant")? {
 164            proto::create_buffer_for_peer::Variant::State(mut state) => {
 165                let buffer_id = BufferId::new(state.id)?;
 166
 167                let buffer_result = maybe!({
 168                    let mut buffer_file = None;
 169                    if let Some(file) = state.file.take() {
 170                        let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id);
 171                        let worktree = self
 172                            .worktree_store
 173                            .read(cx)
 174                            .worktree_for_id(worktree_id, cx)
 175                            .with_context(|| {
 176                                format!("no worktree found for id {}", file.worktree_id)
 177                            })?;
 178                        buffer_file = Some(Arc::new(File::from_proto(file, worktree, cx)?)
 179                            as Arc<dyn language::File>);
 180                    }
 181                    Buffer::from_proto(replica_id, capability, state, buffer_file)
 182                });
 183
 184                match buffer_result {
 185                    Ok(buffer) => {
 186                        let buffer = cx.new(|_| buffer);
 187                        self.loading_remote_buffers_by_id.insert(buffer_id, buffer);
 188                    }
 189                    Err(error) => {
 190                        if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
 191                            for listener in listeners {
 192                                listener.send(Err(anyhow!(error.cloned()))).ok();
 193                            }
 194                        }
 195                    }
 196                }
 197            }
 198            proto::create_buffer_for_peer::Variant::Chunk(chunk) => {
 199                let buffer_id = BufferId::new(chunk.buffer_id)?;
 200                let buffer = self
 201                    .loading_remote_buffers_by_id
 202                    .get(&buffer_id)
 203                    .cloned()
 204                    .with_context(|| {
 205                        format!(
 206                            "received chunk for buffer {} without initial state",
 207                            chunk.buffer_id
 208                        )
 209                    })?;
 210
 211                let result = maybe!({
 212                    let operations = chunk
 213                        .operations
 214                        .into_iter()
 215                        .map(language::proto::deserialize_operation)
 216                        .collect::<Result<Vec<_>>>()?;
 217                    buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx));
 218                    anyhow::Ok(())
 219                });
 220
 221                if let Err(error) = result {
 222                    self.loading_remote_buffers_by_id.remove(&buffer_id);
 223                    if let Some(listeners) = self.remote_buffer_listeners.remove(&buffer_id) {
 224                        for listener in listeners {
 225                            listener.send(Err(error.cloned())).ok();
 226                        }
 227                    }
 228                } else if chunk.is_last {
 229                    self.loading_remote_buffers_by_id.remove(&buffer_id);
 230                    if self.upstream_client.is_via_collab() {
 231                        // retain buffers sent by peers to avoid races.
 232                        self.shared_with_me.insert(buffer.clone());
 233                    }
 234
 235                    if let Some(senders) = self.remote_buffer_listeners.remove(&buffer_id) {
 236                        for sender in senders {
 237                            sender.send(Ok(buffer.clone())).ok();
 238                        }
 239                    }
 240                    return Ok(Some(buffer));
 241                }
 242            }
 243        }
 244        Ok(None)
 245    }
 246
 247    pub fn incomplete_buffer_ids(&self) -> Vec<BufferId> {
 248        self.loading_remote_buffers_by_id
 249            .keys()
 250            .copied()
 251            .collect::<Vec<_>>()
 252    }
 253
 254    pub fn deserialize_project_transaction(
 255        &self,
 256        message: proto::ProjectTransaction,
 257        push_to_history: bool,
 258        cx: &mut Context<BufferStore>,
 259    ) -> Task<Result<ProjectTransaction>> {
 260        cx.spawn(async move |this, cx| {
 261            let mut project_transaction = ProjectTransaction::default();
 262            for (buffer_id, transaction) in message.buffer_ids.into_iter().zip(message.transactions)
 263            {
 264                let buffer_id = BufferId::new(buffer_id)?;
 265                let buffer = this
 266                    .update(cx, |this, cx| this.wait_for_remote_buffer(buffer_id, cx))?
 267                    .await?;
 268                let transaction = language::proto::deserialize_transaction(transaction)?;
 269                project_transaction.0.insert(buffer, transaction);
 270            }
 271
 272            for (buffer, transaction) in &project_transaction.0 {
 273                buffer
 274                    .update(cx, |buffer, _| {
 275                        buffer.wait_for_edits(transaction.edit_ids.iter().copied())
 276                    })?
 277                    .await?;
 278
 279                if push_to_history {
 280                    buffer.update(cx, |buffer, _| {
 281                        buffer.push_transaction(transaction.clone(), Instant::now());
 282                        buffer.finalize_last_transaction();
 283                    })?;
 284                }
 285            }
 286
 287            Ok(project_transaction)
 288        })
 289    }
 290
 291    fn open_buffer(
 292        &self,
 293        path: Arc<RelPath>,
 294        worktree: Entity<Worktree>,
 295        cx: &mut Context<BufferStore>,
 296    ) -> Task<Result<Entity<Buffer>>> {
 297        let worktree_id = worktree.read(cx).id().to_proto();
 298        let project_id = self.project_id;
 299        let client = self.upstream_client.clone();
 300        cx.spawn(async move |this, cx| {
 301            let response = client
 302                .request(proto::OpenBufferByPath {
 303                    project_id,
 304                    worktree_id,
 305                    path: path.to_proto(),
 306                })
 307                .await?;
 308            let buffer_id = BufferId::new(response.buffer_id)?;
 309
 310            let buffer = this
 311                .update(cx, {
 312                    |this, cx| this.wait_for_remote_buffer(buffer_id, cx)
 313                })?
 314                .await?;
 315
 316            Ok(buffer)
 317        })
 318    }
 319
 320    fn create_buffer(
 321        &self,
 322        project_searchable: bool,
 323        cx: &mut Context<BufferStore>,
 324    ) -> Task<Result<Entity<Buffer>>> {
 325        let create = self.upstream_client.request(proto::OpenNewBuffer {
 326            project_id: self.project_id,
 327        });
 328        cx.spawn(async move |this, cx| {
 329            let response = create.await?;
 330            let buffer_id = BufferId::new(response.buffer_id)?;
 331
 332            this.update(cx, |this, cx| {
 333                if !project_searchable {
 334                    this.non_searchable_buffers.insert(buffer_id);
 335                }
 336                this.wait_for_remote_buffer(buffer_id, cx)
 337            })?
 338            .await
 339        })
 340    }
 341
 342    fn reload_buffers(
 343        &self,
 344        buffers: HashSet<Entity<Buffer>>,
 345        push_to_history: bool,
 346        cx: &mut Context<BufferStore>,
 347    ) -> Task<Result<ProjectTransaction>> {
 348        let request = self.upstream_client.request(proto::ReloadBuffers {
 349            project_id: self.project_id,
 350            buffer_ids: buffers
 351                .iter()
 352                .map(|buffer| buffer.read(cx).remote_id().to_proto())
 353                .collect(),
 354        });
 355
 356        cx.spawn(async move |this, cx| {
 357            let response = request.await?.transaction.context("missing transaction")?;
 358            this.update(cx, |this, cx| {
 359                this.deserialize_project_transaction(response, push_to_history, cx)
 360            })?
 361            .await
 362        })
 363    }
 364}
 365
 366impl LocalBufferStore {
 367    fn save_local_buffer(
 368        &self,
 369        buffer_handle: Entity<Buffer>,
 370        worktree: Entity<Worktree>,
 371        path: Arc<RelPath>,
 372        mut has_changed_file: bool,
 373        cx: &mut Context<BufferStore>,
 374    ) -> Task<Result<()>> {
 375        let buffer = buffer_handle.read(cx);
 376
 377        let text = buffer.as_rope().clone();
 378        let line_ending = buffer.line_ending();
 379        let version = buffer.version();
 380        let buffer_id = buffer.remote_id();
 381        let file = buffer.file().cloned();
 382        if file
 383            .as_ref()
 384            .is_some_and(|file| file.disk_state() == DiskState::New)
 385        {
 386            has_changed_file = true;
 387        }
 388
 389        let save = worktree.update(cx, |worktree, cx| {
 390            worktree.write_file(path, text, line_ending, cx)
 391        });
 392
 393        cx.spawn(async move |this, cx| {
 394            let new_file = save.await?;
 395            let mtime = new_file.disk_state().mtime();
 396            this.update(cx, |this, cx| {
 397                if let Some((downstream_client, project_id)) = this.downstream_client.clone() {
 398                    if has_changed_file {
 399                        downstream_client
 400                            .send(proto::UpdateBufferFile {
 401                                project_id,
 402                                buffer_id: buffer_id.to_proto(),
 403                                file: Some(language::File::to_proto(&*new_file, cx)),
 404                            })
 405                            .log_err();
 406                    }
 407                    downstream_client
 408                        .send(proto::BufferSaved {
 409                            project_id,
 410                            buffer_id: buffer_id.to_proto(),
 411                            version: serialize_version(&version),
 412                            mtime: mtime.map(|time| time.into()),
 413                        })
 414                        .log_err();
 415                }
 416            })?;
 417            buffer_handle.update(cx, |buffer, cx| {
 418                if has_changed_file {
 419                    buffer.file_updated(new_file, cx);
 420                }
 421                buffer.did_save(version.clone(), mtime, cx);
 422            })
 423        })
 424    }
 425
 426    fn subscribe_to_worktree(
 427        &mut self,
 428        worktree: &Entity<Worktree>,
 429        cx: &mut Context<BufferStore>,
 430    ) {
 431        cx.subscribe(worktree, |this, worktree, event, cx| {
 432            if worktree.read(cx).is_local()
 433                && let worktree::Event::UpdatedEntries(changes) = event
 434            {
 435                Self::local_worktree_entries_changed(this, &worktree, changes, cx);
 436            }
 437        })
 438        .detach();
 439    }
 440
 441    fn local_worktree_entries_changed(
 442        this: &mut BufferStore,
 443        worktree_handle: &Entity<Worktree>,
 444        changes: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
 445        cx: &mut Context<BufferStore>,
 446    ) {
 447        let snapshot = worktree_handle.read(cx).snapshot();
 448        for (path, entry_id, _) in changes {
 449            Self::local_worktree_entry_changed(
 450                this,
 451                *entry_id,
 452                path,
 453                worktree_handle,
 454                &snapshot,
 455                cx,
 456            );
 457        }
 458    }
 459
 460    fn local_worktree_entry_changed(
 461        this: &mut BufferStore,
 462        entry_id: ProjectEntryId,
 463        path: &Arc<RelPath>,
 464        worktree: &Entity<worktree::Worktree>,
 465        snapshot: &worktree::Snapshot,
 466        cx: &mut Context<BufferStore>,
 467    ) -> Option<()> {
 468        let project_path = ProjectPath {
 469            worktree_id: snapshot.id(),
 470            path: path.clone(),
 471        };
 472
 473        let buffer_id = this
 474            .as_local_mut()
 475            .and_then(|local| local.local_buffer_ids_by_entry_id.get(&entry_id))
 476            .copied()
 477            .or_else(|| this.path_to_buffer_id.get(&project_path).copied())?;
 478
 479        let buffer = if let Some(buffer) = this.get(buffer_id) {
 480            Some(buffer)
 481        } else {
 482            this.opened_buffers.remove(&buffer_id);
 483            this.non_searchable_buffers.remove(&buffer_id);
 484            None
 485        };
 486
 487        let buffer = if let Some(buffer) = buffer {
 488            buffer
 489        } else {
 490            this.path_to_buffer_id.remove(&project_path);
 491            let this = this.as_local_mut()?;
 492            this.local_buffer_ids_by_entry_id.remove(&entry_id);
 493            return None;
 494        };
 495
 496        let events = buffer.update(cx, |buffer, cx| {
 497            let file = buffer.file()?;
 498            let old_file = File::from_dyn(Some(file))?;
 499            if old_file.worktree != *worktree {
 500                return None;
 501            }
 502
 503            let snapshot_entry = old_file
 504                .entry_id
 505                .and_then(|entry_id| snapshot.entry_for_id(entry_id))
 506                .or_else(|| snapshot.entry_for_path(old_file.path.as_ref()));
 507
 508            let new_file = if let Some(entry) = snapshot_entry {
 509                File {
 510                    disk_state: match entry.mtime {
 511                        Some(mtime) => DiskState::Present { mtime },
 512                        None => old_file.disk_state,
 513                    },
 514                    is_local: true,
 515                    entry_id: Some(entry.id),
 516                    path: entry.path.clone(),
 517                    worktree: worktree.clone(),
 518                    is_private: entry.is_private,
 519                }
 520            } else {
 521                File {
 522                    disk_state: DiskState::Deleted,
 523                    is_local: true,
 524                    entry_id: old_file.entry_id,
 525                    path: old_file.path.clone(),
 526                    worktree: worktree.clone(),
 527                    is_private: old_file.is_private,
 528                }
 529            };
 530
 531            if new_file == *old_file {
 532                return None;
 533            }
 534
 535            let mut events = Vec::new();
 536            if new_file.path != old_file.path {
 537                this.path_to_buffer_id.remove(&ProjectPath {
 538                    path: old_file.path.clone(),
 539                    worktree_id: old_file.worktree_id(cx),
 540                });
 541                this.path_to_buffer_id.insert(
 542                    ProjectPath {
 543                        worktree_id: new_file.worktree_id(cx),
 544                        path: new_file.path.clone(),
 545                    },
 546                    buffer_id,
 547                );
 548                events.push(BufferStoreEvent::BufferChangedFilePath {
 549                    buffer: cx.entity(),
 550                    old_file: buffer.file().cloned(),
 551                });
 552            }
 553            let local = this.as_local_mut()?;
 554            if new_file.entry_id != old_file.entry_id {
 555                if let Some(entry_id) = old_file.entry_id {
 556                    local.local_buffer_ids_by_entry_id.remove(&entry_id);
 557                }
 558                if let Some(entry_id) = new_file.entry_id {
 559                    local
 560                        .local_buffer_ids_by_entry_id
 561                        .insert(entry_id, buffer_id);
 562                }
 563            }
 564
 565            if let Some((client, project_id)) = &this.downstream_client {
 566                client
 567                    .send(proto::UpdateBufferFile {
 568                        project_id: *project_id,
 569                        buffer_id: buffer_id.to_proto(),
 570                        file: Some(new_file.to_proto(cx)),
 571                    })
 572                    .ok();
 573            }
 574
 575            buffer.file_updated(Arc::new(new_file), cx);
 576            Some(events)
 577        })?;
 578
 579        for event in events {
 580            cx.emit(event);
 581        }
 582
 583        None
 584    }
 585
 586    fn save_buffer(
 587        &self,
 588        buffer: Entity<Buffer>,
 589        cx: &mut Context<BufferStore>,
 590    ) -> Task<Result<()>> {
 591        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
 592            return Task::ready(Err(anyhow!("buffer doesn't have a file")));
 593        };
 594        let worktree = file.worktree.clone();
 595        self.save_local_buffer(buffer, worktree, file.path.clone(), false, cx)
 596    }
 597
 598    fn save_buffer_as(
 599        &self,
 600        buffer: Entity<Buffer>,
 601        path: ProjectPath,
 602        cx: &mut Context<BufferStore>,
 603    ) -> Task<Result<()>> {
 604        let Some(worktree) = self
 605            .worktree_store
 606            .read(cx)
 607            .worktree_for_id(path.worktree_id, cx)
 608        else {
 609            return Task::ready(Err(anyhow!("no such worktree")));
 610        };
 611        self.save_local_buffer(buffer, worktree, path.path, true, cx)
 612    }
 613
 614    fn open_buffer(
 615        &self,
 616        path: Arc<RelPath>,
 617        worktree: Entity<Worktree>,
 618        cx: &mut Context<BufferStore>,
 619    ) -> Task<Result<Entity<Buffer>>> {
 620        let load_file = worktree.update(cx, |worktree, cx| worktree.load_file(path.as_ref(), cx));
 621        cx.spawn(async move |this, cx| {
 622            let path = path.clone();
 623            let buffer = match load_file.await {
 624                Ok(loaded) => {
 625                    let reservation = cx.reserve_entity::<Buffer>()?;
 626                    let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
 627                    let text_buffer = cx
 628                        .background_spawn(async move {
 629                            text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text)
 630                        })
 631                        .await;
 632                    cx.insert_entity(reservation, |_| {
 633                        Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
 634                    })?
 635                }
 636                Err(error) if is_not_found_error(&error) => cx.new(|cx| {
 637                    let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
 638                    let text_buffer = text::Buffer::new(ReplicaId::LOCAL, buffer_id, "");
 639                    Buffer::build(
 640                        text_buffer,
 641                        Some(Arc::new(File {
 642                            worktree,
 643                            path,
 644                            disk_state: DiskState::New,
 645                            entry_id: None,
 646                            is_local: true,
 647                            is_private: false,
 648                        })),
 649                        Capability::ReadWrite,
 650                    )
 651                })?,
 652                Err(e) => return Err(e),
 653            };
 654            this.update(cx, |this, cx| {
 655                this.add_buffer(buffer.clone(), cx)?;
 656                let buffer_id = buffer.read(cx).remote_id();
 657                if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
 658                    this.path_to_buffer_id.insert(
 659                        ProjectPath {
 660                            worktree_id: file.worktree_id(cx),
 661                            path: file.path.clone(),
 662                        },
 663                        buffer_id,
 664                    );
 665                    let this = this.as_local_mut().unwrap();
 666                    if let Some(entry_id) = file.entry_id {
 667                        this.local_buffer_ids_by_entry_id
 668                            .insert(entry_id, buffer_id);
 669                    }
 670                }
 671
 672                anyhow::Ok(())
 673            })??;
 674
 675            Ok(buffer)
 676        })
 677    }
 678
 679    fn create_buffer(
 680        &self,
 681        project_searchable: bool,
 682        cx: &mut Context<BufferStore>,
 683    ) -> Task<Result<Entity<Buffer>>> {
 684        cx.spawn(async move |buffer_store, cx| {
 685            let buffer =
 686                cx.new(|cx| Buffer::local("", cx).with_language(language::PLAIN_TEXT.clone(), cx))?;
 687            buffer_store.update(cx, |buffer_store, cx| {
 688                buffer_store.add_buffer(buffer.clone(), cx).log_err();
 689                if !project_searchable {
 690                    buffer_store
 691                        .non_searchable_buffers
 692                        .insert(buffer.read(cx).remote_id());
 693                }
 694            })?;
 695            Ok(buffer)
 696        })
 697    }
 698
 699    fn reload_buffers(
 700        &self,
 701        buffers: HashSet<Entity<Buffer>>,
 702        push_to_history: bool,
 703        cx: &mut Context<BufferStore>,
 704    ) -> Task<Result<ProjectTransaction>> {
 705        cx.spawn(async move |_, cx| {
 706            let mut project_transaction = ProjectTransaction::default();
 707            for buffer in buffers {
 708                let transaction = buffer.update(cx, |buffer, cx| buffer.reload(cx))?.await?;
 709                buffer.update(cx, |buffer, cx| {
 710                    if let Some(transaction) = transaction {
 711                        if !push_to_history {
 712                            buffer.forget_transaction(transaction.id);
 713                        }
 714                        project_transaction.0.insert(cx.entity(), transaction);
 715                    }
 716                })?;
 717            }
 718
 719            Ok(project_transaction)
 720        })
 721    }
 722}
 723
 724impl BufferStore {
 725    pub fn init(client: &AnyProtoClient) {
 726        client.add_entity_message_handler(Self::handle_buffer_reloaded);
 727        client.add_entity_message_handler(Self::handle_buffer_saved);
 728        client.add_entity_message_handler(Self::handle_update_buffer_file);
 729        client.add_entity_request_handler(Self::handle_save_buffer);
 730        client.add_entity_request_handler(Self::handle_reload_buffers);
 731    }
 732
 733    /// Creates a buffer store for a local project.
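        ///
        /// A minimal construction sketch (hypothetical caller context; `cx` and
        /// `worktree_store` are assumed caller bindings, not part of this file):
        /// ```ignore
        /// let buffer_store = cx.new(|cx| BufferStore::local(worktree_store.clone(), cx));
        /// ```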
 734    pub fn local(worktree_store: Entity<WorktreeStore>, cx: &mut Context<Self>) -> Self {
 735        Self {
 736            state: BufferStoreState::Local(LocalBufferStore {
 737                local_buffer_ids_by_entry_id: Default::default(),
 738                worktree_store: worktree_store.clone(),
 739                _subscription: cx.subscribe(&worktree_store, |this, _, event, cx| {
 740                    if let WorktreeStoreEvent::WorktreeAdded(worktree) = event {
 741                        let this = this.as_local_mut().unwrap();
 742                        this.subscribe_to_worktree(worktree, cx);
 743                    }
 744                }),
 745            }),
 746            downstream_client: None,
 747            opened_buffers: Default::default(),
 748            path_to_buffer_id: Default::default(),
 749            shared_buffers: Default::default(),
 750            loading_buffers: Default::default(),
 751            non_searchable_buffers: Default::default(),
 752            worktree_store,
 753        }
 754    }
 755
 756    pub fn remote(
 757        worktree_store: Entity<WorktreeStore>,
 758        upstream_client: AnyProtoClient,
 759        remote_id: u64,
 760        _cx: &mut Context<Self>,
 761    ) -> Self {
 762        Self {
 763            state: BufferStoreState::Remote(RemoteBufferStore {
 764                shared_with_me: Default::default(),
 765                loading_remote_buffers_by_id: Default::default(),
 766                remote_buffer_listeners: Default::default(),
 767                project_id: remote_id,
 768                upstream_client,
 769                worktree_store: worktree_store.clone(),
 770            }),
 771            downstream_client: None,
 772            opened_buffers: Default::default(),
 773            path_to_buffer_id: Default::default(),
 774            loading_buffers: Default::default(),
 775            shared_buffers: Default::default(),
 776            non_searchable_buffers: Default::default(),
 777            worktree_store,
 778        }
 779    }
 780
 781    fn as_local_mut(&mut self) -> Option<&mut LocalBufferStore> {
 782        match &mut self.state {
 783            BufferStoreState::Local(state) => Some(state),
 784            _ => None,
 785        }
 786    }
 787
 788    fn as_remote_mut(&mut self) -> Option<&mut RemoteBufferStore> {
 789        match &mut self.state {
 790            BufferStoreState::Remote(state) => Some(state),
 791            _ => None,
 792        }
 793    }
 794
 795    fn as_remote(&self) -> Option<&RemoteBufferStore> {
 796        match &self.state {
 797            BufferStoreState::Remote(state) => Some(state),
 798            _ => None,
 799        }
 800    }
 801
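        /// Returns the buffer at `project_path`, opening it if it is not already open.
        /// Concurrent callers for the same path share a single load task via `loading_buffers`.
        ///
        /// A usage sketch (hypothetical caller context; `buffer_store`, `worktree_id`,
        /// `path`, and `cx` are assumed caller bindings, not part of this file):
        /// ```ignore
        /// let open = buffer_store.update(cx, |store, cx| {
        ///     store.open_buffer(ProjectPath { worktree_id, path }, cx)
        /// });
        /// let buffer = open.await?;
        /// ```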
 802    pub fn open_buffer(
 803        &mut self,
 804        project_path: ProjectPath,
 805        cx: &mut Context<Self>,
 806    ) -> Task<Result<Entity<Buffer>>> {
 807        if let Some(buffer) = self.get_by_path(&project_path) {
 808            cx.emit(BufferStoreEvent::BufferOpened {
 809                buffer: buffer.clone(),
 810                project_path,
 811            });
 812
 813            return Task::ready(Ok(buffer));
 814        }
 815
 816        let task = match self.loading_buffers.entry(project_path.clone()) {
 817            hash_map::Entry::Occupied(e) => e.get().clone(),
 818            hash_map::Entry::Vacant(entry) => {
 819                let path = project_path.path.clone();
 820                let Some(worktree) = self
 821                    .worktree_store
 822                    .read(cx)
 823                    .worktree_for_id(project_path.worktree_id, cx)
 824                else {
 825                    return Task::ready(Err(anyhow!("no such worktree")));
 826                };
 827                let load_buffer = match &self.state {
 828                    BufferStoreState::Local(this) => this.open_buffer(path, worktree, cx),
 829                    BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx),
 830                };
 831
 832                entry
 833                    .insert(
 834                        // todo(lw): hot foreground spawn
 835                        cx.spawn(async move |this, cx| {
 836                            let load_result = load_buffer.await;
 837                            this.update(cx, |this, cx| {
 838                                // Record the fact that the buffer is no longer loading.
 839                                this.loading_buffers.remove(&project_path);
 840
 841                                let buffer = load_result.map_err(Arc::new)?;
 842                                cx.emit(BufferStoreEvent::BufferOpened {
 843                                    buffer: buffer.clone(),
 844                                    project_path,
 845                                });
 846
 847                                Ok(buffer)
 848                            })?
 849                        })
 850                        .shared(),
 851                    )
 852                    .clone()
 853            }
 854        };
 855
 856        cx.background_spawn(async move {
 857            task.await.map_err(|e| {
 858                if e.error_code() != ErrorCode::Internal {
 859                    anyhow!(e.error_code())
 860                } else {
 861                    anyhow!("{e}")
 862                }
 863            })
 864        })
 865    }
 866
 867    pub fn create_buffer(
 868        &mut self,
 869        project_searchable: bool,
 870        cx: &mut Context<Self>,
 871    ) -> Task<Result<Entity<Buffer>>> {
 872        match &self.state {
 873            BufferStoreState::Local(this) => this.create_buffer(project_searchable, cx),
 874            BufferStoreState::Remote(this) => this.create_buffer(project_searchable, cx),
 875        }
 876    }
 877
 878    pub fn save_buffer(
 879        &mut self,
 880        buffer: Entity<Buffer>,
 881        cx: &mut Context<Self>,
 882    ) -> Task<Result<()>> {
 883        match &mut self.state {
 884            BufferStoreState::Local(this) => this.save_buffer(buffer, cx),
 885            BufferStoreState::Remote(this) => this.save_remote_buffer(buffer, None, cx),
 886        }
 887    }
 888
 889    pub fn save_buffer_as(
 890        &mut self,
 891        buffer: Entity<Buffer>,
 892        path: ProjectPath,
 893        cx: &mut Context<Self>,
 894    ) -> Task<Result<()>> {
 895        let old_file = buffer.read(cx).file().cloned();
 896        let task = match &self.state {
 897            BufferStoreState::Local(this) => this.save_buffer_as(buffer.clone(), path, cx),
 898            BufferStoreState::Remote(this) => {
 899                this.save_remote_buffer(buffer.clone(), Some(path.to_proto()), cx)
 900            }
 901        };
 902        cx.spawn(async move |this, cx| {
 903            task.await?;
 904            this.update(cx, |this, cx| {
 905                old_file.clone().and_then(|file| {
 906                    this.path_to_buffer_id.remove(&ProjectPath {
 907                        worktree_id: file.worktree_id(cx),
 908                        path: file.path().clone(),
 909                    })
 910                });
 911
 912                cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
 913            })
 914        })
 915    }
 916
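        /// Registers a buffer with the store: applies any operations that arrived before
        /// the buffer existed, indexes it by path, and emits `BufferAdded`.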
 917    fn add_buffer(&mut self, buffer_entity: Entity<Buffer>, cx: &mut Context<Self>) -> Result<()> {
 918        let buffer = buffer_entity.read(cx);
 919        let remote_id = buffer.remote_id();
 920        let path = File::from_dyn(buffer.file()).map(|file| ProjectPath {
 921            path: file.path.clone(),
 922            worktree_id: file.worktree_id(cx),
 923        });
 924        let is_remote = buffer.replica_id().is_remote();
 925        let open_buffer = OpenBuffer::Complete {
 926            buffer: buffer_entity.downgrade(),
 927        };
 928
 929        let handle = cx.entity().downgrade();
 930        buffer_entity.update(cx, move |_, cx| {
 931            cx.on_release(move |buffer, cx| {
 932                handle
 933                    .update(cx, |_, cx| {
 934                        cx.emit(BufferStoreEvent::BufferDropped(buffer.remote_id()))
 935                    })
 936                    .ok();
 937            })
 938            .detach()
 939        });
 940        let _expect_path_to_exist;
 941        match self.opened_buffers.entry(remote_id) {
 942            hash_map::Entry::Vacant(entry) => {
 943                entry.insert(open_buffer);
 944                _expect_path_to_exist = false;
 945            }
 946            hash_map::Entry::Occupied(mut entry) => {
 947                if let OpenBuffer::Operations(operations) = entry.get_mut() {
 948                    buffer_entity.update(cx, |b, cx| b.apply_ops(operations.drain(..), cx));
 949                } else if entry.get().upgrade().is_some() {
 950                    if is_remote {
 951                        return Ok(());
 952                    } else {
 953                        debug_panic!("buffer {remote_id} was already registered");
 954                        anyhow::bail!("buffer {remote_id} was already registered");
 955                    }
 956                }
 957                entry.insert(open_buffer);
 958                _expect_path_to_exist = true;
 959            }
 960        }
 961
 962        if let Some(path) = path {
 963            self.path_to_buffer_id.insert(path, remote_id);
 964        }
 965
 966        cx.subscribe(&buffer_entity, Self::on_buffer_event).detach();
 967        cx.emit(BufferStoreEvent::BufferAdded(buffer_entity));
 968        Ok(())
 969    }
 970
 971    pub fn buffers(&self) -> impl '_ + Iterator<Item = Entity<Buffer>> {
 972        self.opened_buffers
 973            .values()
 974            .filter_map(|buffer| buffer.upgrade())
 975    }
 976
 977    pub(crate) fn is_searchable(&self, id: &BufferId) -> bool {
 978        !self.non_searchable_buffers.contains(id)
 979    }
 980
 981    pub fn loading_buffers(
 982        &self,
 983    ) -> impl Iterator<Item = (&ProjectPath, impl Future<Output = Result<Entity<Buffer>>>)> {
 984        self.loading_buffers.iter().map(|(path, task)| {
 985            let task = task.clone();
 986            (path, async move {
 987                task.await.map_err(|e| {
 988                    if e.error_code() != ErrorCode::Internal {
 989                        anyhow!(e.error_code())
 990                    } else {
 991                        anyhow!("{e}")
 992                    }
 993                })
 994            })
 995        })
 996    }
 997
 998    pub fn buffer_id_for_project_path(&self, project_path: &ProjectPath) -> Option<&BufferId> {
 999        self.path_to_buffer_id.get(project_path)
1000    }
1001
1002    pub fn get_by_path(&self, path: &ProjectPath) -> Option<Entity<Buffer>> {
1003        self.path_to_buffer_id
1004            .get(path)
1005            .and_then(|buffer_id| self.get(*buffer_id))
1006    }
1007
1008    pub fn get(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1009        self.opened_buffers.get(&buffer_id)?.upgrade()
1010    }
1011
1012    pub fn get_existing(&self, buffer_id: BufferId) -> Result<Entity<Buffer>> {
1013        self.get(buffer_id)
1014            .with_context(|| format!("unknown buffer id {buffer_id}"))
1015    }
1016
1017    pub fn get_possibly_incomplete(&self, buffer_id: BufferId) -> Option<Entity<Buffer>> {
1018        self.get(buffer_id).or_else(|| {
1019            self.as_remote()
1020                .and_then(|remote| remote.loading_remote_buffers_by_id.get(&buffer_id).cloned())
1021        })
1022    }
1023
1024    pub fn buffer_version_info(&self, cx: &App) -> (Vec<proto::BufferVersion>, Vec<BufferId>) {
1025        let buffers = self
1026            .buffers()
1027            .map(|buffer| {
1028                let buffer = buffer.read(cx);
1029                proto::BufferVersion {
1030                    id: buffer.remote_id().into(),
1031                    version: language::proto::serialize_version(&buffer.version),
1032                }
1033            })
1034            .collect();
1035        let incomplete_buffer_ids = self
1036            .as_remote()
1037            .map(|remote| remote.incomplete_buffer_ids())
1038            .unwrap_or_default();
1039        (buffers, incomplete_buffer_ids)
1040    }
1041
1042    pub fn disconnected_from_host(&mut self, cx: &mut App) {
1043        for open_buffer in self.opened_buffers.values_mut() {
1044            if let Some(buffer) = open_buffer.upgrade() {
1045                buffer.update(cx, |buffer, _| buffer.give_up_waiting());
1046            }
1047        }
1048
1049        for buffer in self.buffers() {
1050            buffer.update(cx, |buffer, cx| {
1051                buffer.set_capability(Capability::ReadOnly, cx)
1052            });
1053        }
1054
1055        if let Some(remote) = self.as_remote_mut() {
1056            // Wake up all futures currently waiting on a buffer to get opened,
1057            // to give them a chance to fail now that we've disconnected.
1058            remote.remote_buffer_listeners.clear()
1059        }
1060    }
1061
1062    pub fn shared(&mut self, remote_id: u64, downstream_client: AnyProtoClient, _cx: &mut App) {
1063        self.downstream_client = Some((downstream_client, remote_id));
1064    }
1065
1066    pub fn unshared(&mut self, _cx: &mut Context<Self>) {
1067        self.downstream_client.take();
1068        self.forget_shared_buffers();
1069    }
1070
1071    pub fn discard_incomplete(&mut self) {
1072        self.opened_buffers
1073            .retain(|_, buffer| !matches!(buffer, OpenBuffer::Operations(_)));
1074    }
1075
1076    fn buffer_changed_file(&mut self, buffer: Entity<Buffer>, cx: &mut App) -> Option<()> {
1077        let file = File::from_dyn(buffer.read(cx).file())?;
1078
1079        let remote_id = buffer.read(cx).remote_id();
1080        if let Some(entry_id) = file.entry_id {
1081            if let Some(local) = self.as_local_mut() {
1082                match local.local_buffer_ids_by_entry_id.get(&entry_id) {
1083                    Some(_) => {
1084                        return None;
1085                    }
1086                    None => {
1087                        local
1088                            .local_buffer_ids_by_entry_id
1089                            .insert(entry_id, remote_id);
1090                    }
1091                }
1092            }
1093            self.path_to_buffer_id.insert(
1094                ProjectPath {
1095                    worktree_id: file.worktree_id(cx),
1096                    path: file.path.clone(),
1097                },
1098                remote_id,
1099            );
1100        };
1101
1102        Some(())
1103    }
1104
1105    fn on_buffer_event(
1106        &mut self,
1107        buffer: Entity<Buffer>,
1108        event: &BufferEvent,
1109        cx: &mut Context<Self>,
1110    ) {
1111        match event {
1112            BufferEvent::FileHandleChanged => {
1113                self.buffer_changed_file(buffer, cx);
1114            }
1115            BufferEvent::Reloaded => {
1116                let Some((downstream_client, project_id)) = self.downstream_client.as_ref() else {
1117                    return;
1118                };
1119                let buffer = buffer.read(cx);
1120                downstream_client
1121                    .send(proto::BufferReloaded {
1122                        project_id: *project_id,
1123                        buffer_id: buffer.remote_id().to_proto(),
1124                        version: serialize_version(&buffer.version()),
1125                        mtime: buffer.saved_mtime().map(|t| t.into()),
1126                        line_ending: serialize_line_ending(buffer.line_ending()) as i32,
1127                    })
1128                    .log_err();
1129            }
1130            BufferEvent::LanguageChanged(_) => {}
1131            _ => {}
1132        }
1133    }
1134
1135    pub async fn handle_update_buffer(
1136        this: Entity<Self>,
1137        envelope: TypedEnvelope<proto::UpdateBuffer>,
1138        mut cx: AsyncApp,
1139    ) -> Result<proto::Ack> {
1140        let payload = envelope.payload;
1141        let buffer_id = BufferId::new(payload.buffer_id)?;
1142        let ops = payload
1143            .operations
1144            .into_iter()
1145            .map(language::proto::deserialize_operation)
1146            .collect::<Result<Vec<_>, _>>()?;
1147        this.update(&mut cx, |this, cx| {
1148            match this.opened_buffers.entry(buffer_id) {
1149                hash_map::Entry::Occupied(mut e) => match e.get_mut() {
1150                    OpenBuffer::Operations(operations) => operations.extend_from_slice(&ops),
1151                    OpenBuffer::Complete { buffer, .. } => {
1152                        if let Some(buffer) = buffer.upgrade() {
1153                            buffer.update(cx, |buffer, cx| buffer.apply_ops(ops, cx));
1154                        }
1155                    }
1156                },
1157                hash_map::Entry::Vacant(e) => {
1158                    e.insert(OpenBuffer::Operations(ops));
1159                }
1160            }
1161            Ok(proto::Ack {})
1162        })?
1163    }
1164
1165    pub fn register_shared_lsp_handle(
1166        &mut self,
1167        peer_id: proto::PeerId,
1168        buffer_id: BufferId,
1169        handle: OpenLspBufferHandle,
1170    ) {
1171        if let Some(shared_buffers) = self.shared_buffers.get_mut(&peer_id)
1172            && let Some(buffer) = shared_buffers.get_mut(&buffer_id)
1173        {
1174            buffer.lsp_handle = Some(handle);
1175            return;
1176        }
1177        debug_panic!("tried to register shared lsp handle, but buffer was not shared")
1178    }
1179
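        /// Handles a guest's `SynchronizeBuffers` request: records the buffers shared with
        /// that peer, reports the host's current versions, and streams any operations the
        /// guest is missing as chunked `UpdateBuffer` requests.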
1180    pub fn handle_synchronize_buffers(
1181        &mut self,
1182        envelope: TypedEnvelope<proto::SynchronizeBuffers>,
1183        cx: &mut Context<Self>,
1184        client: Arc<Client>,
1185    ) -> Result<proto::SynchronizeBuffersResponse> {
1186        let project_id = envelope.payload.project_id;
1187        let mut response = proto::SynchronizeBuffersResponse {
1188            buffers: Default::default(),
1189        };
1190        let Some(guest_id) = envelope.original_sender_id else {
1191            anyhow::bail!("missing original_sender_id on SynchronizeBuffers request");
1192        };
1193
1194        self.shared_buffers.entry(guest_id).or_default().clear();
1195        for buffer in envelope.payload.buffers {
1196            let buffer_id = BufferId::new(buffer.id)?;
1197            let remote_version = language::proto::deserialize_version(&buffer.version);
1198            if let Some(buffer) = self.get(buffer_id) {
1199                self.shared_buffers
1200                    .entry(guest_id)
1201                    .or_default()
1202                    .entry(buffer_id)
1203                    .or_insert_with(|| SharedBuffer {
1204                        buffer: buffer.clone(),
1205                        lsp_handle: None,
1206                    });
1207
1208                let buffer = buffer.read(cx);
1209                response.buffers.push(proto::BufferVersion {
1210                    id: buffer_id.into(),
1211                    version: language::proto::serialize_version(&buffer.version),
1212                });
1213
1214                let operations = buffer.serialize_ops(Some(remote_version), cx);
1215                let client = client.clone();
1216                if let Some(file) = buffer.file() {
1217                    client
1218                        .send(proto::UpdateBufferFile {
1219                            project_id,
1220                            buffer_id: buffer_id.into(),
1221                            file: Some(file.to_proto(cx)),
1222                        })
1223                        .log_err();
1224                }
1225
1226                // TODO(max): do something
1227                // client
1228                //     .send(proto::UpdateStagedText {
1229                //         project_id,
1230                //         buffer_id: buffer_id.into(),
1231                //         diff_base: buffer.diff_base().map(ToString::to_string),
1232                //     })
1233                //     .log_err();
1234
1235                client
1236                    .send(proto::BufferReloaded {
1237                        project_id,
1238                        buffer_id: buffer_id.into(),
1239                        version: language::proto::serialize_version(buffer.saved_version()),
1240                        mtime: buffer.saved_mtime().map(|time| time.into()),
1241                        line_ending: language::proto::serialize_line_ending(buffer.line_ending())
1242                            as i32,
1243                    })
1244                    .log_err();
1245
1246                cx.background_spawn(
1247                    async move {
1248                        let operations = operations.await;
1249                        for chunk in split_operations(operations) {
1250                            client
1251                                .request(proto::UpdateBuffer {
1252                                    project_id,
1253                                    buffer_id: buffer_id.into(),
1254                                    operations: chunk,
1255                                })
1256                                .await?;
1257                        }
1258                        anyhow::Ok(())
1259                    }
1260                    .log_err(),
1261                )
1262                .detach();
1263            }
1264        }
1265        Ok(response)
1266    }
1267
1268    pub fn handle_create_buffer_for_peer(
1269        &mut self,
1270        envelope: TypedEnvelope<proto::CreateBufferForPeer>,
1271        replica_id: ReplicaId,
1272        capability: Capability,
1273        cx: &mut Context<Self>,
1274    ) -> Result<()> {
1275        let remote = self
1276            .as_remote_mut()
1277            .context("buffer store is not a remote")?;
1278
1279        if let Some(buffer) =
1280            remote.handle_create_buffer_for_peer(envelope, replica_id, capability, cx)?
1281        {
1282            self.add_buffer(buffer, cx)?;
1283        }
1284
1285        Ok(())
1286    }
1287
1288    pub async fn handle_update_buffer_file(
1289        this: Entity<Self>,
1290        envelope: TypedEnvelope<proto::UpdateBufferFile>,
1291        mut cx: AsyncApp,
1292    ) -> Result<()> {
1293        let buffer_id = envelope.payload.buffer_id;
1294        let buffer_id = BufferId::new(buffer_id)?;
1295
1296        this.update(&mut cx, |this, cx| {
1297            let payload = envelope.payload.clone();
1298            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
1299                let file = payload.file.context("invalid file")?;
1300                let worktree = this
1301                    .worktree_store
1302                    .read(cx)
1303                    .worktree_for_id(WorktreeId::from_proto(file.worktree_id), cx)
1304                    .context("no such worktree")?;
1305                let file = File::from_proto(file, worktree, cx)?;
1306                let old_file = buffer.update(cx, |buffer, cx| {
1307                    let old_file = buffer.file().cloned();
1308                    let new_path = file.path.clone();
1309
1310                    buffer.file_updated(Arc::new(file), cx);
1311                    if old_file.as_ref().is_none_or(|old| *old.path() != new_path) {
1312                        Some(old_file)
1313                    } else {
1314                        None
1315                    }
1316                });
1317                if let Some(old_file) = old_file {
1318                    cx.emit(BufferStoreEvent::BufferChangedFilePath { buffer, old_file });
1319                }
1320            }
1321            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
1322                downstream_client
1323                    .send(proto::UpdateBufferFile {
1324                        project_id: *project_id,
1325                        buffer_id: buffer_id.into(),
1326                        file: envelope.payload.file,
1327                    })
1328                    .log_err();
1329            }
1330            Ok(())
1331        })?
1332    }
1333
1334    pub async fn handle_save_buffer(
1335        this: Entity<Self>,
1336        envelope: TypedEnvelope<proto::SaveBuffer>,
1337        mut cx: AsyncApp,
1338    ) -> Result<proto::BufferSaved> {
1339        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1340        let (buffer, project_id) = this.read_with(&cx, |this, _| {
1341            anyhow::Ok((
1342                this.get_existing(buffer_id)?,
1343                this.downstream_client
1344                    .as_ref()
1345                    .map(|(_, project_id)| *project_id)
1346                    .context("project is not shared")?,
1347            ))
1348        })??;
1349        buffer
1350            .update(&mut cx, |buffer, _| {
1351                buffer.wait_for_version(deserialize_version(&envelope.payload.version))
1352            })?
1353            .await?;
1354        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1355
1356        if let Some(new_path) = envelope.payload.new_path
1357            && let Some(new_path) = ProjectPath::from_proto(new_path)
1358        {
1359            this.update(&mut cx, |this, cx| {
1360                this.save_buffer_as(buffer.clone(), new_path, cx)
1361            })?
1362            .await?;
1363        } else {
1364            this.update(&mut cx, |this, cx| this.save_buffer(buffer.clone(), cx))?
1365                .await?;
1366        }
1367
1368        buffer.read_with(&cx, |buffer, _| proto::BufferSaved {
1369            project_id,
1370            buffer_id: buffer_id.into(),
1371            version: serialize_version(buffer.saved_version()),
1372            mtime: buffer.saved_mtime().map(|time| time.into()),
1373        })
1374    }
1375
1376    pub async fn handle_close_buffer(
1377        this: Entity<Self>,
1378        envelope: TypedEnvelope<proto::CloseBuffer>,
1379        mut cx: AsyncApp,
1380    ) -> Result<()> {
1381        let peer_id = envelope.sender_id;
1382        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1383        this.update(&mut cx, |this, cx| {
1384            if let Some(shared) = this.shared_buffers.get_mut(&peer_id)
1385                && shared.remove(&buffer_id).is_some()
1386            {
1387                cx.emit(BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id));
1388                if shared.is_empty() {
1389                    this.shared_buffers.remove(&peer_id);
1390                }
1391                return;
1392            }
1393            debug_panic!(
1394                "peer_id {} closed buffer_id {} which was either not open or already closed",
1395                peer_id,
1396                buffer_id
1397            )
1398        })
1399    }
1400
1401    pub async fn handle_buffer_saved(
1402        this: Entity<Self>,
1403        envelope: TypedEnvelope<proto::BufferSaved>,
1404        mut cx: AsyncApp,
1405    ) -> Result<()> {
1406        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1407        let version = deserialize_version(&envelope.payload.version);
1408        let mtime = envelope.payload.mtime.clone().map(|time| time.into());
1409        this.update(&mut cx, move |this, cx| {
1410            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
1411                buffer.update(cx, |buffer, cx| {
1412                    buffer.did_save(version, mtime, cx);
1413                });
1414            }
1415
1416            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
1417                downstream_client
1418                    .send(proto::BufferSaved {
1419                        project_id: *project_id,
1420                        buffer_id: buffer_id.into(),
1421                        mtime: envelope.payload.mtime,
1422                        version: envelope.payload.version,
1423                    })
1424                    .log_err();
1425            }
1426        })
1427    }
1428
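        /// Handles a `BufferReloaded` message from upstream: applies the new
        /// version, line ending, and mtime to the local replica, then forwards the
        /// message to any downstream client.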
1429    pub async fn handle_buffer_reloaded(
1430        this: Entity<Self>,
1431        envelope: TypedEnvelope<proto::BufferReloaded>,
1432        mut cx: AsyncApp,
1433    ) -> Result<()> {
1434        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
1435        let version = deserialize_version(&envelope.payload.version);
1436        let mtime = envelope.payload.mtime.clone().map(|time| time.into());
1437        let line_ending = deserialize_line_ending(
1438            proto::LineEnding::from_i32(envelope.payload.line_ending)
1439                .context("missing line ending")?,
1440        );
1441        this.update(&mut cx, |this, cx| {
1442            if let Some(buffer) = this.get_possibly_incomplete(buffer_id) {
1443                buffer.update(cx, |buffer, cx| {
1444                    buffer.did_reload(version, line_ending, mtime, cx);
1445                });
1446            }
1447
1448            if let Some((downstream_client, project_id)) = this.downstream_client.as_ref() {
1449                downstream_client
1450                    .send(proto::BufferReloaded {
1451                        project_id: *project_id,
1452                        buffer_id: buffer_id.into(),
1453                        mtime: envelope.payload.mtime,
1454                        version: envelope.payload.version,
1455                        line_ending: envelope.payload.line_ending,
1456                    })
1457                    .log_err();
1458            }
1459        })
1460    }
1461
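        /// Reloads the given set of buffers, delegating to the local or remote
        /// implementation, and resolves to the resulting [`ProjectTransaction`].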
1462    pub fn reload_buffers(
1463        &self,
1464        buffers: HashSet<Entity<Buffer>>,
1465        push_to_history: bool,
1466        cx: &mut Context<Self>,
1467    ) -> Task<Result<ProjectTransaction>> {
1468        if buffers.is_empty() {
1469            return Task::ready(Ok(ProjectTransaction::default()));
1470        }
1471        match &self.state {
1472            BufferStoreState::Local(this) => this.reload_buffers(buffers, push_to_history, cx),
1473            BufferStoreState::Remote(this) => this.reload_buffers(buffers, push_to_history, cx),
1474        }
1475    }
1476
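        /// Handles a `ReloadBuffers` request: reloads the requested buffers and
        /// replies with the resulting project transaction, serialized for the
        /// original sender.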
1477    async fn handle_reload_buffers(
1478        this: Entity<Self>,
1479        envelope: TypedEnvelope<proto::ReloadBuffers>,
1480        mut cx: AsyncApp,
1481    ) -> Result<proto::ReloadBuffersResponse> {
1482        let sender_id = envelope.original_sender_id().unwrap_or_default();
1483        let reload = this.update(&mut cx, |this, cx| {
1484            let mut buffers = HashSet::default();
1485            for buffer_id in &envelope.payload.buffer_ids {
1486                let buffer_id = BufferId::new(*buffer_id)?;
1487                buffers.insert(this.get_existing(buffer_id)?);
1488            }
1489            anyhow::Ok(this.reload_buffers(buffers, false, cx))
1490        })??;
1491
1492        let project_transaction = reload.await?;
1493        let project_transaction = this.update(&mut cx, |this, cx| {
1494            this.serialize_project_transaction_for_peer(project_transaction, sender_id, cx)
1495        })?;
1496        Ok(proto::ReloadBuffersResponse {
1497            transaction: Some(project_transaction),
1498        })
1499    }
1500
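        /// Shares `buffer` with `peer_id` if it is not already shared, sending the
        /// buffer's current state followed by its serialized operations in chunks.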
1501    pub fn create_buffer_for_peer(
1502        &mut self,
1503        buffer: &Entity<Buffer>,
1504        peer_id: proto::PeerId,
1505        cx: &mut Context<Self>,
1506    ) -> Task<Result<()>> {
1507        let buffer_id = buffer.read(cx).remote_id();
1508        let shared_buffers = self.shared_buffers.entry(peer_id).or_default();
1509        if shared_buffers.contains_key(&buffer_id) {
1510            return Task::ready(Ok(()));
1511        }
1512        shared_buffers.insert(
1513            buffer_id,
1514            SharedBuffer {
1515                buffer: buffer.clone(),
1516                lsp_handle: None,
1517            },
1518        );
1519
1520        let Some((client, project_id)) = self.downstream_client.clone() else {
1521            return Task::ready(Ok(()));
1522        };
1523
1524        cx.spawn(async move |this, cx| {
1525            let Some(buffer) = this.read_with(cx, |this, _| this.get(buffer_id))? else {
1526                return anyhow::Ok(());
1527            };
1528
1529            let operations = buffer.update(cx, |b, cx| b.serialize_ops(None, cx))?;
1530            let operations = operations.await;
1531            let state = buffer.update(cx, |buffer, cx| buffer.to_proto(cx))?;
1532
1533            let initial_state = proto::CreateBufferForPeer {
1534                project_id,
1535                peer_id: Some(peer_id),
1536                variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
1537            };
1538
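                // Once the initial state has been delivered, stream the buffer's
                // serialized operations from a background task, splitting them into
                // chunks and marking the final one with `is_last`.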
1539            if client.send(initial_state).log_err().is_some() {
1540                let client = client.clone();
1541                cx.background_spawn(async move {
1542                    let mut chunks = split_operations(operations).peekable();
1543                    while let Some(chunk) = chunks.next() {
1544                        let is_last = chunks.peek().is_none();
1545                        client.send(proto::CreateBufferForPeer {
1546                            project_id,
1547                            peer_id: Some(peer_id),
1548                            variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
1549                                proto::BufferChunk {
1550                                    buffer_id: buffer_id.into(),
1551                                    operations: chunk,
1552                                    is_last,
1553                                },
1554                            )),
1555                        })?;
1556                    }
1557                    anyhow::Ok(())
1558                })
1559                .await
1560                .log_err();
1561            }
1562            Ok(())
1563        })
1564    }
1565
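        /// Clears all record of buffers that have been shared with peers.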
1566    pub fn forget_shared_buffers(&mut self) {
1567        self.shared_buffers.clear();
1568    }
1569
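        /// Clears the record of buffers shared with a single peer.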
1570    pub fn forget_shared_buffers_for(&mut self, peer_id: &proto::PeerId) {
1571        self.shared_buffers.remove(peer_id);
1572    }
1573
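        /// Moves any shared-buffer state recorded for `old_peer_id` so that it is
        /// keyed by `new_peer_id`.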
1574    pub fn update_peer_id(&mut self, old_peer_id: &proto::PeerId, new_peer_id: proto::PeerId) {
1575        if let Some(buffers) = self.shared_buffers.remove(old_peer_id) {
1576            self.shared_buffers.insert(new_peer_id, buffers);
1577        }
1578    }
1579
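        /// Returns whether any buffers are currently shared with peers.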
1580    pub fn has_shared_buffers(&self) -> bool {
1581        !self.shared_buffers.is_empty()
1582    }
1583
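        /// Creates a new local buffer with the given text and language, registers
        /// it with the store, and, if `project_searchable` is false, excludes it
        /// from project-wide searches.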
1584    pub fn create_local_buffer(
1585        &mut self,
1586        text: &str,
1587        language: Option<Arc<Language>>,
1588        project_searchable: bool,
1589        cx: &mut Context<Self>,
1590    ) -> Entity<Buffer> {
1591        let buffer = cx.new(|cx| {
1592            Buffer::local(text, cx)
1593                .with_language(language.unwrap_or_else(|| language::PLAIN_TEXT.clone()), cx)
1594        });
1595
1596        self.add_buffer(buffer.clone(), cx).log_err();
1597        let buffer_id = buffer.read(cx).remote_id();
1598        if !project_searchable {
1599            self.non_searchable_buffers.insert(buffer_id);
1600        }
1601
1602        if let Some(file) = File::from_dyn(buffer.read(cx).file()) {
1603            self.path_to_buffer_id.insert(
1604                ProjectPath {
1605                    worktree_id: file.worktree_id(cx),
1606                    path: file.path.clone(),
1607                },
1608                buffer_id,
1609            );
1610            let this = self
1611                .as_local_mut()
1612                .expect("local-only method called in a non-local context");
1613            if let Some(entry_id) = file.entry_id {
1614                this.local_buffer_ids_by_entry_id
1615                    .insert(entry_id, buffer_id);
1616            }
1617        }
1618        buffer
1619    }
1620
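        /// Deserializes and applies a project transaction received over RPC;
        /// debug-panics and returns an error if this is not a remote buffer store.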
1621    pub fn deserialize_project_transaction(
1622        &mut self,
1623        message: proto::ProjectTransaction,
1624        push_to_history: bool,
1625        cx: &mut Context<Self>,
1626    ) -> Task<Result<ProjectTransaction>> {
1627        if let Some(this) = self.as_remote_mut() {
1628            this.deserialize_project_transaction(message, push_to_history, cx)
1629        } else {
1630            debug_panic!("not a remote buffer store");
1631            Task::ready(Err(anyhow!("not a remote buffer store")))
1632        }
1633    }
1634
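        /// Returns a task that resolves once the remote buffer with the given id
        /// has been received; debug-panics and returns an error if this is not a
        /// remote buffer store.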
1635    pub fn wait_for_remote_buffer(
1636        &mut self,
1637        id: BufferId,
1638        cx: &mut Context<BufferStore>,
1639    ) -> Task<Result<Entity<Buffer>>> {
1640        if let Some(this) = self.as_remote_mut() {
1641            this.wait_for_remote_buffer(id, cx)
1642        } else {
1643            debug_panic!("not a remote buffer store");
1644            Task::ready(Err(anyhow!("not a remote buffer store")))
1645        }
1646    }
1647
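        /// Serializes a project transaction for transmission to `peer_id`, first
        /// ensuring every buffer it touches has been shared with that peer.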
1648    pub fn serialize_project_transaction_for_peer(
1649        &mut self,
1650        project_transaction: ProjectTransaction,
1651        peer_id: proto::PeerId,
1652        cx: &mut Context<Self>,
1653    ) -> proto::ProjectTransaction {
1654        let mut serialized_transaction = proto::ProjectTransaction {
1655            buffer_ids: Default::default(),
1656            transactions: Default::default(),
1657        };
1658        for (buffer, transaction) in project_transaction.0 {
1659            self.create_buffer_for_peer(&buffer, peer_id, cx)
1660                .detach_and_log_err(cx);
1661            serialized_transaction
1662                .buffer_ids
1663                .push(buffer.read(cx).remote_id().into());
1664            serialized_transaction
1665                .transactions
1666                .push(language::proto::serialize_transaction(&transaction));
1667        }
1668        serialized_transaction
1669    }
1670}
1671
1672impl OpenBuffer {
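        /// Returns a strong handle to the buffer, or `None` if the buffer has been
        /// dropped or this entry only holds queued operations.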
1673    fn upgrade(&self) -> Option<Entity<Buffer>> {
1674        match self {
1675            OpenBuffer::Complete { buffer, .. } => buffer.upgrade(),
1676            OpenBuffer::Operations(_) => None,
1677        }
1678    }
1679}
1680
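    /// Returns true if the error's root cause is an `io::Error` with kind
    /// `NotFound`.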
1681fn is_not_found_error(error: &anyhow::Error) -> bool {
1682    error
1683        .root_cause()
1684        .downcast_ref::<io::Error>()
1685        .is_some_and(|err| err.kind() == io::ErrorKind::NotFound)
1686}