Detailed changes
@@ -1169,7 +1169,7 @@ dependencies = [
"futures 0.3.28",
"gpui2",
"language2",
- "live_kit_client",
+ "live_kit_client2",
"log",
"media",
"postage",
@@ -4589,6 +4589,39 @@ dependencies = [
"simplelog",
]
+[[package]]
+name = "live_kit_client2"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-broadcast",
+ "async-trait",
+ "block",
+ "byteorder",
+ "bytes 1.5.0",
+ "cocoa",
+ "collections",
+ "core-foundation",
+ "core-graphics",
+ "foreign-types",
+ "futures 0.3.28",
+ "gpui2",
+ "hmac 0.12.1",
+ "jwt",
+ "live_kit_server",
+ "log",
+ "media",
+ "nanoid",
+ "objc",
+ "parking_lot 0.11.2",
+ "postage",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "sha2 0.10.7",
+ "simplelog",
+]
+
[[package]]
name = "live_kit_server"
version = "0.1.0"
@@ -5035,6 +5068,53 @@ dependencies = [
"workspace",
]
+[[package]]
+name = "multi_buffer2"
+version = "0.1.0"
+dependencies = [
+ "aho-corasick",
+ "anyhow",
+ "client2",
+ "clock",
+ "collections",
+ "convert_case 0.6.0",
+ "copilot2",
+ "ctor",
+ "env_logger 0.9.3",
+ "futures 0.3.28",
+ "git",
+ "gpui2",
+ "indoc",
+ "itertools 0.10.5",
+ "language2",
+ "lazy_static",
+ "log",
+ "lsp2",
+ "ordered-float 2.10.0",
+ "parking_lot 0.11.2",
+ "postage",
+ "project2",
+ "pulldown-cmark",
+ "rand 0.8.5",
+ "rich_text",
+ "schemars",
+ "serde",
+ "serde_derive",
+ "settings2",
+ "smallvec",
+ "smol",
+ "snippet",
+ "sum_tree",
+ "text",
+ "theme2",
+ "tree-sitter",
+ "tree-sitter-html",
+ "tree-sitter-rust",
+ "tree-sitter-typescript",
+ "unindent",
+ "util",
+]
+
[[package]]
name = "multimap"
version = "0.8.3"
@@ -8726,6 +8806,29 @@ dependencies = [
"util",
]
+[[package]]
+name = "text2"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "clock",
+ "collections",
+ "ctor",
+ "digest 0.9.0",
+ "env_logger 0.9.3",
+ "gpui2",
+ "lazy_static",
+ "log",
+ "parking_lot 0.11.2",
+ "postage",
+ "rand 0.8.5",
+ "regex",
+ "rope",
+ "smallvec",
+ "sum_tree",
+ "util",
+]
+
[[package]]
name = "textwrap"
version = "0.16.0"
@@ -9586,6 +9689,7 @@ dependencies = [
"itertools 0.11.0",
"rand 0.8.5",
"serde",
+ "settings2",
"smallvec",
"strum",
"theme2",
@@ -10774,7 +10878,7 @@ dependencies = [
[[package]]
name = "zed"
-version = "0.111.0"
+version = "0.112.0"
dependencies = [
"activity_indicator",
"ai",
@@ -10971,7 +11075,7 @@ dependencies = [
"smol",
"sum_tree",
"tempdir",
- "text",
+ "text2",
"theme2",
"thiserror",
"tiny_http",
@@ -61,6 +61,7 @@ members = [
"crates/menu",
"crates/menu2",
"crates/multi_buffer",
+ "crates/multi_buffer2",
"crates/node_runtime",
"crates/notifications",
"crates/outline",
@@ -13,7 +13,7 @@ test-support = [
"client2/test-support",
"collections/test-support",
"gpui2/test-support",
- "live_kit_client/test-support",
+ "live_kit_client2/test-support",
"project2/test-support",
"util/test-support"
]
@@ -24,7 +24,7 @@ client2 = { path = "../client2" }
collections = { path = "../collections" }
gpui2 = { path = "../gpui2" }
log.workspace = true
-live_kit_client = { path = "../live_kit_client" }
+live_kit_client2 = { path = "../live_kit_client2" }
fs2 = { path = "../fs2" }
language2 = { path = "../language2" }
media = { path = "../media" }
@@ -47,6 +47,6 @@ fs2 = { path = "../fs2", features = ["test-support"] }
language2 = { path = "../language2", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
gpui2 = { path = "../gpui2", features = ["test-support"] }
-live_kit_client = { path = "../live_kit_client", features = ["test-support"] }
+live_kit_client2 = { path = "../live_kit_client2", features = ["test-support"] }
project2 = { path = "../project2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
@@ -1,10 +1,12 @@
use anyhow::{anyhow, Result};
use client2::ParticipantIndex;
use client2::{proto, User};
+use collections::HashMap;
use gpui2::WeakModel;
-pub use live_kit_client::Frame;
+pub use live_kit_client2::Frame;
+use live_kit_client2::{RemoteAudioTrack, RemoteVideoTrack};
use project2::Project;
-use std::{fmt, sync::Arc};
+use std::sync::Arc;
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum ParticipantLocation {
@@ -45,27 +47,6 @@ pub struct RemoteParticipant {
pub participant_index: ParticipantIndex,
pub muted: bool,
pub speaking: bool,
- // pub video_tracks: HashMap<live_kit_client::Sid, Arc<RemoteVideoTrack>>,
- // pub audio_tracks: HashMap<live_kit_client::Sid, Arc<RemoteAudioTrack>>,
-}
-
-#[derive(Clone)]
-pub struct RemoteVideoTrack {
- pub(crate) live_kit_track: Arc<live_kit_client::RemoteVideoTrack>,
-}
-
-unsafe impl Send for RemoteVideoTrack {}
-// todo!("remove this sync because it's not legit")
-unsafe impl Sync for RemoteVideoTrack {}
-
-impl fmt::Debug for RemoteVideoTrack {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("RemoteVideoTrack").finish()
- }
-}
-
-impl RemoteVideoTrack {
- pub fn frames(&self) -> async_broadcast::Receiver<Frame> {
- self.live_kit_track.frames()
- }
+ pub video_tracks: HashMap<live_kit_client2::Sid, Arc<RemoteVideoTrack>>,
+ pub audio_tracks: HashMap<live_kit_client2::Sid, Arc<RemoteAudioTrack>>,
}
@@ -1,9 +1,6 @@
-#![allow(dead_code, unused)]
-// todo!()
-
use crate::{
call_settings::CallSettings,
- participant::{LocalParticipant, ParticipantLocation, RemoteParticipant, RemoteVideoTrack},
+ participant::{LocalParticipant, ParticipantLocation, RemoteParticipant},
IncomingCall,
};
use anyhow::{anyhow, Result};
@@ -19,12 +16,15 @@ use gpui2::{
AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Task, WeakModel,
};
use language2::LanguageRegistry;
-use live_kit_client::{LocalTrackPublication, RemoteAudioTrackUpdate, RemoteVideoTrackUpdate};
+use live_kit_client2::{
+ LocalAudioTrack, LocalTrackPublication, LocalVideoTrack, RemoteAudioTrackUpdate,
+ RemoteVideoTrackUpdate,
+};
use postage::{sink::Sink, stream::Stream, watch};
use project2::Project;
use settings2::Settings;
-use std::{future::Future, sync::Arc, time::Duration};
-use util::{ResultExt, TryFutureExt};
+use std::{future::Future, mem, sync::Arc, time::Duration};
+use util::{post_inc, ResultExt, TryFutureExt};
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
@@ -59,7 +59,7 @@ pub enum Event {
pub struct Room {
id: u64,
channel_id: Option<u64>,
- // live_kit: Option<LiveKitRoom>,
+ live_kit: Option<LiveKitRoom>,
status: RoomStatus,
shared_projects: HashSet<WeakModel<Project>>,
joined_projects: HashSet<WeakModel<Project>>,
@@ -95,15 +95,14 @@ impl Room {
#[cfg(any(test, feature = "test-support"))]
pub fn is_connected(&self) -> bool {
- false
- // if let Some(live_kit) = self.live_kit.as_ref() {
- // matches!(
- // *live_kit.room.status().borrow(),
- // live_kit_client::ConnectionState::Connected { .. }
- // )
- // } else {
- // false
- // }
+ if let Some(live_kit) = self.live_kit.as_ref() {
+ matches!(
+ *live_kit.room.status().borrow(),
+ live_kit_client2::ConnectionState::Connected { .. }
+ )
+ } else {
+ false
+ }
}
fn new(
@@ -114,125 +113,130 @@ impl Room {
user_store: Model<UserStore>,
cx: &mut ModelContext<Self>,
) -> Self {
- todo!()
- // let _live_kit_room = if let Some(connection_info) = live_kit_connection_info {
- // let room = live_kit_client::Room::new();
- // let mut status = room.status();
- // // Consume the initial status of the room.
- // let _ = status.try_recv();
- // let _maintain_room = cx.spawn(|this, mut cx| async move {
- // while let Some(status) = status.next().await {
- // let this = if let Some(this) = this.upgrade() {
- // this
- // } else {
- // break;
- // };
-
- // if status == live_kit_client::ConnectionState::Disconnected {
- // this.update(&mut cx, |this, cx| this.leave(cx).log_err())
- // .ok();
- // break;
- // }
- // }
- // });
-
- // let mut track_video_changes = room.remote_video_track_updates();
- // let _maintain_video_tracks = cx.spawn(|this, mut cx| async move {
- // while let Some(track_change) = track_video_changes.next().await {
- // let this = if let Some(this) = this.upgrade() {
- // this
- // } else {
- // break;
- // };
-
- // this.update(&mut cx, |this, cx| {
- // this.remote_video_track_updated(track_change, cx).log_err()
- // })
- // .ok();
- // }
- // });
-
- // let mut track_audio_changes = room.remote_audio_track_updates();
- // let _maintain_audio_tracks = cx.spawn(|this, mut cx| async move {
- // while let Some(track_change) = track_audio_changes.next().await {
- // let this = if let Some(this) = this.upgrade() {
- // this
- // } else {
- // break;
- // };
-
- // this.update(&mut cx, |this, cx| {
- // this.remote_audio_track_updated(track_change, cx).log_err()
- // })
- // .ok();
- // }
- // });
-
- // let connect = room.connect(&connection_info.server_url, &connection_info.token);
- // cx.spawn(|this, mut cx| async move {
- // connect.await?;
-
- // if !cx.update(|cx| Self::mute_on_join(cx))? {
- // this.update(&mut cx, |this, cx| this.share_microphone(cx))?
- // .await?;
- // }
-
- // anyhow::Ok(())
- // })
- // .detach_and_log_err(cx);
-
- // Some(LiveKitRoom {
- // room,
- // screen_track: LocalTrack::None,
- // microphone_track: LocalTrack::None,
- // next_publish_id: 0,
- // muted_by_user: false,
- // deafened: false,
- // speaking: false,
- // _maintain_room,
- // _maintain_tracks: [_maintain_video_tracks, _maintain_audio_tracks],
- // })
- // } else {
- // None
- // };
-
- // let maintain_connection = cx.spawn({
- // let client = client.clone();
- // move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err()
- // });
-
- // Audio::play_sound(Sound::Joined, cx);
-
- // let (room_update_completed_tx, room_update_completed_rx) = watch::channel();
-
- // Self {
- // id,
- // channel_id,
- // // live_kit: live_kit_room,
- // status: RoomStatus::Online,
- // shared_projects: Default::default(),
- // joined_projects: Default::default(),
- // participant_user_ids: Default::default(),
- // local_participant: Default::default(),
- // remote_participants: Default::default(),
- // pending_participants: Default::default(),
- // pending_call_count: 0,
- // client_subscriptions: vec![
- // client.add_message_handler(cx.weak_handle(), Self::handle_room_updated)
- // ],
- // _subscriptions: vec![
- // cx.on_release(Self::released),
- // cx.on_app_quit(Self::app_will_quit),
- // ],
- // leave_when_empty: false,
- // pending_room_update: None,
- // client,
- // user_store,
- // follows_by_leader_id_project_id: Default::default(),
- // maintain_connection: Some(maintain_connection),
- // room_update_completed_tx,
- // room_update_completed_rx,
- // }
+ let live_kit_room = if let Some(connection_info) = live_kit_connection_info {
+ let room = live_kit_client2::Room::new();
+ let mut status = room.status();
+ // Consume the initial status of the room.
+ let _ = status.try_recv();
+ let _maintain_room = cx.spawn(|this, mut cx| async move {
+ while let Some(status) = status.next().await {
+ let this = if let Some(this) = this.upgrade() {
+ this
+ } else {
+ break;
+ };
+
+ if status == live_kit_client2::ConnectionState::Disconnected {
+ this.update(&mut cx, |this, cx| this.leave(cx).log_err())
+ .ok();
+ break;
+ }
+ }
+ });
+
+ let _maintain_video_tracks = cx.spawn_on_main({
+ let room = room.clone();
+ move |this, mut cx| async move {
+ let mut track_video_changes = room.remote_video_track_updates();
+ while let Some(track_change) = track_video_changes.next().await {
+ let this = if let Some(this) = this.upgrade() {
+ this
+ } else {
+ break;
+ };
+
+ this.update(&mut cx, |this, cx| {
+ this.remote_video_track_updated(track_change, cx).log_err()
+ })
+ .ok();
+ }
+ }
+ });
+
+ let _maintain_audio_tracks = cx.spawn_on_main({
+ let room = room.clone();
+ |this, mut cx| async move {
+ let mut track_audio_changes = room.remote_audio_track_updates();
+ while let Some(track_change) = track_audio_changes.next().await {
+ let this = if let Some(this) = this.upgrade() {
+ this
+ } else {
+ break;
+ };
+
+ this.update(&mut cx, |this, cx| {
+ this.remote_audio_track_updated(track_change, cx).log_err()
+ })
+ .ok();
+ }
+ }
+ });
+
+ let connect = room.connect(&connection_info.server_url, &connection_info.token);
+ cx.spawn(|this, mut cx| async move {
+ connect.await?;
+
+ if !cx.update(|cx| Self::mute_on_join(cx))? {
+ this.update(&mut cx, |this, cx| this.share_microphone(cx))?
+ .await?;
+ }
+
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+
+ Some(LiveKitRoom {
+ room,
+ screen_track: LocalTrack::None,
+ microphone_track: LocalTrack::None,
+ next_publish_id: 0,
+ muted_by_user: false,
+ deafened: false,
+ speaking: false,
+ _maintain_room,
+ _maintain_tracks: [_maintain_video_tracks, _maintain_audio_tracks],
+ })
+ } else {
+ None
+ };
+
+ let maintain_connection = cx.spawn({
+ let client = client.clone();
+ move |this, cx| Self::maintain_connection(this, client.clone(), cx).log_err()
+ });
+
+ Audio::play_sound(Sound::Joined, cx);
+
+ let (room_update_completed_tx, room_update_completed_rx) = watch::channel();
+
+ Self {
+ id,
+ channel_id,
+ live_kit: live_kit_room,
+ status: RoomStatus::Online,
+ shared_projects: Default::default(),
+ joined_projects: Default::default(),
+ participant_user_ids: Default::default(),
+ local_participant: Default::default(),
+ remote_participants: Default::default(),
+ pending_participants: Default::default(),
+ pending_call_count: 0,
+ client_subscriptions: vec![
+ client.add_message_handler(cx.weak_model(), Self::handle_room_updated)
+ ],
+ _subscriptions: vec![
+ cx.on_release(Self::released),
+ cx.on_app_quit(Self::app_will_quit),
+ ],
+ leave_when_empty: false,
+ pending_room_update: None,
+ client,
+ user_store,
+ follows_by_leader_id_project_id: Default::default(),
+ maintain_connection: Some(maintain_connection),
+ room_update_completed_tx,
+ room_update_completed_rx,
+ }
}
pub(crate) fn create(
@@ -418,7 +422,7 @@ impl Room {
self.pending_participants.clear();
self.participant_user_ids.clear();
self.client_subscriptions.clear();
- // self.live_kit.take();
+ self.live_kit.take();
self.pending_room_update.take();
self.maintain_connection.take();
}
@@ -794,43 +798,43 @@ impl Room {
location,
muted: true,
speaking: false,
- // video_tracks: Default::default(),
- // audio_tracks: Default::default(),
+ video_tracks: Default::default(),
+ audio_tracks: Default::default(),
},
);
Audio::play_sound(Sound::Joined, cx);
- // if let Some(live_kit) = this.live_kit.as_ref() {
- // let video_tracks =
- // live_kit.room.remote_video_tracks(&user.id.to_string());
- // let audio_tracks =
- // live_kit.room.remote_audio_tracks(&user.id.to_string());
- // let publications = live_kit
- // .room
- // .remote_audio_track_publications(&user.id.to_string());
-
- // for track in video_tracks {
- // this.remote_video_track_updated(
- // RemoteVideoTrackUpdate::Subscribed(track),
- // cx,
- // )
- // .log_err();
- // }
-
- // for (track, publication) in
- // audio_tracks.iter().zip(publications.iter())
- // {
- // this.remote_audio_track_updated(
- // RemoteAudioTrackUpdate::Subscribed(
- // track.clone(),
- // publication.clone(),
- // ),
- // cx,
- // )
- // .log_err();
- // }
- // }
+ if let Some(live_kit) = this.live_kit.as_ref() {
+ let video_tracks =
+ live_kit.room.remote_video_tracks(&user.id.to_string());
+ let audio_tracks =
+ live_kit.room.remote_audio_tracks(&user.id.to_string());
+ let publications = live_kit
+ .room
+ .remote_audio_track_publications(&user.id.to_string());
+
+ for track in video_tracks {
+ this.remote_video_track_updated(
+ RemoteVideoTrackUpdate::Subscribed(track),
+ cx,
+ )
+ .log_err();
+ }
+
+ for (track, publication) in
+ audio_tracks.iter().zip(publications.iter())
+ {
+ this.remote_audio_track_updated(
+ RemoteAudioTrackUpdate::Subscribed(
+ track.clone(),
+ publication.clone(),
+ ),
+ cx,
+ )
+ .log_err();
+ }
+ }
}
}
@@ -918,7 +922,6 @@ impl Room {
change: RemoteVideoTrackUpdate,
cx: &mut ModelContext<Self>,
) -> Result<()> {
- todo!();
match change {
RemoteVideoTrackUpdate::Subscribed(track) => {
let user_id = track.publisher_id().parse()?;
@@ -927,12 +930,7 @@ impl Room {
.remote_participants
.get_mut(&user_id)
.ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?;
- // participant.video_tracks.insert(
- // track_id.clone(),
- // Arc::new(RemoteVideoTrack {
- // live_kit_track: track,
- // }),
- // );
+ participant.video_tracks.insert(track_id.clone(), track);
cx.emit(Event::RemoteVideoTracksChanged {
participant_id: participant.peer_id,
});
@@ -946,7 +944,7 @@ impl Room {
.remote_participants
.get_mut(&user_id)
.ok_or_else(|| anyhow!("unsubscribed from track by unknown participant"))?;
- // participant.video_tracks.remove(&track_id);
+ participant.video_tracks.remove(&track_id);
cx.emit(Event::RemoteVideoTracksChanged {
participant_id: participant.peer_id,
});
@@ -976,65 +974,61 @@ impl Room {
participant.speaking = false;
}
}
- // todo!()
- // if let Some(id) = self.client.user_id() {
- // if let Some(room) = &mut self.live_kit {
- // if let Ok(_) = speaker_ids.binary_search(&id) {
- // room.speaking = true;
- // } else {
- // room.speaking = false;
- // }
- // }
- // }
+ if let Some(id) = self.client.user_id() {
+ if let Some(room) = &mut self.live_kit {
+ if let Ok(_) = speaker_ids.binary_search(&id) {
+ room.speaking = true;
+ } else {
+ room.speaking = false;
+ }
+ }
+ }
cx.notify();
}
RemoteAudioTrackUpdate::MuteChanged { track_id, muted } => {
- // todo!()
- // let mut found = false;
- // for participant in &mut self.remote_participants.values_mut() {
- // for track in participant.audio_tracks.values() {
- // if track.sid() == track_id {
- // found = true;
- // break;
- // }
- // }
- // if found {
- // participant.muted = muted;
- // break;
- // }
- // }
+ let mut found = false;
+ for participant in &mut self.remote_participants.values_mut() {
+ for track in participant.audio_tracks.values() {
+ if track.sid() == track_id {
+ found = true;
+ break;
+ }
+ }
+ if found {
+ participant.muted = muted;
+ break;
+ }
+ }
cx.notify();
}
RemoteAudioTrackUpdate::Subscribed(track, publication) => {
- // todo!()
- // let user_id = track.publisher_id().parse()?;
- // let track_id = track.sid().to_string();
- // let participant = self
- // .remote_participants
- // .get_mut(&user_id)
- // .ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?;
- // // participant.audio_tracks.insert(track_id.clone(), track);
- // participant.muted = publication.is_muted();
-
- // cx.emit(Event::RemoteAudioTracksChanged {
- // participant_id: participant.peer_id,
- // });
+ let user_id = track.publisher_id().parse()?;
+ let track_id = track.sid().to_string();
+ let participant = self
+ .remote_participants
+ .get_mut(&user_id)
+ .ok_or_else(|| anyhow!("subscribed to track by unknown participant"))?;
+ participant.audio_tracks.insert(track_id.clone(), track);
+ participant.muted = publication.is_muted();
+
+ cx.emit(Event::RemoteAudioTracksChanged {
+ participant_id: participant.peer_id,
+ });
}
RemoteAudioTrackUpdate::Unsubscribed {
publisher_id,
track_id,
} => {
- // todo!()
- // let user_id = publisher_id.parse()?;
- // let participant = self
- // .remote_participants
- // .get_mut(&user_id)
- // .ok_or_else(|| anyhow!("unsubscribed from track by unknown participant"))?;
- // participant.audio_tracks.remove(&track_id);
- // cx.emit(Event::RemoteAudioTracksChanged {
- // participant_id: participant.peer_id,
- // });
+ let user_id = publisher_id.parse()?;
+ let participant = self
+ .remote_participants
+ .get_mut(&user_id)
+ .ok_or_else(|| anyhow!("unsubscribed from track by unknown participant"))?;
+ participant.audio_tracks.remove(&track_id);
+ cx.emit(Event::RemoteAudioTracksChanged {
+ participant_id: participant.peer_id,
+ });
}
}
@@ -1215,278 +1209,269 @@ impl Room {
}
pub fn is_screen_sharing(&self) -> bool {
- todo!()
- // self.live_kit.as_ref().map_or(false, |live_kit| {
- // !matches!(live_kit.screen_track, LocalTrack::None)
- // })
+ self.live_kit.as_ref().map_or(false, |live_kit| {
+ !matches!(live_kit.screen_track, LocalTrack::None)
+ })
}
pub fn is_sharing_mic(&self) -> bool {
- todo!()
- // self.live_kit.as_ref().map_or(false, |live_kit| {
- // !matches!(live_kit.microphone_track, LocalTrack::None)
- // })
+ self.live_kit.as_ref().map_or(false, |live_kit| {
+ !matches!(live_kit.microphone_track, LocalTrack::None)
+ })
}
pub fn is_muted(&self, cx: &AppContext) -> bool {
- todo!()
- // self.live_kit
- // .as_ref()
- // .and_then(|live_kit| match &live_kit.microphone_track {
- // LocalTrack::None => Some(Self::mute_on_join(cx)),
- // LocalTrack::Pending { muted, .. } => Some(*muted),
- // LocalTrack::Published { muted, .. } => Some(*muted),
- // })
- // .unwrap_or(false)
+ self.live_kit
+ .as_ref()
+ .and_then(|live_kit| match &live_kit.microphone_track {
+ LocalTrack::None => Some(Self::mute_on_join(cx)),
+ LocalTrack::Pending { muted, .. } => Some(*muted),
+ LocalTrack::Published { muted, .. } => Some(*muted),
+ })
+ .unwrap_or(false)
}
pub fn is_speaking(&self) -> bool {
- todo!()
- // self.live_kit
- // .as_ref()
- // .map_or(false, |live_kit| live_kit.speaking)
+ self.live_kit
+ .as_ref()
+ .map_or(false, |live_kit| live_kit.speaking)
}
pub fn is_deafened(&self) -> Option<bool> {
- // self.live_kit.as_ref().map(|live_kit| live_kit.deafened)
- todo!()
+ self.live_kit.as_ref().map(|live_kit| live_kit.deafened)
}
#[track_caller]
pub fn share_microphone(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
- todo!()
- // if self.status.is_offline() {
- // return Task::ready(Err(anyhow!("room is offline")));
- // } else if self.is_sharing_mic() {
- // return Task::ready(Err(anyhow!("microphone was already shared")));
- // }
-
- // let publish_id = if let Some(live_kit) = self.live_kit.as_mut() {
- // let publish_id = post_inc(&mut live_kit.next_publish_id);
- // live_kit.microphone_track = LocalTrack::Pending {
- // publish_id,
- // muted: false,
- // };
- // cx.notify();
- // publish_id
- // } else {
- // return Task::ready(Err(anyhow!("live-kit was not initialized")));
- // };
-
- // cx.spawn(move |this, mut cx| async move {
- // let publish_track = async {
- // let track = LocalAudioTrack::create();
- // this.upgrade()
- // .ok_or_else(|| anyhow!("room was dropped"))?
- // .update(&mut cx, |this, _| {
- // this.live_kit
- // .as_ref()
- // .map(|live_kit| live_kit.room.publish_audio_track(track))
- // })?
- // .ok_or_else(|| anyhow!("live-kit was not initialized"))?
- // .await
- // };
-
- // let publication = publish_track.await;
- // this.upgrade()
- // .ok_or_else(|| anyhow!("room was dropped"))?
- // .update(&mut cx, |this, cx| {
- // let live_kit = this
- // .live_kit
- // .as_mut()
- // .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
-
- // let (canceled, muted) = if let LocalTrack::Pending {
- // publish_id: cur_publish_id,
- // muted,
- // } = &live_kit.microphone_track
- // {
- // (*cur_publish_id != publish_id, *muted)
- // } else {
- // (true, false)
- // };
-
- // match publication {
- // Ok(publication) => {
- // if canceled {
- // live_kit.room.unpublish_track(publication);
- // } else {
- // if muted {
- // cx.executor().spawn(publication.set_mute(muted)).detach();
- // }
- // live_kit.microphone_track = LocalTrack::Published {
- // track_publication: publication,
- // muted,
- // };
- // cx.notify();
- // }
- // Ok(())
- // }
- // Err(error) => {
- // if canceled {
- // Ok(())
- // } else {
- // live_kit.microphone_track = LocalTrack::None;
- // cx.notify();
- // Err(error)
- // }
- // }
- // }
- // })?
- // })
+ if self.status.is_offline() {
+ return Task::ready(Err(anyhow!("room is offline")));
+ } else if self.is_sharing_mic() {
+ return Task::ready(Err(anyhow!("microphone was already shared")));
+ }
+
+ let publish_id = if let Some(live_kit) = self.live_kit.as_mut() {
+ let publish_id = post_inc(&mut live_kit.next_publish_id);
+ live_kit.microphone_track = LocalTrack::Pending {
+ publish_id,
+ muted: false,
+ };
+ cx.notify();
+ publish_id
+ } else {
+ return Task::ready(Err(anyhow!("live-kit was not initialized")));
+ };
+
+ cx.spawn(move |this, mut cx| async move {
+ let publish_track = async {
+ let track = LocalAudioTrack::create();
+ this.upgrade()
+ .ok_or_else(|| anyhow!("room was dropped"))?
+ .update(&mut cx, |this, _| {
+ this.live_kit
+ .as_ref()
+ .map(|live_kit| live_kit.room.publish_audio_track(track))
+ })?
+ .ok_or_else(|| anyhow!("live-kit was not initialized"))?
+ .await
+ };
+
+ let publication = publish_track.await;
+ this.upgrade()
+ .ok_or_else(|| anyhow!("room was dropped"))?
+ .update(&mut cx, |this, cx| {
+ let live_kit = this
+ .live_kit
+ .as_mut()
+ .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
+
+ let (canceled, muted) = if let LocalTrack::Pending {
+ publish_id: cur_publish_id,
+ muted,
+ } = &live_kit.microphone_track
+ {
+ (*cur_publish_id != publish_id, *muted)
+ } else {
+ (true, false)
+ };
+
+ match publication {
+ Ok(publication) => {
+ if canceled {
+ live_kit.room.unpublish_track(publication);
+ } else {
+ if muted {
+ cx.executor().spawn(publication.set_mute(muted)).detach();
+ }
+ live_kit.microphone_track = LocalTrack::Published {
+ track_publication: publication,
+ muted,
+ };
+ cx.notify();
+ }
+ Ok(())
+ }
+ Err(error) => {
+ if canceled {
+ Ok(())
+ } else {
+ live_kit.microphone_track = LocalTrack::None;
+ cx.notify();
+ Err(error)
+ }
+ }
+ }
+ })?
+ })
}
pub fn share_screen(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
- todo!()
- // if self.status.is_offline() {
- // return Task::ready(Err(anyhow!("room is offline")));
- // } else if self.is_screen_sharing() {
- // return Task::ready(Err(anyhow!("screen was already shared")));
- // }
-
- // let (displays, publish_id) = if let Some(live_kit) = self.live_kit.as_mut() {
- // let publish_id = post_inc(&mut live_kit.next_publish_id);
- // live_kit.screen_track = LocalTrack::Pending {
- // publish_id,
- // muted: false,
- // };
- // cx.notify();
- // (live_kit.room.display_sources(), publish_id)
- // } else {
- // return Task::ready(Err(anyhow!("live-kit was not initialized")));
- // };
-
- // cx.spawn(move |this, mut cx| async move {
- // let publish_track = async {
- // let displays = displays.await?;
- // let display = displays
- // .first()
- // .ok_or_else(|| anyhow!("no display found"))?;
- // let track = LocalVideoTrack::screen_share_for_display(&display);
- // this.upgrade()
- // .ok_or_else(|| anyhow!("room was dropped"))?
- // .update(&mut cx, |this, _| {
- // this.live_kit
- // .as_ref()
- // .map(|live_kit| live_kit.room.publish_video_track(track))
- // })?
- // .ok_or_else(|| anyhow!("live-kit was not initialized"))?
- // .await
- // };
-
- // let publication = publish_track.await;
- // this.upgrade()
- // .ok_or_else(|| anyhow!("room was dropped"))?
- // .update(&mut cx, |this, cx| {
- // let live_kit = this
- // .live_kit
- // .as_mut()
- // .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
-
- // let (canceled, muted) = if let LocalTrack::Pending {
- // publish_id: cur_publish_id,
- // muted,
- // } = &live_kit.screen_track
- // {
- // (*cur_publish_id != publish_id, *muted)
- // } else {
- // (true, false)
- // };
-
- // match publication {
- // Ok(publication) => {
- // if canceled {
- // live_kit.room.unpublish_track(publication);
- // } else {
- // if muted {
- // cx.executor().spawn(publication.set_mute(muted)).detach();
- // }
- // live_kit.screen_track = LocalTrack::Published {
- // track_publication: publication,
- // muted,
- // };
- // cx.notify();
- // }
-
- // Audio::play_sound(Sound::StartScreenshare, cx);
-
- // Ok(())
- // }
- // Err(error) => {
- // if canceled {
- // Ok(())
- // } else {
- // live_kit.screen_track = LocalTrack::None;
- // cx.notify();
- // Err(error)
- // }
- // }
- // }
- // })?
- // })
+ if self.status.is_offline() {
+ return Task::ready(Err(anyhow!("room is offline")));
+ } else if self.is_screen_sharing() {
+ return Task::ready(Err(anyhow!("screen was already shared")));
+ }
+
+ let (displays, publish_id) = if let Some(live_kit) = self.live_kit.as_mut() {
+ let publish_id = post_inc(&mut live_kit.next_publish_id);
+ live_kit.screen_track = LocalTrack::Pending {
+ publish_id,
+ muted: false,
+ };
+ cx.notify();
+ (live_kit.room.display_sources(), publish_id)
+ } else {
+ return Task::ready(Err(anyhow!("live-kit was not initialized")));
+ };
+
+ cx.spawn_on_main(move |this, mut cx| async move {
+ let publish_track = async {
+ let displays = displays.await?;
+ let display = displays
+ .first()
+ .ok_or_else(|| anyhow!("no display found"))?;
+ let track = LocalVideoTrack::screen_share_for_display(&display);
+ this.upgrade()
+ .ok_or_else(|| anyhow!("room was dropped"))?
+ .update(&mut cx, |this, _| {
+ this.live_kit
+ .as_ref()
+ .map(|live_kit| live_kit.room.publish_video_track(track))
+ })?
+ .ok_or_else(|| anyhow!("live-kit was not initialized"))?
+ .await
+ };
+
+ let publication = publish_track.await;
+ this.upgrade()
+ .ok_or_else(|| anyhow!("room was dropped"))?
+ .update(&mut cx, |this, cx| {
+ let live_kit = this
+ .live_kit
+ .as_mut()
+ .ok_or_else(|| anyhow!("live-kit was not initialized"))?;
+
+ let (canceled, muted) = if let LocalTrack::Pending {
+ publish_id: cur_publish_id,
+ muted,
+ } = &live_kit.screen_track
+ {
+ (*cur_publish_id != publish_id, *muted)
+ } else {
+ (true, false)
+ };
+
+ match publication {
+ Ok(publication) => {
+ if canceled {
+ live_kit.room.unpublish_track(publication);
+ } else {
+ if muted {
+ cx.executor().spawn(publication.set_mute(muted)).detach();
+ }
+ live_kit.screen_track = LocalTrack::Published {
+ track_publication: publication,
+ muted,
+ };
+ cx.notify();
+ }
+
+ Audio::play_sound(Sound::StartScreenshare, cx);
+
+ Ok(())
+ }
+ Err(error) => {
+ if canceled {
+ Ok(())
+ } else {
+ live_kit.screen_track = LocalTrack::None;
+ cx.notify();
+ Err(error)
+ }
+ }
+ }
+ })?
+ })
}
pub fn toggle_mute(&mut self, cx: &mut ModelContext<Self>) -> Result<Task<Result<()>>> {
- todo!()
- // let should_mute = !self.is_muted(cx);
- // if let Some(live_kit) = self.live_kit.as_mut() {
- // if matches!(live_kit.microphone_track, LocalTrack::None) {
- // return Ok(self.share_microphone(cx));
- // }
-
- // let (ret_task, old_muted) = live_kit.set_mute(should_mute, cx)?;
- // live_kit.muted_by_user = should_mute;
-
- // if old_muted == true && live_kit.deafened == true {
- // if let Some(task) = self.toggle_deafen(cx).ok() {
- // task.detach();
- // }
- // }
-
- // Ok(ret_task)
- // } else {
- // Err(anyhow!("LiveKit not started"))
- // }
+ let should_mute = !self.is_muted(cx);
+ if let Some(live_kit) = self.live_kit.as_mut() {
+ if matches!(live_kit.microphone_track, LocalTrack::None) {
+ return Ok(self.share_microphone(cx));
+ }
+
+ let (ret_task, old_muted) = live_kit.set_mute(should_mute, cx)?;
+ live_kit.muted_by_user = should_mute;
+
+ if old_muted == true && live_kit.deafened == true {
+ if let Some(task) = self.toggle_deafen(cx).ok() {
+ task.detach();
+ }
+ }
+
+ Ok(ret_task)
+ } else {
+ Err(anyhow!("LiveKit not started"))
+ }
}
pub fn toggle_deafen(&mut self, cx: &mut ModelContext<Self>) -> Result<Task<Result<()>>> {
- todo!()
- // if let Some(live_kit) = self.live_kit.as_mut() {
- // (*live_kit).deafened = !live_kit.deafened;
-
- // let mut tasks = Vec::with_capacity(self.remote_participants.len());
- // // Context notification is sent within set_mute itself.
- // let mut mute_task = None;
- // // When deafening, mute user's mic as well.
- // // When undeafening, unmute user's mic unless it was manually muted prior to deafening.
- // if live_kit.deafened || !live_kit.muted_by_user {
- // mute_task = Some(live_kit.set_mute(live_kit.deafened, cx)?.0);
- // };
- // for participant in self.remote_participants.values() {
- // for track in live_kit
- // .room
- // .remote_audio_track_publications(&participant.user.id.to_string())
- // {
- // let deafened = live_kit.deafened;
- // tasks.push(
- // cx.executor()
- // .spawn_on_main(move || track.set_enabled(!deafened)),
- // );
- // }
- // }
-
- // Ok(cx.executor().spawn_on_main(|| async {
- // if let Some(mute_task) = mute_task {
- // mute_task.await?;
- // }
- // for task in tasks {
- // task.await?;
- // }
- // Ok(())
- // }))
- // } else {
- // Err(anyhow!("LiveKit not started"))
- // }
+ if let Some(live_kit) = self.live_kit.as_mut() {
+ (*live_kit).deafened = !live_kit.deafened;
+
+ let mut tasks = Vec::with_capacity(self.remote_participants.len());
+ // Context notification is sent within set_mute itself.
+ let mut mute_task = None;
+ // When deafening, mute user's mic as well.
+ // When undeafening, unmute user's mic unless it was manually muted prior to deafening.
+ if live_kit.deafened || !live_kit.muted_by_user {
+ mute_task = Some(live_kit.set_mute(live_kit.deafened, cx)?.0);
+ };
+ for participant in self.remote_participants.values() {
+ for track in live_kit
+ .room
+ .remote_audio_track_publications(&participant.user.id.to_string())
+ {
+ let deafened = live_kit.deafened;
+ tasks.push(
+ cx.executor()
+ .spawn_on_main(move || track.set_enabled(!deafened)),
+ );
+ }
+ }
+
+ Ok(cx.executor().spawn_on_main(|| async {
+ if let Some(mute_task) = mute_task {
+ mute_task.await?;
+ }
+ for task in tasks {
+ task.await?;
+ }
+ Ok(())
+ }))
+ } else {
+ Err(anyhow!("LiveKit not started"))
+ }
}
pub fn unshare_screen(&mut self, cx: &mut ModelContext<Self>) -> Result<()> {
@@ -84,7 +84,7 @@ struct DeterministicState {
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ExecutorEvent {
PollRunnable { id: usize },
- EnqueuRunnable { id: usize },
+ EnqueueRunnable { id: usize },
}
#[cfg(any(test, feature = "test-support"))]
@@ -199,7 +199,7 @@ impl Deterministic {
let unparker = self.parker.lock().unparker();
let (runnable, task) = async_task::spawn_local(future, move |runnable| {
let mut state = state.lock();
- state.push_to_history(ExecutorEvent::EnqueuRunnable { id });
+ state.push_to_history(ExecutorEvent::EnqueueRunnable { id });
state
.scheduled_from_foreground
.entry(cx_id)
@@ -229,7 +229,7 @@ impl Deterministic {
let mut state = state.lock();
state
.poll_history
- .push(ExecutorEvent::EnqueuRunnable { id });
+ .push(ExecutorEvent::EnqueueRunnable { id });
state
.scheduled_from_background
.push(BackgroundRunnable { id, runnable });
@@ -616,7 +616,7 @@ impl ExecutorEvent {
pub fn id(&self) -> usize {
match self {
ExecutorEvent::PollRunnable { id } => *id,
- ExecutorEvent::EnqueuRunnable { id } => *id,
+ ExecutorEvent::EnqueueRunnable { id } => *id,
}
}
}
@@ -376,7 +376,7 @@ impl AppContext {
self.observers.remove(&entity_id);
self.event_listeners.remove(&entity_id);
for mut release_callback in self.release_listeners.remove(&entity_id) {
- release_callback(&mut entity, self);
+ release_callback(entity.as_mut(), self);
}
}
}
@@ -106,7 +106,12 @@ impl EntityMap {
dropped_entity_ids
.into_iter()
.map(|entity_id| {
- ref_counts.counts.remove(entity_id);
+ let count = ref_counts.counts.remove(entity_id).unwrap();
+ debug_assert_eq!(
+ count.load(SeqCst),
+ 0,
+ "dropped an entity that was referenced"
+ );
(entity_id, self.entities.remove(entity_id).unwrap())
})
.collect()
@@ -211,7 +216,7 @@ impl Drop for AnyModel {
let count = entity_map
.counts
.get(self.entity_id)
- .expect("Detected over-release of a model.");
+ .expect("detected over-release of a handle.");
let prev_count = count.fetch_sub(1, SeqCst);
assert_ne!(prev_count, 0, "Detected over-release of a model.");
if prev_count == 1 {
@@ -395,12 +400,16 @@ impl AnyWeakModel {
}
pub fn upgrade(&self) -> Option<AnyModel> {
- let entity_map = self.entity_ref_counts.upgrade()?;
- entity_map
- .read()
- .counts
- .get(self.entity_id)?
- .fetch_add(1, SeqCst);
+ let ref_counts = &self.entity_ref_counts.upgrade()?;
+ let ref_counts = ref_counts.read();
+ let ref_count = ref_counts.counts.get(self.entity_id)?;
+
+ // entity_id is in dropped_entity_ids
+ if ref_count.load(SeqCst) == 0 {
+ return None;
+ }
+ ref_count.fetch_add(1, SeqCst);
+
Some(AnyModel {
entity_id: self.entity_id,
entity_type: self.entity_type,
@@ -499,3 +508,60 @@ impl<T> PartialEq<Model<T>> for WeakModel<T> {
self.entity_id() == other.any_model.entity_id()
}
}
+
+#[cfg(test)]
+mod test {
+ use crate::EntityMap;
+
+ struct TestEntity {
+ pub i: i32,
+ }
+
+ #[test]
+ fn test_entity_map_slot_assignment_before_cleanup() {
+ // Tests that slots are not re-used before take_dropped.
+ let mut entity_map = EntityMap::new();
+
+ let slot = entity_map.reserve::<TestEntity>();
+ entity_map.insert(slot, TestEntity { i: 1 });
+
+ let slot = entity_map.reserve::<TestEntity>();
+ entity_map.insert(slot, TestEntity { i: 2 });
+
+ let dropped = entity_map.take_dropped();
+ assert_eq!(dropped.len(), 2);
+
+ assert_eq!(
+ dropped
+ .into_iter()
+ .map(|(_, entity)| entity.downcast::<TestEntity>().unwrap().i)
+ .collect::<Vec<i32>>(),
+ vec![1, 2],
+ );
+ }
+
+ #[test]
+ fn test_entity_map_weak_upgrade_before_cleanup() {
+ // Tests that weak handles are not upgraded before take_dropped
+ let mut entity_map = EntityMap::new();
+
+ let slot = entity_map.reserve::<TestEntity>();
+ let handle = entity_map.insert(slot, TestEntity { i: 1 });
+ let weak = handle.downgrade();
+ drop(handle);
+
+ let strong = weak.upgrade();
+ assert_eq!(strong, None);
+
+ let dropped = entity_map.take_dropped();
+ assert_eq!(dropped.len(), 1);
+
+ assert_eq!(
+ dropped
+ .into_iter()
+ .map(|(_, entity)| entity.downcast::<TestEntity>().unwrap().i)
+ .collect::<Vec<i32>>(),
+ vec![1],
+ );
+ }
+}
@@ -1,7 +1,8 @@
use crate::{
- AnyWindowHandle, AppContext, AsyncAppContext, Context, Executor, MainThread, Model,
- ModelContext, Result, Task, TestDispatcher, TestPlatform, WindowContext,
+ AnyWindowHandle, AppContext, AsyncAppContext, Context, EventEmitter, Executor, MainThread,
+ Model, ModelContext, Result, Task, TestDispatcher, TestPlatform, WindowContext,
};
+use futures::SinkExt;
use parking_lot::Mutex;
use std::{future::Future, sync::Arc};
@@ -63,8 +64,8 @@ impl TestAppContext {
}
pub fn update<R>(&self, f: impl FnOnce(&mut AppContext) -> R) -> R {
- let mut lock = self.app.lock();
- f(&mut *lock)
+ let mut cx = self.app.lock();
+ cx.update(f)
}
pub fn read_window<R>(
@@ -149,4 +150,22 @@ impl TestAppContext {
executor: self.executor.clone(),
}
}
+
+ pub fn subscribe<T: 'static + EventEmitter + Send>(
+ &mut self,
+ entity: &Model<T>,
+ ) -> futures::channel::mpsc::UnboundedReceiver<T::Event>
+ where
+ T::Event: 'static + Send + Clone,
+ {
+ let (mut tx, rx) = futures::channel::mpsc::unbounded();
+ entity
+ .update(self, |_, cx: &mut ModelContext<T>| {
+ cx.subscribe(entity, move |_, _, event, cx| {
+ cx.executor().block(tx.send(event.clone())).unwrap();
+ })
+ })
+ .detach();
+ rx
+ }
}
@@ -6,7 +6,10 @@ use std::{
marker::PhantomData,
mem,
pin::Pin,
- sync::Arc,
+ sync::{
+ atomic::{AtomicBool, Ordering::SeqCst},
+ Arc,
+ },
task::{Context, Poll},
time::Duration,
};
@@ -136,7 +139,11 @@ impl Executor {
pub fn block<R>(&self, future: impl Future<Output = R>) -> R {
pin_mut!(future);
let (parker, unparker) = parking::pair();
+ let awoken = Arc::new(AtomicBool::new(false));
+ let awoken2 = awoken.clone();
+
let waker = waker_fn(move || {
+ awoken2.store(true, SeqCst);
unparker.unpark();
});
let mut cx = std::task::Context::from_waker(&waker);
@@ -146,9 +153,20 @@ impl Executor {
Poll::Ready(result) => return result,
Poll::Pending => {
if !self.dispatcher.poll() {
+ if awoken.swap(false, SeqCst) {
+ continue;
+ }
+
#[cfg(any(test, feature = "test-support"))]
- if let Some(_) = self.dispatcher.as_test() {
- panic!("blocked with nothing left to run")
+ if let Some(test) = self.dispatcher.as_test() {
+ if !test.parking_allowed() {
+ let mut backtrace_message = String::new();
+ if let Some(backtrace) = test.waiting_backtrace() {
+ backtrace_message =
+ format!("\nbacktrace of waiting future:\n{:?}", backtrace);
+ }
+                    panic!("parked with nothing left to run\n{}", backtrace_message)
+ }
}
parker.park();
}
@@ -206,12 +224,12 @@ impl Executor {
#[cfg(any(test, feature = "test-support"))]
pub fn start_waiting(&self) {
- todo!("start_waiting")
+ self.dispatcher.as_test().unwrap().start_waiting();
}
#[cfg(any(test, feature = "test-support"))]
pub fn finish_waiting(&self) {
- todo!("finish_waiting")
+ self.dispatcher.as_test().unwrap().finish_waiting();
}
#[cfg(any(test, feature = "test-support"))]
@@ -229,6 +247,11 @@ impl Executor {
self.dispatcher.as_test().unwrap().run_until_parked()
}
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn allow_parking(&self) {
+ self.dispatcher.as_test().unwrap().allow_parking();
+ }
+
pub fn num_cpus(&self) -> usize {
num_cpus::get()
}
@@ -1,5 +1,6 @@
use crate::PlatformDispatcher;
use async_task::Runnable;
+use backtrace::Backtrace;
use collections::{HashMap, VecDeque};
use parking_lot::Mutex;
use rand::prelude::*;
@@ -28,6 +29,8 @@ struct TestDispatcherState {
time: Duration,
is_main_thread: bool,
next_id: TestDispatcherId,
+ allow_parking: bool,
+ waiting_backtrace: Option<Backtrace>,
}
impl TestDispatcher {
@@ -40,6 +43,8 @@ impl TestDispatcher {
time: Duration::ZERO,
is_main_thread: true,
next_id: TestDispatcherId(1),
+ allow_parking: false,
+ waiting_backtrace: None,
};
TestDispatcher {
@@ -66,7 +71,7 @@ impl TestDispatcher {
self.state.lock().time = new_now;
}
- pub fn simulate_random_delay(&self) -> impl Future<Output = ()> {
+ pub fn simulate_random_delay(&self) -> impl 'static + Send + Future<Output = ()> {
pub struct YieldNow {
count: usize,
}
@@ -93,6 +98,29 @@ impl TestDispatcher {
pub fn run_until_parked(&self) {
while self.poll() {}
}
+
+ pub fn parking_allowed(&self) -> bool {
+ self.state.lock().allow_parking
+ }
+
+ pub fn allow_parking(&self) {
+ self.state.lock().allow_parking = true
+ }
+
+ pub fn start_waiting(&self) {
+ self.state.lock().waiting_backtrace = Some(Backtrace::new_unresolved());
+ }
+
+ pub fn finish_waiting(&self) {
+ self.state.lock().waiting_backtrace.take();
+ }
+
+ pub fn waiting_backtrace(&self) -> Option<Backtrace> {
+ self.state.lock().waiting_backtrace.take().map(|mut b| {
+ b.resolve();
+ b
+ })
+ }
}
impl Clone for TestDispatcher {
@@ -47,8 +47,8 @@ where
subscribers.remove(&subscriber_id);
if subscribers.is_empty() {
lock.subscribers.remove(&emitter_key);
- return;
}
+ return;
}
// We didn't manage to remove the subscription, which means it was dropped
@@ -541,6 +541,12 @@ impl<'a, 'w> WindowContext<'a, 'w> {
self.window.rem_size
}
+ /// Sets the size of an em for the base font of the application. Adjusting this value allows the
+ /// UI to scale, just like zooming a web page.
+ pub fn set_rem_size(&mut self, rem_size: impl Into<Pixels>) {
+ self.window.rem_size = rem_size.into();
+ }
+
/// The line height associated with the current text style.
pub fn line_height(&self) -> Pixels {
let rem_size = self.rem_size();
@@ -42,8 +42,8 @@
"repositoryURL": "https://github.com/apple/swift-protobuf.git",
"state": {
"branch": null,
- "revision": "ce20dc083ee485524b802669890291c0d8090170",
- "version": "1.22.1"
+ "revision": "0af9125c4eae12a4973fb66574c53a54962a9e1e",
+ "version": "1.21.0"
}
}
]
@@ -0,0 +1,2 @@
+[live_kit_client_test]
+rustflags = ["-C", "link-args=-ObjC"]
@@ -0,0 +1,71 @@
+[package]
+name = "live_kit_client2"
+version = "0.1.0"
+edition = "2021"
+description = "Bindings to LiveKit Swift client SDK"
+publish = false
+
+[lib]
+path = "src/live_kit_client2.rs"
+doctest = false
+
+[[example]]
+name = "test_app"
+
+[features]
+test-support = [
+ "async-trait",
+ "collections/test-support",
+ "gpui2/test-support",
+ "live_kit_server",
+ "nanoid",
+]
+
+[dependencies]
+collections = { path = "../collections", optional = true }
+gpui2 = { path = "../gpui2", optional = true }
+live_kit_server = { path = "../live_kit_server", optional = true }
+media = { path = "../media" }
+
+anyhow.workspace = true
+async-broadcast = "0.4"
+core-foundation = "0.9.3"
+core-graphics = "0.22.3"
+futures.workspace = true
+log.workspace = true
+parking_lot.workspace = true
+postage.workspace = true
+
+async-trait = { workspace = true, optional = true }
+nanoid = { version = "0.4", optional = true }
+
+[dev-dependencies]
+collections = { path = "../collections", features = ["test-support"] }
+gpui2 = { path = "../gpui2", features = ["test-support"] }
+live_kit_server = { path = "../live_kit_server" }
+media = { path = "../media" }
+nanoid = "0.4"
+
+anyhow.workspace = true
+async-trait.workspace = true
+block = "0.1"
+bytes = "1.2"
+byteorder = "1.4"
+cocoa = "0.24"
+core-foundation = "0.9.3"
+core-graphics = "0.22.3"
+foreign-types = "0.3"
+futures.workspace = true
+hmac = "0.12"
+jwt = "0.16"
+objc = "0.2"
+parking_lot.workspace = true
+serde.workspace = true
+serde_derive.workspace = true
+sha2 = "0.10"
+simplelog = "0.9"
+
+[build-dependencies]
+serde.workspace = true
+serde_derive.workspace = true
+serde_json.workspace = true
@@ -0,0 +1,52 @@
+{
+ "object": {
+ "pins": [
+ {
+ "package": "LiveKit",
+ "repositoryURL": "https://github.com/livekit/client-sdk-swift.git",
+ "state": {
+ "branch": null,
+ "revision": "7331b813a5ab8a95cfb81fb2b4ed10519428b9ff",
+ "version": "1.0.12"
+ }
+ },
+ {
+ "package": "Promises",
+ "repositoryURL": "https://github.com/google/promises.git",
+ "state": {
+ "branch": null,
+ "revision": "ec957ccddbcc710ccc64c9dcbd4c7006fcf8b73a",
+ "version": "2.2.0"
+ }
+ },
+ {
+ "package": "WebRTC",
+ "repositoryURL": "https://github.com/webrtc-sdk/Specs.git",
+ "state": {
+ "branch": null,
+ "revision": "2f6bab30c8df0fe59ab3e58bc99097f757f85f65",
+ "version": "104.5112.17"
+ }
+ },
+ {
+ "package": "swift-log",
+ "repositoryURL": "https://github.com/apple/swift-log.git",
+ "state": {
+ "branch": null,
+ "revision": "32e8d724467f8fe623624570367e3d50c5638e46",
+ "version": "1.5.2"
+ }
+ },
+ {
+ "package": "SwiftProtobuf",
+ "repositoryURL": "https://github.com/apple/swift-protobuf.git",
+ "state": {
+ "branch": null,
+ "revision": "ce20dc083ee485524b802669890291c0d8090170",
+ "version": "1.22.1"
+ }
+ }
+ ]
+ },
+ "version": 1
+}
@@ -0,0 +1,27 @@
+// swift-tools-version: 5.5
+
+import PackageDescription
+
+let package = Package(
+ name: "LiveKitBridge2",
+ platforms: [
+ .macOS(.v10_15)
+ ],
+ products: [
+ // Products define the executables and libraries a package produces, and make them visible to other packages.
+ .library(
+ name: "LiveKitBridge2",
+ type: .static,
+ targets: ["LiveKitBridge2"]),
+ ],
+ dependencies: [
+ .package(url: "https://github.com/livekit/client-sdk-swift.git", .exact("1.0.12")),
+ ],
+ targets: [
+ // Targets are the basic building blocks of a package. A target can define a module or a test suite.
+ // Targets can depend on other targets in this package, and on products in packages this package depends on.
+ .target(
+ name: "LiveKitBridge2",
+ dependencies: [.product(name: "LiveKit", package: "client-sdk-swift")]),
+ ]
+)
@@ -0,0 +1,3 @@
+# LiveKitBridge2
+
+A description of this package.
@@ -0,0 +1,327 @@
+import Foundation
+import LiveKit
+import WebRTC
+import ScreenCaptureKit
+
+class LKRoomDelegate: RoomDelegate {
+ var data: UnsafeRawPointer
+ var onDidDisconnect: @convention(c) (UnsafeRawPointer) -> Void
+ var onDidSubscribeToRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer, UnsafeRawPointer) -> Void
+ var onDidUnsubscribeFromRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
+ var onMuteChangedFromRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, Bool) -> Void
+ var onActiveSpeakersChanged: @convention(c) (UnsafeRawPointer, CFArray) -> Void
+ var onDidSubscribeToRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void
+ var onDidUnsubscribeFromRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
+
+ init(
+ data: UnsafeRawPointer,
+ onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
+ onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer, UnsafeRawPointer) -> Void,
+ onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
+ onMuteChangedFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, Bool) -> Void,
+ onActiveSpeakersChanged: @convention(c) (UnsafeRawPointer, CFArray) -> Void,
+ onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
+ onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void)
+ {
+ self.data = data
+ self.onDidDisconnect = onDidDisconnect
+ self.onDidSubscribeToRemoteAudioTrack = onDidSubscribeToRemoteAudioTrack
+ self.onDidUnsubscribeFromRemoteAudioTrack = onDidUnsubscribeFromRemoteAudioTrack
+ self.onDidSubscribeToRemoteVideoTrack = onDidSubscribeToRemoteVideoTrack
+ self.onDidUnsubscribeFromRemoteVideoTrack = onDidUnsubscribeFromRemoteVideoTrack
+ self.onMuteChangedFromRemoteAudioTrack = onMuteChangedFromRemoteAudioTrack
+ self.onActiveSpeakersChanged = onActiveSpeakersChanged
+ }
+
+ func room(_ room: Room, didUpdate connectionState: ConnectionState, oldValue: ConnectionState) {
+ if connectionState.isDisconnected {
+ self.onDidDisconnect(self.data)
+ }
+ }
+
+ func room(_ room: Room, participant: RemoteParticipant, didSubscribe publication: RemoteTrackPublication, track: Track) {
+ if track.kind == .video {
+ self.onDidSubscribeToRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque())
+ } else if track.kind == .audio {
+ self.onDidSubscribeToRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque(), Unmanaged.passUnretained(publication).toOpaque())
+ }
+ }
+
+ func room(_ room: Room, participant: Participant, didUpdate publication: TrackPublication, muted: Bool) {
+ if publication.kind == .audio {
+ self.onMuteChangedFromRemoteAudioTrack(self.data, publication.sid as CFString, muted)
+ }
+ }
+
+ func room(_ room: Room, didUpdate speakers: [Participant]) {
+ guard let speaker_ids = speakers.compactMap({ $0.identity as CFString }) as CFArray? else { return }
+ self.onActiveSpeakersChanged(self.data, speaker_ids)
+ }
+
+ func room(_ room: Room, participant: RemoteParticipant, didUnsubscribe publication: RemoteTrackPublication, track: Track) {
+ if track.kind == .video {
+ self.onDidUnsubscribeFromRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString)
+ } else if track.kind == .audio {
+ self.onDidUnsubscribeFromRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString)
+ }
+ }
+}
+
+class LKVideoRenderer: NSObject, VideoRenderer {
+ var data: UnsafeRawPointer
+ var onFrame: @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool
+ var onDrop: @convention(c) (UnsafeRawPointer) -> Void
+ var adaptiveStreamIsEnabled: Bool = false
+ var adaptiveStreamSize: CGSize = .zero
+ weak var track: VideoTrack?
+
+ init(data: UnsafeRawPointer, onFrame: @escaping @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool, onDrop: @escaping @convention(c) (UnsafeRawPointer) -> Void) {
+ self.data = data
+ self.onFrame = onFrame
+ self.onDrop = onDrop
+ }
+
+ deinit {
+ self.onDrop(self.data)
+ }
+
+ func setSize(_ size: CGSize) {
+ }
+
+ func renderFrame(_ frame: RTCVideoFrame?) {
+ let buffer = frame?.buffer as? RTCCVPixelBuffer
+ if let pixelBuffer = buffer?.pixelBuffer {
+ if !self.onFrame(self.data, pixelBuffer) {
+ DispatchQueue.main.async {
+ self.track?.remove(videoRenderer: self)
+ }
+ }
+ }
+ }
+}
+
+@_cdecl("LKRoomDelegateCreate")
+public func LKRoomDelegateCreate(
+ data: UnsafeRawPointer,
+ onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
+ onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer, UnsafeRawPointer) -> Void,
+ onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
+ onMuteChangedFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, Bool) -> Void,
+ onActiveSpeakerChanged: @escaping @convention(c) (UnsafeRawPointer, CFArray) -> Void,
+ onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
+ onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
+) -> UnsafeMutableRawPointer {
+ let delegate = LKRoomDelegate(
+ data: data,
+ onDidDisconnect: onDidDisconnect,
+ onDidSubscribeToRemoteAudioTrack: onDidSubscribeToRemoteAudioTrack,
+ onDidUnsubscribeFromRemoteAudioTrack: onDidUnsubscribeFromRemoteAudioTrack,
+ onMuteChangedFromRemoteAudioTrack: onMuteChangedFromRemoteAudioTrack,
+ onActiveSpeakersChanged: onActiveSpeakerChanged,
+ onDidSubscribeToRemoteVideoTrack: onDidSubscribeToRemoteVideoTrack,
+ onDidUnsubscribeFromRemoteVideoTrack: onDidUnsubscribeFromRemoteVideoTrack
+ )
+ return Unmanaged.passRetained(delegate).toOpaque()
+}
+
+@_cdecl("LKRoomCreate")
+public func LKRoomCreate(delegate: UnsafeRawPointer) -> UnsafeMutableRawPointer {
+ let delegate = Unmanaged<LKRoomDelegate>.fromOpaque(delegate).takeUnretainedValue()
+ return Unmanaged.passRetained(Room(delegate: delegate)).toOpaque()
+}
+
+@_cdecl("LKRoomConnect")
+public func LKRoomConnect(room: UnsafeRawPointer, url: CFString, token: CFString, callback: @escaping @convention(c) (UnsafeRawPointer, CFString?) -> Void, callback_data: UnsafeRawPointer) {
+ let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
+
+ room.connect(url as String, token as String).then { _ in
+        callback(callback_data, nil)
+ }.catch { error in
+ callback(callback_data, error.localizedDescription as CFString)
+ }
+}
+
+@_cdecl("LKRoomDisconnect")
+public func LKRoomDisconnect(room: UnsafeRawPointer) {
+ let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
+ room.disconnect()
+}
+
+@_cdecl("LKRoomPublishVideoTrack")
+public func LKRoomPublishVideoTrack(room: UnsafeRawPointer, track: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, UnsafeMutableRawPointer?, CFString?) -> Void, callback_data: UnsafeRawPointer) {
+ let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
+ let track = Unmanaged<LocalVideoTrack>.fromOpaque(track).takeUnretainedValue()
+ room.localParticipant?.publishVideoTrack(track: track).then { publication in
+ callback(callback_data, Unmanaged.passRetained(publication).toOpaque(), nil)
+ }.catch { error in
+ callback(callback_data, nil, error.localizedDescription as CFString)
+ }
+}
+
+@_cdecl("LKRoomPublishAudioTrack")
+public func LKRoomPublishAudioTrack(room: UnsafeRawPointer, track: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, UnsafeMutableRawPointer?, CFString?) -> Void, callback_data: UnsafeRawPointer) {
+ let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
+ let track = Unmanaged<LocalAudioTrack>.fromOpaque(track).takeUnretainedValue()
+ room.localParticipant?.publishAudioTrack(track: track).then { publication in
+ callback(callback_data, Unmanaged.passRetained(publication).toOpaque(), nil)
+ }.catch { error in
+ callback(callback_data, nil, error.localizedDescription as CFString)
+ }
+}
+
+
+@_cdecl("LKRoomUnpublishTrack")
+public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawPointer) {
+ let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
+ let publication = Unmanaged<LocalTrackPublication>.fromOpaque(publication).takeUnretainedValue()
+ let _ = room.localParticipant?.unpublish(publication: publication)
+}
+
+@_cdecl("LKRoomAudioTracksForRemoteParticipant")
+public func LKRoomAudioTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
+ let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
+
+ for (_, participant) in room.remoteParticipants {
+ if participant.identity == participantId as String {
+ return participant.audioTracks.compactMap { $0.track as? RemoteAudioTrack } as CFArray?
+ }
+ }
+
+ return nil;
+}
+
+@_cdecl("LKRoomAudioTrackPublicationsForRemoteParticipant")
+public func LKRoomAudioTrackPublicationsForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
+ let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
+
+ for (_, participant) in room.remoteParticipants {
+ if participant.identity == participantId as String {
+ return participant.audioTracks.compactMap { $0 as? RemoteTrackPublication } as CFArray?
+ }
+ }
+
+ return nil;
+}
+
+@_cdecl("LKRoomVideoTracksForRemoteParticipant")
+public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
+ let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
+
+ for (_, participant) in room.remoteParticipants {
+ if participant.identity == participantId as String {
+ return participant.videoTracks.compactMap { $0.track as? RemoteVideoTrack } as CFArray?
+ }
+ }
+
+ return nil;
+}
+
+@_cdecl("LKLocalAudioTrackCreateTrack")
+public func LKLocalAudioTrackCreateTrack() -> UnsafeMutableRawPointer {
+ let track = LocalAudioTrack.createTrack(options: AudioCaptureOptions(
+ echoCancellation: true,
+ noiseSuppression: true
+ ))
+
+ return Unmanaged.passRetained(track).toOpaque()
+}
+
+
+@_cdecl("LKCreateScreenShareTrackForDisplay")
+public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer) -> UnsafeMutableRawPointer {
+ let display = Unmanaged<MacOSDisplay>.fromOpaque(display).takeUnretainedValue()
+ let track = LocalVideoTrack.createMacOSScreenShareTrack(source: display, preferredMethod: .legacy)
+ return Unmanaged.passRetained(track).toOpaque()
+}
+
+@_cdecl("LKVideoRendererCreate")
+public func LKVideoRendererCreate(data: UnsafeRawPointer, onFrame: @escaping @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool, onDrop: @escaping @convention(c) (UnsafeRawPointer) -> Void) -> UnsafeMutableRawPointer {
+ Unmanaged.passRetained(LKVideoRenderer(data: data, onFrame: onFrame, onDrop: onDrop)).toOpaque()
+}
+
+@_cdecl("LKVideoTrackAddRenderer")
+public func LKVideoTrackAddRenderer(track: UnsafeRawPointer, renderer: UnsafeRawPointer) {
+ let track = Unmanaged<Track>.fromOpaque(track).takeUnretainedValue() as! VideoTrack
+ let renderer = Unmanaged<LKVideoRenderer>.fromOpaque(renderer).takeRetainedValue()
+ renderer.track = track
+ track.add(videoRenderer: renderer)
+}
+
+@_cdecl("LKRemoteVideoTrackGetSid")
+public func LKRemoteVideoTrackGetSid(track: UnsafeRawPointer) -> CFString {
+ let track = Unmanaged<RemoteVideoTrack>.fromOpaque(track).takeUnretainedValue()
+ return track.sid! as CFString
+}
+
+@_cdecl("LKRemoteAudioTrackGetSid")
+public func LKRemoteAudioTrackGetSid(track: UnsafeRawPointer) -> CFString {
+ let track = Unmanaged<RemoteAudioTrack>.fromOpaque(track).takeUnretainedValue()
+ return track.sid! as CFString
+}
+
+@_cdecl("LKDisplaySources")
+public func LKDisplaySources(data: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, CFArray?, CFString?) -> Void) {
+ MacOSScreenCapturer.sources(for: .display, includeCurrentApplication: false, preferredMethod: .legacy).then { displaySources in
+ callback(data, displaySources as CFArray, nil)
+ }.catch { error in
+ callback(data, nil, error.localizedDescription as CFString)
+ }
+}
+
+@_cdecl("LKLocalTrackPublicationSetMute")
+public func LKLocalTrackPublicationSetMute(
+ publication: UnsafeRawPointer,
+ muted: Bool,
+ on_complete: @escaping @convention(c) (UnsafeRawPointer, CFString?) -> Void,
+ callback_data: UnsafeRawPointer
+) {
+ let publication = Unmanaged<LocalTrackPublication>.fromOpaque(publication).takeUnretainedValue()
+
+ if muted {
+ publication.mute().then {
+ on_complete(callback_data, nil)
+ }.catch { error in
+ on_complete(callback_data, error.localizedDescription as CFString)
+ }
+ } else {
+ publication.unmute().then {
+ on_complete(callback_data, nil)
+ }.catch { error in
+ on_complete(callback_data, error.localizedDescription as CFString)
+ }
+ }
+}
+
+@_cdecl("LKRemoteTrackPublicationSetEnabled")
+public func LKRemoteTrackPublicationSetEnabled(
+ publication: UnsafeRawPointer,
+ enabled: Bool,
+ on_complete: @escaping @convention(c) (UnsafeRawPointer, CFString?) -> Void,
+ callback_data: UnsafeRawPointer
+) {
+ let publication = Unmanaged<RemoteTrackPublication>.fromOpaque(publication).takeUnretainedValue()
+
+ publication.set(enabled: enabled).then {
+ on_complete(callback_data, nil)
+ }.catch { error in
+ on_complete(callback_data, error.localizedDescription as CFString)
+ }
+}
+
+@_cdecl("LKRemoteTrackPublicationIsMuted")
+public func LKRemoteTrackPublicationIsMuted(
+ publication: UnsafeRawPointer
+) -> Bool {
+ let publication = Unmanaged<RemoteTrackPublication>.fromOpaque(publication).takeUnretainedValue()
+
+ return publication.muted
+}
+
+@_cdecl("LKRemoteTrackPublicationGetSid")
+public func LKRemoteTrackPublicationGetSid(
+ publication: UnsafeRawPointer
+) -> CFString {
+ let publication = Unmanaged<RemoteTrackPublication>.fromOpaque(publication).takeUnretainedValue()
+
+ return publication.sid as CFString
+}
@@ -0,0 +1,172 @@
+use serde::Deserialize;
+use std::{
+ env,
+ path::{Path, PathBuf},
+ process::Command,
+};
+
+const SWIFT_PACKAGE_NAME: &str = "LiveKitBridge2";
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SwiftTargetInfo {
+ pub triple: String,
+ pub unversioned_triple: String,
+ pub module_triple: String,
+ pub swift_runtime_compatibility_version: String,
+ #[serde(rename = "librariesRequireRPath")]
+ pub libraries_require_rpath: bool,
+}
+
+#[derive(Debug, Deserialize)]
+#[serde(rename_all = "camelCase")]
+pub struct SwiftPaths {
+ pub runtime_library_paths: Vec<String>,
+ pub runtime_library_import_paths: Vec<String>,
+ pub runtime_resource_path: String,
+}
+
+#[derive(Debug, Deserialize)]
+pub struct SwiftTarget {
+ pub target: SwiftTargetInfo,
+ pub paths: SwiftPaths,
+}
+
+const MACOS_TARGET_VERSION: &str = "10.15.7";
+
+fn main() {
+    if env::var("CARGO_FEATURE_TEST_SUPPORT").is_err() {
+ let swift_target = get_swift_target();
+
+ build_bridge(&swift_target);
+ link_swift_stdlib(&swift_target);
+ link_webrtc_framework(&swift_target);
+
+ // Register exported Objective-C selectors, protocols, etc when building example binaries.
+ println!("cargo:rustc-link-arg=-Wl,-ObjC");
+ }
+}
+
+fn build_bridge(swift_target: &SwiftTarget) {
+ println!("cargo:rerun-if-env-changed=MACOSX_DEPLOYMENT_TARGET");
+ println!("cargo:rerun-if-changed={}/Sources", SWIFT_PACKAGE_NAME);
+ println!(
+ "cargo:rerun-if-changed={}/Package.swift",
+ SWIFT_PACKAGE_NAME
+ );
+ println!(
+ "cargo:rerun-if-changed={}/Package.resolved",
+ SWIFT_PACKAGE_NAME
+ );
+
+ let swift_package_root = swift_package_root();
+ let swift_target_folder = swift_target_folder();
+ if !Command::new("swift")
+ .arg("build")
+ .arg("--disable-automatic-resolution")
+ .args(["--configuration", &env::var("PROFILE").unwrap()])
+ .args(["--triple", &swift_target.target.triple])
+ .args(["--build-path".into(), swift_target_folder])
+ .current_dir(&swift_package_root)
+ .status()
+ .unwrap()
+ .success()
+ {
+ panic!(
+ "Failed to compile swift package in {}",
+ swift_package_root.display()
+ );
+ }
+
+ println!(
+ "cargo:rustc-link-search=native={}",
+ swift_target.out_dir_path().display()
+ );
+ println!("cargo:rustc-link-lib=static={}", SWIFT_PACKAGE_NAME);
+}
+
+fn link_swift_stdlib(swift_target: &SwiftTarget) {
+ for path in &swift_target.paths.runtime_library_paths {
+ println!("cargo:rustc-link-search=native={}", path);
+ }
+}
+
+fn link_webrtc_framework(swift_target: &SwiftTarget) {
+ let swift_out_dir_path = swift_target.out_dir_path();
+ println!("cargo:rustc-link-lib=framework=WebRTC");
+ println!(
+ "cargo:rustc-link-search=framework={}",
+ swift_out_dir_path.display()
+ );
+ // Find WebRTC.framework as a sibling of the executable when running tests.
+ println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path");
+ // Find WebRTC.framework in parent directory of the executable when running examples.
+ println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path/..");
+
+ let source_path = swift_out_dir_path.join("WebRTC.framework");
+ let deps_dir_path =
+ PathBuf::from(env::var("OUT_DIR").unwrap()).join("../../../deps/WebRTC.framework");
+ let target_dir_path =
+ PathBuf::from(env::var("OUT_DIR").unwrap()).join("../../../WebRTC.framework");
+ copy_dir(&source_path, &deps_dir_path);
+ copy_dir(&source_path, &target_dir_path);
+}
+
+fn get_swift_target() -> SwiftTarget {
+ let mut arch = env::var("CARGO_CFG_TARGET_ARCH").unwrap();
+ if arch == "aarch64" {
+ arch = "arm64".into();
+ }
+ let target = format!("{}-apple-macosx{}", arch, MACOS_TARGET_VERSION);
+
+ let swift_target_info_str = Command::new("swift")
+ .args(["-target", &target, "-print-target-info"])
+ .output()
+ .unwrap()
+ .stdout;
+
+ serde_json::from_slice(&swift_target_info_str).unwrap()
+}
+
+fn swift_package_root() -> PathBuf {
+ env::current_dir().unwrap().join(SWIFT_PACKAGE_NAME)
+}
+
+fn swift_target_folder() -> PathBuf {
+ env::current_dir()
+ .unwrap()
+ .join(format!("../../target/{SWIFT_PACKAGE_NAME}"))
+}
+
+fn copy_dir(source: &Path, destination: &Path) {
+ assert!(
+ Command::new("rm")
+ .arg("-rf")
+ .arg(destination)
+ .status()
+ .unwrap()
+ .success(),
+ "could not remove {:?} before copying",
+ destination
+ );
+
+ assert!(
+ Command::new("cp")
+ .arg("-R")
+ .args([source, destination])
+ .status()
+ .unwrap()
+ .success(),
+ "could not copy {:?} to {:?}",
+ source,
+ destination
+ );
+}
+
+impl SwiftTarget {
+ fn out_dir_path(&self) -> PathBuf {
+ swift_target_folder()
+ .join(&self.target.unversioned_triple)
+ .join(env::var("PROFILE").unwrap())
+ }
+}
@@ -0,0 +1,178 @@
+use std::{sync::Arc, time::Duration};
+
+use futures::StreamExt;
+use gpui2::KeyBinding;
+use live_kit_client2::{
+ LocalAudioTrack, LocalVideoTrack, RemoteAudioTrackUpdate, RemoteVideoTrackUpdate, Room,
+};
+use live_kit_server::token::{self, VideoGrant};
+use log::LevelFilter;
+use serde_derive::Deserialize;
+use simplelog::SimpleLogger;
+
+// Zero-sized gpui action dispatched by the `cmd-q` key binding to quit the
+// example app.
+#[derive(Deserialize, Debug, Clone, Copy, PartialEq, Eq, Default)]
+struct Quit;
+
+// Manual integration test for the LiveKit bridge: connects two participants
+// to the same room, publishes an audio track and a screen-share video track
+// from participant 1, and asserts that participant 2 observes the matching
+// subscribe / mute / unsubscribe events. Requires a running LiveKit server
+// (defaults target a local dev instance).
+fn main() {
+    SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
+
+    gpui2::App::production(Arc::new(())).run(|cx| {
+        // Announce which backend this binary was compiled against, since
+        // the crate swaps implementations based on `test-support`.
+        #[cfg(any(test, feature = "test-support"))]
+        println!("USING TEST LIVEKIT");
+
+        #[cfg(not(any(test, feature = "test-support")))]
+        println!("USING REAL LIVEKIT");
+
+        cx.activate(true);
+
+        cx.on_action(quit);
+        cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
+
+        // todo!()
+        // cx.set_menus(vec![Menu {
+        //     name: "Zed",
+        //     items: vec![MenuItem::Action {
+        //         name: "Quit",
+        //         action: Box::new(Quit),
+        //         os_action: None,
+        //     }],
+        // }]);
+
+        // Server coordinates; the defaults match a local `livekit-server --dev`.
+        let live_kit_url = std::env::var("LIVE_KIT_URL").unwrap_or("http://localhost:7880".into());
+        let live_kit_key = std::env::var("LIVE_KIT_KEY").unwrap_or("devkey".into());
+        let live_kit_secret = std::env::var("LIVE_KIT_SECRET").unwrap_or("secret".into());
+
+        cx.spawn_on_main(|cx| async move {
+            // Participant 1 ("room_a") will publish tracks.
+            let user_a_token = token::create(
+                &live_kit_key,
+                &live_kit_secret,
+                Some("test-participant-1"),
+                VideoGrant::to_join("test-room"),
+            )
+            .unwrap();
+            let room_a = Room::new();
+            room_a.connect(&live_kit_url, &user_a_token).await.unwrap();
+
+            // Participant 2 ("room_b") only observes.
+            let user2_token = token::create(
+                &live_kit_key,
+                &live_kit_secret,
+                Some("test-participant-2"),
+                VideoGrant::to_join("test-room"),
+            )
+            .unwrap();
+            let room_b = Room::new();
+            room_b.connect(&live_kit_url, &user2_token).await.unwrap();
+
+            // Subscribe to updates *before* publishing so no event is missed.
+            let mut audio_track_updates = room_b.remote_audio_track_updates();
+            let audio_track = LocalAudioTrack::create();
+            let audio_track_publication = room_a.publish_audio_track(audio_track).await.unwrap();
+
+            // Expect room_b to see the audio track get subscribed.
+            if let RemoteAudioTrackUpdate::Subscribed(track, _) =
+                audio_track_updates.next().await.unwrap()
+            {
+                let remote_tracks = room_b.remote_audio_tracks("test-participant-1");
+                assert_eq!(remote_tracks.len(), 1);
+                assert_eq!(remote_tracks[0].publisher_id(), "test-participant-1");
+                assert_eq!(track.publisher_id(), "test-participant-1");
+            } else {
+                panic!("unexpected message");
+            }
+
+            // Mute, expect a MuteChanged(muted=true) on the observer side.
+            audio_track_publication.set_mute(true).await.unwrap();
+
+            println!("waiting for mute changed!");
+            if let RemoteAudioTrackUpdate::MuteChanged { track_id, muted } =
+                audio_track_updates.next().await.unwrap()
+            {
+                let remote_tracks = room_b.remote_audio_tracks("test-participant-1");
+                assert_eq!(remote_tracks[0].sid(), track_id);
+                assert_eq!(muted, true);
+            } else {
+                panic!("unexpected message");
+            }
+
+            // Unmute, expect the symmetric MuteChanged(muted=false).
+            audio_track_publication.set_mute(false).await.unwrap();
+
+            if let RemoteAudioTrackUpdate::MuteChanged { track_id, muted } =
+                audio_track_updates.next().await.unwrap()
+            {
+                let remote_tracks = room_b.remote_audio_tracks("test-participant-1");
+                assert_eq!(remote_tracks[0].sid(), track_id);
+                assert_eq!(muted, false);
+            } else {
+                panic!("unexpected message");
+            }
+
+            // Manual listening window: audio actually flows during this pause.
+            println!("Pausing for 5 seconds to test audio, make some noise!");
+            let timer = cx.executor().timer(Duration::from_secs(5));
+            timer.await;
+            let remote_audio_track = room_b
+                .remote_audio_tracks("test-participant-1")
+                .pop()
+                .unwrap();
+            room_a.unpublish_track(audio_track_publication);
+
+            // Clear out any active speakers changed messages
+            let mut next = audio_track_updates.next().await.unwrap();
+            while let RemoteAudioTrackUpdate::ActiveSpeakersChanged { speakers } = next {
+                println!("Speakers changed: {:?}", speakers);
+                next = audio_track_updates.next().await.unwrap();
+            }
+
+            // Unpublishing must surface as an Unsubscribed event on room_b.
+            if let RemoteAudioTrackUpdate::Unsubscribed {
+                publisher_id,
+                track_id,
+            } = next
+            {
+                assert_eq!(publisher_id, "test-participant-1");
+                assert_eq!(remote_audio_track.sid(), track_id);
+                assert_eq!(room_b.remote_audio_tracks("test-participant-1").len(), 0);
+            } else {
+                panic!("unexpected message");
+            }
+
+            // Repeat the same subscribe/unsubscribe round-trip for video,
+            // using a screen share of the first available display.
+            let mut video_track_updates = room_b.remote_video_track_updates();
+            let displays = room_a.display_sources().await.unwrap();
+            let display = displays.into_iter().next().unwrap();
+
+            let local_video_track = LocalVideoTrack::screen_share_for_display(&display);
+            let local_video_track_publication =
+                room_a.publish_video_track(local_video_track).await.unwrap();
+
+            if let RemoteVideoTrackUpdate::Subscribed(track) =
+                video_track_updates.next().await.unwrap()
+            {
+                let remote_video_tracks = room_b.remote_video_tracks("test-participant-1");
+                assert_eq!(remote_video_tracks.len(), 1);
+                assert_eq!(remote_video_tracks[0].publisher_id(), "test-participant-1");
+                assert_eq!(track.publisher_id(), "test-participant-1");
+            } else {
+                panic!("unexpected message");
+            }
+
+            let remote_video_track = room_b
+                .remote_video_tracks("test-participant-1")
+                .pop()
+                .unwrap();
+            room_a.unpublish_track(local_video_track_publication);
+            if let RemoteVideoTrackUpdate::Unsubscribed {
+                publisher_id,
+                track_id,
+            } = video_track_updates.next().await.unwrap()
+            {
+                assert_eq!(publisher_id, "test-participant-1");
+                assert_eq!(remote_video_track.sid(), track_id);
+                assert_eq!(room_b.remote_video_tracks("test-participant-1").len(), 0);
+            } else {
+                panic!("unexpected message");
+            }
+
+            // All checks passed; shut the app down.
+            cx.update(|cx| cx.quit()).ok();
+        })
+        .detach();
+    });
+}
+
+// Handler for the `Quit` action (bound to cmd-q): terminates the app.
+fn quit(_: &Quit, cx: &mut gpui2::AppContext) {
+    cx.quit();
+}
@@ -0,0 +1,11 @@
+// Compile-time switch between the real LiveKit bridge (`prod`) and the
+// in-process fake (`test`). Exactly one of the two is compiled and its
+// items are re-exported at the crate root, so callers are agnostic to
+// which backend is active. The `test-support` feature lets dependent
+// crates opt into the fake outside of `cfg(test)`.
+#[cfg(not(any(test, feature = "test-support")))]
+pub mod prod;
+
+#[cfg(not(any(test, feature = "test-support")))]
+pub use prod::*;
+
+#[cfg(any(test, feature = "test-support"))]
+pub mod test;
+
+#[cfg(any(test, feature = "test-support"))]
+pub use test::*;
@@ -0,0 +1,947 @@
+use anyhow::{anyhow, Context, Result};
+use core_foundation::{
+ array::{CFArray, CFArrayRef},
+ base::{CFRelease, CFRetain, TCFType},
+ string::{CFString, CFStringRef},
+};
+use futures::{
+ channel::{mpsc, oneshot},
+ Future,
+};
+pub use media::core_video::CVImageBuffer;
+use media::core_video::CVImageBufferRef;
+use parking_lot::Mutex;
+use postage::watch;
+use std::{
+ ffi::c_void,
+ sync::{Arc, Weak},
+};
+
+// SAFETY: Most live kit types are threadsafe:
+// https://github.com/livekit/client-sdk-swift#thread-safety
+// Declares a transparent newtype over a raw pointer received from the
+// Swift side and marks it `Send`, justified by the thread-safety note
+// above. The pointer is opaque to Rust; all operations go through the
+// `LK*` FFI functions below.
+macro_rules! pointer_type {
+    ($pointer_name:ident) => {
+        #[repr(transparent)]
+        #[derive(Copy, Clone, Debug)]
+        pub struct $pointer_name(pub *const std::ffi::c_void);
+        unsafe impl Send for $pointer_name {}
+    };
+}
+
+// Opaque handles to the Swift/Objective-C objects owned by the bridge.
+// These carry no Rust-side behavior; retain/release is managed explicitly
+// by the wrapper types in this file.
+mod swift {
+    pointer_type!(Room);
+    pointer_type!(LocalAudioTrack);
+    pointer_type!(RemoteAudioTrack);
+    pointer_type!(LocalVideoTrack);
+    pointer_type!(RemoteVideoTrack);
+    pointer_type!(LocalTrackPublication);
+    pointer_type!(RemoteTrackPublication);
+    pointer_type!(MacOSDisplay);
+    pointer_type!(RoomDelegate);
+}
+
+// FFI surface of the native bridge. The `LK*` symbols are presumably
+// implemented by the Swift package this crate's build script compiles and
+// links — TODO confirm against the package sources. Conventions visible at
+// the Rust call sites in this file:
+// - `callback_data` pointers are passed back verbatim to the callbacks;
+//   each call site boxes a channel sender and reclaims it in the callback.
+// - Error parameters are `CFStringRef`s that are null on success.
+// - Functions returning `CFArrayRef` may return null for "no results".
+extern "C" {
+    fn LKRoomDelegateCreate(
+        callback_data: *mut c_void,
+        on_did_disconnect: extern "C" fn(callback_data: *mut c_void),
+        on_did_subscribe_to_remote_audio_track: extern "C" fn(
+            callback_data: *mut c_void,
+            publisher_id: CFStringRef,
+            track_id: CFStringRef,
+            remote_track: swift::RemoteAudioTrack,
+            remote_publication: swift::RemoteTrackPublication,
+        ),
+        on_did_unsubscribe_from_remote_audio_track: extern "C" fn(
+            callback_data: *mut c_void,
+            publisher_id: CFStringRef,
+            track_id: CFStringRef,
+        ),
+        on_mute_changed_from_remote_audio_track: extern "C" fn(
+            callback_data: *mut c_void,
+            track_id: CFStringRef,
+            muted: bool,
+        ),
+        on_active_speakers_changed: extern "C" fn(
+            callback_data: *mut c_void,
+            participants: CFArrayRef,
+        ),
+        on_did_subscribe_to_remote_video_track: extern "C" fn(
+            callback_data: *mut c_void,
+            publisher_id: CFStringRef,
+            track_id: CFStringRef,
+            remote_track: swift::RemoteVideoTrack,
+        ),
+        on_did_unsubscribe_from_remote_video_track: extern "C" fn(
+            callback_data: *mut c_void,
+            publisher_id: CFStringRef,
+            track_id: CFStringRef,
+        ),
+    ) -> swift::RoomDelegate;
+
+    fn LKRoomCreate(delegate: swift::RoomDelegate) -> swift::Room;
+    fn LKRoomConnect(
+        room: swift::Room,
+        url: CFStringRef,
+        token: CFStringRef,
+        callback: extern "C" fn(*mut c_void, CFStringRef),
+        callback_data: *mut c_void,
+    );
+    fn LKRoomDisconnect(room: swift::Room);
+    fn LKRoomPublishVideoTrack(
+        room: swift::Room,
+        track: swift::LocalVideoTrack,
+        callback: extern "C" fn(*mut c_void, swift::LocalTrackPublication, CFStringRef),
+        callback_data: *mut c_void,
+    );
+    fn LKRoomPublishAudioTrack(
+        room: swift::Room,
+        track: swift::LocalAudioTrack,
+        callback: extern "C" fn(*mut c_void, swift::LocalTrackPublication, CFStringRef),
+        callback_data: *mut c_void,
+    );
+    fn LKRoomUnpublishTrack(room: swift::Room, publication: swift::LocalTrackPublication);
+
+    fn LKRoomAudioTracksForRemoteParticipant(
+        room: swift::Room,
+        participant_id: CFStringRef,
+    ) -> CFArrayRef;
+
+    fn LKRoomAudioTrackPublicationsForRemoteParticipant(
+        room: swift::Room,
+        participant_id: CFStringRef,
+    ) -> CFArrayRef;
+
+    fn LKRoomVideoTracksForRemoteParticipant(
+        room: swift::Room,
+        participant_id: CFStringRef,
+    ) -> CFArrayRef;
+
+    fn LKVideoRendererCreate(
+        callback_data: *mut c_void,
+        on_frame: extern "C" fn(callback_data: *mut c_void, frame: CVImageBufferRef) -> bool,
+        on_drop: extern "C" fn(callback_data: *mut c_void),
+    ) -> *const c_void;
+
+    fn LKRemoteAudioTrackGetSid(track: swift::RemoteAudioTrack) -> CFStringRef;
+    fn LKVideoTrackAddRenderer(track: swift::RemoteVideoTrack, renderer: *const c_void);
+    fn LKRemoteVideoTrackGetSid(track: swift::RemoteVideoTrack) -> CFStringRef;
+
+    fn LKDisplaySources(
+        callback_data: *mut c_void,
+        callback: extern "C" fn(
+            callback_data: *mut c_void,
+            sources: CFArrayRef,
+            error: CFStringRef,
+        ),
+    );
+    fn LKCreateScreenShareTrackForDisplay(display: swift::MacOSDisplay) -> swift::LocalVideoTrack;
+    fn LKLocalAudioTrackCreateTrack() -> swift::LocalAudioTrack;
+
+    fn LKLocalTrackPublicationSetMute(
+        publication: swift::LocalTrackPublication,
+        muted: bool,
+        on_complete: extern "C" fn(callback_data: *mut c_void, error: CFStringRef),
+        callback_data: *mut c_void,
+    );
+
+    fn LKRemoteTrackPublicationSetEnabled(
+        publication: swift::RemoteTrackPublication,
+        enabled: bool,
+        on_complete: extern "C" fn(callback_data: *mut c_void, error: CFStringRef),
+        callback_data: *mut c_void,
+    );
+
+    fn LKRemoteTrackPublicationIsMuted(publication: swift::RemoteTrackPublication) -> bool;
+    fn LKRemoteTrackPublicationGetSid(publication: swift::RemoteTrackPublication) -> CFStringRef;
+}
+
+/// Identifier string ("sid") LiveKit assigns to tracks.
+pub type Sid = String;
+
+// Connection status surfaced to observers via `Room::status`.
+#[derive(Clone, Eq, PartialEq)]
+pub enum ConnectionState {
+    Disconnected,
+    // Records the url and token the room connected with.
+    Connected { url: String, token: String },
+}
+
+// Rust-side wrapper around the native LiveKit room. All fields are behind
+// mutexes so `&Room` methods can be called from any thread.
+pub struct Room {
+    // Handle to the native room object; released in `Drop`.
+    native_room: Mutex<swift::Room>,
+    // Watch channel broadcasting connection-state changes to `status()` observers.
+    connection: Mutex<(
+        watch::Sender<ConnectionState>,
+        watch::Receiver<ConnectionState>,
+    )>,
+    // Senders for the receivers handed out by `remote_*_track_updates`;
+    // entries whose receiver was dropped are pruned on the next broadcast.
+    remote_audio_track_subscribers: Mutex<Vec<mpsc::UnboundedSender<RemoteAudioTrackUpdate>>>,
+    remote_video_track_subscribers: Mutex<Vec<mpsc::UnboundedSender<RemoteVideoTrackUpdate>>>,
+    // Keeps the native delegate (and the weak back-pointer it holds) alive
+    // for the lifetime of the room.
+    _delegate: Mutex<RoomDelegate>,
+}
+
+// Compile-time assertion that `Room` is `Send`.
+trait AssertSendSync: Send {}
+impl AssertSendSync for Room {}
+
+impl Room {
+    /// Creates the room together with its native counterpart and delegate.
+    /// `Arc::new_cyclic` is used because the delegate needs a `Weak` back-
+    /// pointer to the room before the `Arc` exists.
+    pub fn new() -> Arc<Self> {
+        Arc::new_cyclic(|weak_room| {
+            let delegate = RoomDelegate::new(weak_room.clone());
+            Self {
+                native_room: Mutex::new(unsafe { LKRoomCreate(delegate.native_delegate) }),
+                connection: Mutex::new(watch::channel_with(ConnectionState::Disconnected)),
+                remote_audio_track_subscribers: Default::default(),
+                remote_video_track_subscribers: Default::default(),
+                _delegate: Mutex::new(delegate),
+            }
+        })
+    }
+
+    /// Returns a watch receiver yielding connection-state changes.
+    pub fn status(&self) -> watch::Receiver<ConnectionState> {
+        self.connection.lock().1.clone()
+    }
+
+    /// Starts connecting immediately (before the returned future is
+    /// polled); the future resolves once the native completion callback
+    /// reports success or an error string.
+    pub fn connect(self: &Arc<Self>, url: &str, token: &str) -> impl Future<Output = Result<()>> {
+        let url = CFString::new(url);
+        let token = CFString::new(token);
+        let (did_connect, tx, rx) = Self::build_done_callback();
+        unsafe {
+            LKRoomConnect(
+                *self.native_room.lock(),
+                url.as_concrete_TypeRef(),
+                token.as_concrete_TypeRef(),
+                did_connect,
+                tx,
+            )
+        }
+
+        let this = self.clone();
+        let url = url.to_string();
+        let token = token.to_string();
+        async move {
+            rx.await.unwrap().context("error connecting to room")?;
+            // Remember the credentials we connected with.
+            *this.connection.lock().0.borrow_mut() = ConnectionState::Connected { url, token };
+            Ok(())
+        }
+    }
+
+    // Invoked by the delegate when the native room disconnects.
+    fn did_disconnect(&self) {
+        *self.connection.lock().0.borrow_mut() = ConnectionState::Disconnected;
+    }
+
+    /// Enumerates displays available for screen sharing.
+    pub fn display_sources(self: &Arc<Self>) -> impl Future<Output = Result<Vec<MacOSDisplay>>> {
+        extern "C" fn callback(tx: *mut c_void, sources: CFArrayRef, error: CFStringRef) {
+            unsafe {
+                // Reclaim the boxed sender handed to the native side below.
+                let tx = Box::from_raw(tx as *mut oneshot::Sender<Result<Vec<MacOSDisplay>>>);
+
+                if sources.is_null() {
+                    let _ = tx.send(Err(anyhow!("{}", CFString::wrap_under_get_rule(error))));
+                } else {
+                    // `MacOSDisplay::new` retains each element, so the
+                    // handles stay valid after the array is released.
+                    let sources = CFArray::wrap_under_get_rule(sources)
+                        .into_iter()
+                        .map(|source| MacOSDisplay::new(swift::MacOSDisplay(*source)))
+                        .collect();
+
+                    let _ = tx.send(Ok(sources));
+                }
+            }
+        }
+
+        let (tx, rx) = oneshot::channel();
+
+        unsafe {
+            LKDisplaySources(Box::into_raw(Box::new(tx)) as *mut _, callback);
+        }
+
+        async move { rx.await.unwrap() }
+    }
+
+    /// Publishes a local video track; resolves to the resulting publication
+    /// (which can later be passed to `unpublish_track`).
+    pub fn publish_video_track(
+        self: &Arc<Self>,
+        track: LocalVideoTrack,
+    ) -> impl Future<Output = Result<LocalTrackPublication>> {
+        let (tx, rx) = oneshot::channel::<Result<LocalTrackPublication>>();
+        extern "C" fn callback(
+            tx: *mut c_void,
+            publication: swift::LocalTrackPublication,
+            error: CFStringRef,
+        ) {
+            let tx =
+                unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<LocalTrackPublication>>) };
+            // A null error string signals success.
+            if error.is_null() {
+                let _ = tx.send(Ok(LocalTrackPublication::new(publication)));
+            } else {
+                let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
+                let _ = tx.send(Err(anyhow!(error)));
+            }
+        }
+        unsafe {
+            LKRoomPublishVideoTrack(
+                *self.native_room.lock(),
+                track.0,
+                callback,
+                Box::into_raw(Box::new(tx)) as *mut c_void,
+            );
+        }
+        async { rx.await.unwrap().context("error publishing video track") }
+    }
+
+    /// Publishes a local audio track; mirrors `publish_video_track`.
+    pub fn publish_audio_track(
+        self: &Arc<Self>,
+        track: LocalAudioTrack,
+    ) -> impl Future<Output = Result<LocalTrackPublication>> {
+        let (tx, rx) = oneshot::channel::<Result<LocalTrackPublication>>();
+        extern "C" fn callback(
+            tx: *mut c_void,
+            publication: swift::LocalTrackPublication,
+            error: CFStringRef,
+        ) {
+            let tx =
+                unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<LocalTrackPublication>>) };
+            if error.is_null() {
+                let _ = tx.send(Ok(LocalTrackPublication::new(publication)));
+            } else {
+                let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
+                let _ = tx.send(Err(anyhow!(error)));
+            }
+        }
+        unsafe {
+            LKRoomPublishAudioTrack(
+                *self.native_room.lock(),
+                track.0,
+                callback,
+                Box::into_raw(Box::new(tx)) as *mut c_void,
+            );
+        }
+        async { rx.await.unwrap().context("error publishing audio track") }
+    }
+
+    /// Stops publishing a previously-published local track.
+    pub fn unpublish_track(&self, publication: LocalTrackPublication) {
+        unsafe {
+            LKRoomUnpublishTrack(*self.native_room.lock(), publication.0);
+        }
+    }
+
+    /// Snapshot of the remote video tracks currently published by the
+    /// given participant (empty if the native side returns null).
+    pub fn remote_video_tracks(&self, participant_id: &str) -> Vec<Arc<RemoteVideoTrack>> {
+        unsafe {
+            let tracks = LKRoomVideoTracksForRemoteParticipant(
+                *self.native_room.lock(),
+                CFString::new(participant_id).as_concrete_TypeRef(),
+            );
+
+            if tracks.is_null() {
+                Vec::new()
+            } else {
+                let tracks = CFArray::wrap_under_get_rule(tracks);
+                tracks
+                    .into_iter()
+                    .map(|native_track| {
+                        let native_track = swift::RemoteVideoTrack(*native_track);
+                        let id =
+                            CFString::wrap_under_get_rule(LKRemoteVideoTrackGetSid(native_track))
+                                .to_string();
+                        // `RemoteVideoTrack::new` retains the handle.
+                        Arc::new(RemoteVideoTrack::new(
+                            native_track,
+                            id,
+                            participant_id.into(),
+                        ))
+                    })
+                    .collect()
+            }
+        }
+    }
+
+    /// Snapshot of the remote audio tracks currently published by the
+    /// given participant; mirrors `remote_video_tracks`.
+    pub fn remote_audio_tracks(&self, participant_id: &str) -> Vec<Arc<RemoteAudioTrack>> {
+        unsafe {
+            let tracks = LKRoomAudioTracksForRemoteParticipant(
+                *self.native_room.lock(),
+                CFString::new(participant_id).as_concrete_TypeRef(),
+            );
+
+            if tracks.is_null() {
+                Vec::new()
+            } else {
+                let tracks = CFArray::wrap_under_get_rule(tracks);
+                tracks
+                    .into_iter()
+                    .map(|native_track| {
+                        let native_track = swift::RemoteAudioTrack(*native_track);
+                        let id =
+                            CFString::wrap_under_get_rule(LKRemoteAudioTrackGetSid(native_track))
+                                .to_string();
+                        Arc::new(RemoteAudioTrack::new(
+                            native_track,
+                            id,
+                            participant_id.into(),
+                        ))
+                    })
+                    .collect()
+            }
+        }
+    }
+
+    /// Snapshot of the given participant's audio track publications.
+    pub fn remote_audio_track_publications(
+        &self,
+        participant_id: &str,
+    ) -> Vec<Arc<RemoteTrackPublication>> {
+        unsafe {
+            let tracks = LKRoomAudioTrackPublicationsForRemoteParticipant(
+                *self.native_room.lock(),
+                CFString::new(participant_id).as_concrete_TypeRef(),
+            );
+
+            if tracks.is_null() {
+                Vec::new()
+            } else {
+                let tracks = CFArray::wrap_under_get_rule(tracks);
+                tracks
+                    .into_iter()
+                    .map(|native_track_publication| {
+                        let native_track_publication =
+                            swift::RemoteTrackPublication(*native_track_publication);
+                        Arc::new(RemoteTrackPublication::new(native_track_publication))
+                    })
+                    .collect()
+            }
+        }
+    }
+
+    /// Registers a new subscriber for remote-audio-track events.
+    pub fn remote_audio_track_updates(&self) -> mpsc::UnboundedReceiver<RemoteAudioTrackUpdate> {
+        let (tx, rx) = mpsc::unbounded();
+        self.remote_audio_track_subscribers.lock().push(tx);
+        rx
+    }
+
+    /// Registers a new subscriber for remote-video-track events.
+    pub fn remote_video_track_updates(&self) -> mpsc::UnboundedReceiver<RemoteVideoTrackUpdate> {
+        let (tx, rx) = mpsc::unbounded();
+        self.remote_video_track_subscribers.lock().push(tx);
+        rx
+    }
+
+    // The following `did_*` methods fan delegate events out to all live
+    // subscribers, dropping senders whose receivers have gone away.
+    fn did_subscribe_to_remote_audio_track(
+        &self,
+        track: RemoteAudioTrack,
+        publication: RemoteTrackPublication,
+    ) {
+        let track = Arc::new(track);
+        let publication = Arc::new(publication);
+        self.remote_audio_track_subscribers.lock().retain(|tx| {
+            tx.unbounded_send(RemoteAudioTrackUpdate::Subscribed(
+                track.clone(),
+                publication.clone(),
+            ))
+            .is_ok()
+        });
+    }
+
+    fn did_unsubscribe_from_remote_audio_track(&self, publisher_id: String, track_id: String) {
+        self.remote_audio_track_subscribers.lock().retain(|tx| {
+            tx.unbounded_send(RemoteAudioTrackUpdate::Unsubscribed {
+                publisher_id: publisher_id.clone(),
+                track_id: track_id.clone(),
+            })
+            .is_ok()
+        });
+    }
+
+    fn mute_changed_from_remote_audio_track(&self, track_id: String, muted: bool) {
+        self.remote_audio_track_subscribers.lock().retain(|tx| {
+            tx.unbounded_send(RemoteAudioTrackUpdate::MuteChanged {
+                track_id: track_id.clone(),
+                muted,
+            })
+            .is_ok()
+        });
+    }
+
+    // A vec of publisher IDs
+    fn active_speakers_changed(&self, speakers: Vec<String>) {
+        self.remote_audio_track_subscribers
+            .lock()
+            .retain(move |tx| {
+                tx.unbounded_send(RemoteAudioTrackUpdate::ActiveSpeakersChanged {
+                    speakers: speakers.clone(),
+                })
+                .is_ok()
+            });
+    }
+
+    fn did_subscribe_to_remote_video_track(&self, track: RemoteVideoTrack) {
+        let track = Arc::new(track);
+        self.remote_video_track_subscribers.lock().retain(|tx| {
+            tx.unbounded_send(RemoteVideoTrackUpdate::Subscribed(track.clone()))
+                .is_ok()
+        });
+    }
+
+    fn did_unsubscribe_from_remote_video_track(&self, publisher_id: String, track_id: String) {
+        self.remote_video_track_subscribers.lock().retain(|tx| {
+            tx.unbounded_send(RemoteVideoTrackUpdate::Unsubscribed {
+                publisher_id: publisher_id.clone(),
+                track_id: track_id.clone(),
+            })
+            .is_ok()
+        });
+    }
+
+    // Builds a (callback, context, receiver) triple for FFI completion
+    // handlers that report success or an error string. The boxed sender is
+    // reclaimed inside the callback; if the native side never invokes the
+    // callback the box leaks — NOTE(review): confirm the bridge always
+    // calls completion handlers exactly once.
+    fn build_done_callback() -> (
+        extern "C" fn(*mut c_void, CFStringRef),
+        *mut c_void,
+        oneshot::Receiver<Result<()>>,
+    ) {
+        let (tx, rx) = oneshot::channel();
+        extern "C" fn done_callback(tx: *mut c_void, error: CFStringRef) {
+            let tx = unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<()>>) };
+            if error.is_null() {
+                let _ = tx.send(Ok(()));
+            } else {
+                let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
+                let _ = tx.send(Err(anyhow!(error)));
+            }
+        }
+        (
+            done_callback,
+            Box::into_raw(Box::new(tx)) as *mut c_void,
+            rx,
+        )
+    }
+
+    // Exists only so the prod and test backends expose the same API; on
+    // the prod build calling it is a programming error.
+    pub fn set_display_sources(&self, _: Vec<MacOSDisplay>) {
+        unreachable!("This is a test-only function")
+    }
+}
+
+impl Drop for Room {
+    fn drop(&mut self) {
+        unsafe {
+            // Disconnect before releasing our reference to the native room.
+            let native_room = &*self.native_room.lock();
+            LKRoomDisconnect(*native_room);
+            CFRelease(native_room.0);
+        }
+    }
+}
+
+// Bridges native room-delegate callbacks back into `Room` methods.
+struct RoomDelegate {
+    // Handle to the native delegate object; released in `Drop`.
+    native_delegate: swift::RoomDelegate,
+    // Keeps the weak reference (whose raw pointer the native delegate
+    // holds as callback context) alive for the delegate's lifetime.
+    _weak_room: Weak<Room>,
+}
+
+impl RoomDelegate {
+    fn new(weak_room: Weak<Room>) -> Self {
+        // `weak_room.as_ptr()` is handed to the native side as the callback
+        // context. Each callback below reconstructs the weak reference with
+        // `Weak::from_raw` and MUST hand it back via `Weak::into_raw` so the
+        // weak count stays balanced; `_weak_room` keeps the underlying
+        // allocation alive even after the `Room` itself is dropped.
+        let native_delegate = unsafe {
+            LKRoomDelegateCreate(
+                weak_room.as_ptr() as *mut c_void,
+                Self::on_did_disconnect,
+                Self::on_did_subscribe_to_remote_audio_track,
+                Self::on_did_unsubscribe_from_remote_audio_track,
+                Self::on_mute_change_from_remote_audio_track,
+                Self::on_active_speakers_changed,
+                Self::on_did_subscribe_to_remote_video_track,
+                Self::on_did_unsubscribe_from_remote_video_track,
+            )
+        };
+        Self {
+            native_delegate,
+            _weak_room: weak_room,
+        }
+    }
+
+    extern "C" fn on_did_disconnect(room: *mut c_void) {
+        let room = unsafe { Weak::from_raw(room as *mut Room) };
+        if let Some(room) = room.upgrade() {
+            room.did_disconnect();
+        }
+        // Re-leak the weak reference; see `new` for the ownership contract.
+        let _ = Weak::into_raw(room);
+    }
+
+    extern "C" fn on_did_subscribe_to_remote_audio_track(
+        room: *mut c_void,
+        publisher_id: CFStringRef,
+        track_id: CFStringRef,
+        track: swift::RemoteAudioTrack,
+        publication: swift::RemoteTrackPublication,
+    ) {
+        let room = unsafe { Weak::from_raw(room as *mut Room) };
+        let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
+        let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
+        // The wrappers retain the native handles, so they outlive this call.
+        let track = RemoteAudioTrack::new(track, track_id, publisher_id);
+        let publication = RemoteTrackPublication::new(publication);
+        if let Some(room) = room.upgrade() {
+            room.did_subscribe_to_remote_audio_track(track, publication);
+        }
+        let _ = Weak::into_raw(room);
+    }
+
+    extern "C" fn on_did_unsubscribe_from_remote_audio_track(
+        room: *mut c_void,
+        publisher_id: CFStringRef,
+        track_id: CFStringRef,
+    ) {
+        let room = unsafe { Weak::from_raw(room as *mut Room) };
+        let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
+        let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
+        if let Some(room) = room.upgrade() {
+            room.did_unsubscribe_from_remote_audio_track(publisher_id, track_id);
+        }
+        let _ = Weak::into_raw(room);
+    }
+
+    extern "C" fn on_mute_change_from_remote_audio_track(
+        room: *mut c_void,
+        track_id: CFStringRef,
+        muted: bool,
+    ) {
+        let room = unsafe { Weak::from_raw(room as *mut Room) };
+        let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
+        if let Some(room) = room.upgrade() {
+            room.mute_changed_from_remote_audio_track(track_id, muted);
+        }
+        let _ = Weak::into_raw(room);
+    }
+
+    extern "C" fn on_active_speakers_changed(room: *mut c_void, participants: CFArrayRef) {
+        // Null array: nothing to report. Note this early return happens
+        // before the weak reference is reconstructed, so no balancing is
+        // needed on this path.
+        if participants.is_null() {
+            return;
+        }
+
+        let room = unsafe { Weak::from_raw(room as *mut Room) };
+        let speakers = unsafe {
+            CFArray::wrap_under_get_rule(participants)
+                .into_iter()
+                .map(
+                    |speaker: core_foundation::base::ItemRef<'_, *const c_void>| {
+                        CFString::wrap_under_get_rule(*speaker as CFStringRef).to_string()
+                    },
+                )
+                .collect()
+        };
+
+        if let Some(room) = room.upgrade() {
+            room.active_speakers_changed(speakers);
+        }
+        let _ = Weak::into_raw(room);
+    }
+
+    extern "C" fn on_did_subscribe_to_remote_video_track(
+        room: *mut c_void,
+        publisher_id: CFStringRef,
+        track_id: CFStringRef,
+        track: swift::RemoteVideoTrack,
+    ) {
+        let room = unsafe { Weak::from_raw(room as *mut Room) };
+        let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
+        let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
+        let track = RemoteVideoTrack::new(track, track_id, publisher_id);
+        if let Some(room) = room.upgrade() {
+            room.did_subscribe_to_remote_video_track(track);
+        }
+        let _ = Weak::into_raw(room);
+    }
+
+    extern "C" fn on_did_unsubscribe_from_remote_video_track(
+        room: *mut c_void,
+        publisher_id: CFStringRef,
+        track_id: CFStringRef,
+    ) {
+        let room = unsafe { Weak::from_raw(room as *mut Room) };
+        let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
+        let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
+        if let Some(room) = room.upgrade() {
+            room.did_unsubscribe_from_remote_video_track(publisher_id, track_id);
+        }
+        let _ = Weak::into_raw(room);
+    }
+}
+
+impl Drop for RoomDelegate {
+    fn drop(&mut self) {
+        unsafe {
+            CFRelease(self.native_delegate.0);
+        }
+    }
+}
+
+// Owned handle to a local microphone audio track. Unlike the Remote* types
+// there is no CFRetain in the constructor: the LK*Create function is
+// assumed to return an already-retained (+1) reference that Drop releases
+// — TODO confirm against the Swift bridge.
+pub struct LocalAudioTrack(swift::LocalAudioTrack);
+
+impl LocalAudioTrack {
+    pub fn create() -> Self {
+        Self(unsafe { LKLocalAudioTrackCreateTrack() })
+    }
+}
+
+impl Drop for LocalAudioTrack {
+    fn drop(&mut self) {
+        unsafe { CFRelease(self.0 .0) }
+    }
+}
+
+// Owned handle to a local video track (currently created only for screen
+// sharing). Same ownership convention as `LocalAudioTrack`: the create
+// function is assumed to return a +1 reference, released in Drop.
+pub struct LocalVideoTrack(swift::LocalVideoTrack);
+
+impl LocalVideoTrack {
+    pub fn screen_share_for_display(display: &MacOSDisplay) -> Self {
+        Self(unsafe { LKCreateScreenShareTrackForDisplay(display.0) })
+    }
+}
+
+impl Drop for LocalVideoTrack {
+    fn drop(&mut self) {
+        unsafe { CFRelease(self.0 .0) }
+    }
+}
+
+// Handle to a publication of one of our local tracks; supports muting and
+// is consumed by `Room::unpublish_track`.
+pub struct LocalTrackPublication(swift::LocalTrackPublication);
+
+impl LocalTrackPublication {
+    pub fn new(native_track_publication: swift::LocalTrackPublication) -> Self {
+        // Retain here, balanced by the CFRelease in Drop.
+        unsafe {
+            CFRetain(native_track_publication.0);
+        }
+        Self(native_track_publication)
+    }
+
+    /// Mutes or unmutes the published track; resolves when the native
+    /// completion callback fires.
+    pub fn set_mute(&self, muted: bool) -> impl Future<Output = Result<()>> {
+        let (tx, rx) = futures::channel::oneshot::channel();
+
+        extern "C" fn complete_callback(callback_data: *mut c_void, error: CFStringRef) {
+            // Reclaim the boxed sender passed as callback context below.
+            let tx = unsafe { Box::from_raw(callback_data as *mut oneshot::Sender<Result<()>>) };
+            if error.is_null() {
+                tx.send(Ok(())).ok();
+            } else {
+                let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
+                tx.send(Err(anyhow!(error))).ok();
+            }
+        }
+
+        unsafe {
+            LKLocalTrackPublicationSetMute(
+                self.0,
+                muted,
+                complete_callback,
+                Box::into_raw(Box::new(tx)) as *mut c_void,
+            )
+        }
+
+        async move { rx.await.unwrap() }
+    }
+}
+
+impl Drop for LocalTrackPublication {
+    fn drop(&mut self) {
+        unsafe { CFRelease(self.0 .0) }
+    }
+}
+
+// Handle to another participant's track publication; exposes its sid,
+// mute state, and an enable/disable toggle.
+pub struct RemoteTrackPublication {
+    native_publication: Mutex<swift::RemoteTrackPublication>,
+}
+
+impl RemoteTrackPublication {
+    pub fn new(native_track_publication: swift::RemoteTrackPublication) -> Self {
+        // Retain here, balanced by the CFRelease in Drop.
+        unsafe {
+            CFRetain(native_track_publication.0);
+        }
+        Self {
+            native_publication: Mutex::new(native_track_publication),
+        }
+    }
+
+    /// The publication's LiveKit sid.
+    pub fn sid(&self) -> String {
+        unsafe {
+            CFString::wrap_under_get_rule(LKRemoteTrackPublicationGetSid(
+                *self.native_publication.lock(),
+            ))
+            .to_string()
+        }
+    }
+
+    /// Whether the publisher has muted this track.
+    pub fn is_muted(&self) -> bool {
+        unsafe { LKRemoteTrackPublicationIsMuted(*self.native_publication.lock()) }
+    }
+
+    /// Enables or disables receiving this publication; resolves when the
+    /// native completion callback fires.
+    pub fn set_enabled(&self, enabled: bool) -> impl Future<Output = Result<()>> {
+        let (tx, rx) = futures::channel::oneshot::channel();
+
+        extern "C" fn complete_callback(callback_data: *mut c_void, error: CFStringRef) {
+            let tx = unsafe { Box::from_raw(callback_data as *mut oneshot::Sender<Result<()>>) };
+            if error.is_null() {
+                tx.send(Ok(())).ok();
+            } else {
+                let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
+                tx.send(Err(anyhow!(error))).ok();
+            }
+        }
+
+        unsafe {
+            LKRemoteTrackPublicationSetEnabled(
+                *self.native_publication.lock(),
+                enabled,
+                complete_callback,
+                Box::into_raw(Box::new(tx)) as *mut c_void,
+            )
+        }
+
+        async move { rx.await.unwrap() }
+    }
+}
+
+impl Drop for RemoteTrackPublication {
+    fn drop(&mut self) {
+        unsafe { CFRelease((*self.native_publication.lock()).0) }
+    }
+}
+
+// An audio track published by another participant. The sid and publisher
+// id are captured at construction so accessors don't need FFI calls.
+#[derive(Debug)]
+pub struct RemoteAudioTrack {
+    native_track: Mutex<swift::RemoteAudioTrack>,
+    sid: Sid,
+    publisher_id: String,
+}
+
+impl RemoteAudioTrack {
+    fn new(native_track: swift::RemoteAudioTrack, sid: Sid, publisher_id: String) -> Self {
+        // Retain here, balanced by the CFRelease in Drop.
+        unsafe {
+            CFRetain(native_track.0);
+        }
+        Self {
+            native_track: Mutex::new(native_track),
+            sid,
+            publisher_id,
+        }
+    }
+
+    pub fn sid(&self) -> &str {
+        &self.sid
+    }
+
+    pub fn publisher_id(&self) -> &str {
+        &self.publisher_id
+    }
+
+    // enable/disable are no-ops on the prod backend; they exist so both
+    // backends expose the same API surface.
+    pub fn enable(&self) -> impl Future<Output = Result<()>> {
+        async { Ok(()) }
+    }
+
+    pub fn disable(&self) -> impl Future<Output = Result<()>> {
+        async { Ok(()) }
+    }
+}
+
+impl Drop for RemoteAudioTrack {
+    fn drop(&mut self) {
+        unsafe { CFRelease(self.native_track.lock().0) }
+    }
+}
+
+// A video track published by another participant; frames are consumed via
+// the broadcast receiver returned by `frames`.
+#[derive(Debug)]
+pub struct RemoteVideoTrack {
+    native_track: Mutex<swift::RemoteVideoTrack>,
+    sid: Sid,
+    publisher_id: String,
+}
+
+impl RemoteVideoTrack {
+    fn new(native_track: swift::RemoteVideoTrack, sid: Sid, publisher_id: String) -> Self {
+        // Retain here, balanced by the CFRelease in Drop.
+        unsafe {
+            CFRetain(native_track.0);
+        }
+        Self {
+            native_track: Mutex::new(native_track),
+            sid,
+            publisher_id,
+        }
+    }
+
+    pub fn sid(&self) -> &str {
+        &self.sid
+    }
+
+    pub fn publisher_id(&self) -> &str {
+        &self.publisher_id
+    }
+
+    /// Attaches a renderer to the native track and returns a broadcast
+    /// receiver of decoded frames. The boxed broadcast sender lives on the
+    /// native side: `on_frame` borrows it (leaking it back each call) and
+    /// `on_drop` finally frees it when the renderer is torn down.
+    pub fn frames(&self) -> async_broadcast::Receiver<Frame> {
+        extern "C" fn on_frame(callback_data: *mut c_void, frame: CVImageBufferRef) -> bool {
+            unsafe {
+                let tx = Box::from_raw(callback_data as *mut async_broadcast::Sender<Frame>);
+                let buffer = CVImageBuffer::wrap_under_get_rule(frame);
+                let result = tx.try_broadcast(Frame(buffer));
+                // Leak the sender back — ownership stays with the renderer
+                // until `on_drop` runs.
+                let _ = Box::into_raw(tx);
+                // The returned bool presumably tells the native renderer
+                // whether to keep delivering frames — confirm against the
+                // Swift side.
+                match result {
+                    Ok(_) => true,
+                    Err(async_broadcast::TrySendError::Closed(_))
+                    | Err(async_broadcast::TrySendError::Inactive(_)) => {
+                        log::warn!("no active receiver for frame");
+                        false
+                    }
+                    Err(async_broadcast::TrySendError::Full(_)) => {
+                        log::warn!("skipping frame as receiver is not keeping up");
+                        true
+                    }
+                }
+            }
+        }
+
+        extern "C" fn on_drop(callback_data: *mut c_void) {
+            unsafe {
+                // Final reclamation of the boxed sender created in `frames`.
+                let _ = Box::from_raw(callback_data as *mut async_broadcast::Sender<Frame>);
+            }
+        }
+
+        // Bounded at 64 frames; older frames are dropped (logged above)
+        // when the receiver falls behind.
+        let (tx, rx) = async_broadcast::broadcast(64);
+        unsafe {
+            let renderer = LKVideoRendererCreate(
+                Box::into_raw(Box::new(tx)) as *mut c_void,
+                on_frame,
+                on_drop,
+            );
+            LKVideoTrackAddRenderer(*self.native_track.lock(), renderer);
+            rx
+        }
+    }
+}
+
+impl Drop for RemoteVideoTrack {
+    fn drop(&mut self) {
+        unsafe { CFRelease(self.native_track.lock().0) }
+    }
+}
+
+/// Events delivered to `Room::remote_video_track_updates` subscribers.
+pub enum RemoteVideoTrackUpdate {
+    Subscribed(Arc<RemoteVideoTrack>),
+    Unsubscribed { publisher_id: Sid, track_id: Sid },
+}
+
+/// Events delivered to `Room::remote_audio_track_updates` subscribers.
+pub enum RemoteAudioTrackUpdate {
+    // `speakers` holds the publisher IDs of the currently-active speakers.
+    ActiveSpeakersChanged { speakers: Vec<Sid> },
+    MuteChanged { track_id: Sid, muted: bool },
+    Subscribed(Arc<RemoteAudioTrack>, Arc<RemoteTrackPublication>),
+    Unsubscribed { publisher_id: Sid, track_id: Sid },
+}
+
+// Handle to a macOS display that can be captured for screen sharing;
+// obtained from `Room::display_sources`.
+pub struct MacOSDisplay(swift::MacOSDisplay);
+
+impl MacOSDisplay {
+    fn new(ptr: swift::MacOSDisplay) -> Self {
+        // Retain here, balanced by the CFRelease in Drop.
+        unsafe {
+            CFRetain(ptr.0);
+        }
+        Self(ptr)
+    }
+}
+
+impl Drop for MacOSDisplay {
+    fn drop(&mut self) {
+        unsafe { CFRelease(self.0 .0) }
+    }
+}
+
+// A single decoded video frame backed by a Core Video image buffer.
+#[derive(Clone)]
+pub struct Frame(CVImageBuffer);
+
+impl Frame {
+    pub fn width(&self) -> usize {
+        self.0.width()
+    }
+
+    pub fn height(&self) -> usize {
+        self.0.height()
+    }
+
+    /// Returns the underlying image buffer.
+    pub fn image(&self) -> CVImageBuffer {
+        self.0.clone()
+    }
+}
@@ -0,0 +1,651 @@
+use anyhow::{anyhow, Context, Result};
+use async_trait::async_trait;
+use collections::{BTreeMap, HashMap};
+use futures::Stream;
+use gpui2::Executor;
+use live_kit_server::token;
+use media::core_video::CVImageBuffer;
+use parking_lot::Mutex;
+use postage::watch;
+use std::{future::Future, mem, sync::Arc};
+
+static SERVERS: Mutex<BTreeMap<String, Arc<TestServer>>> = Mutex::new(BTreeMap::new());
+
+pub struct TestServer {
+ pub url: String,
+ pub api_key: String,
+ pub secret_key: String,
+ rooms: Mutex<HashMap<String, TestServerRoom>>,
+ executor: Arc<Executor>,
+}
+
+impl TestServer {
+ pub fn create(
+ url: String,
+ api_key: String,
+ secret_key: String,
+ executor: Arc<Executor>,
+ ) -> Result<Arc<TestServer>> {
+ let mut servers = SERVERS.lock();
+ if servers.contains_key(&url) {
+ Err(anyhow!("a server with url {:?} already exists", url))
+ } else {
+ let server = Arc::new(TestServer {
+ url: url.clone(),
+ api_key,
+ secret_key,
+ rooms: Default::default(),
+ executor,
+ });
+ servers.insert(url, server.clone());
+ Ok(server)
+ }
+ }
+
+ fn get(url: &str) -> Result<Arc<TestServer>> {
+ Ok(SERVERS
+ .lock()
+ .get(url)
+ .ok_or_else(|| anyhow!("no server found for url"))?
+ .clone())
+ }
+
+ pub fn teardown(&self) -> Result<()> {
+ SERVERS
+ .lock()
+ .remove(&self.url)
+ .ok_or_else(|| anyhow!("server with url {:?} does not exist", self.url))?;
+ Ok(())
+ }
+
+ pub fn create_api_client(&self) -> TestApiClient {
+ TestApiClient {
+ url: self.url.clone(),
+ }
+ }
+
+ pub async fn create_room(&self, room: String) -> Result<()> {
+ self.executor.simulate_random_delay().await;
+ let mut server_rooms = self.rooms.lock();
+ if server_rooms.contains_key(&room) {
+ Err(anyhow!("room {:?} already exists", room))
+ } else {
+ server_rooms.insert(room, Default::default());
+ Ok(())
+ }
+ }
+
+ async fn delete_room(&self, room: String) -> Result<()> {
+ // TODO: clear state associated with all `Room`s.
+ self.executor.simulate_random_delay().await;
+ let mut server_rooms = self.rooms.lock();
+ server_rooms
+ .remove(&room)
+ .ok_or_else(|| anyhow!("room {:?} does not exist", room))?;
+ Ok(())
+ }
+
+ async fn join_room(&self, token: String, client_room: Arc<Room>) -> Result<()> {
+ self.executor.simulate_random_delay().await;
+ let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let identity = claims.sub.unwrap().to_string();
+ let room_name = claims.video.room.unwrap();
+ let mut server_rooms = self.rooms.lock();
+ let room = (*server_rooms).entry(room_name.to_string()).or_default();
+
+ if room.client_rooms.contains_key(&identity) {
+ Err(anyhow!(
+ "{:?} attempted to join room {:?} twice",
+ identity,
+ room_name
+ ))
+ } else {
+ for track in &room.video_tracks {
+ client_room
+ .0
+ .lock()
+ .video_track_updates
+ .0
+ .try_broadcast(RemoteVideoTrackUpdate::Subscribed(track.clone()))
+ .unwrap();
+ }
+ room.client_rooms.insert(identity, client_room);
+ Ok(())
+ }
+ }
+
+ async fn leave_room(&self, token: String) -> Result<()> {
+ self.executor.simulate_random_delay().await;
+ let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let identity = claims.sub.unwrap().to_string();
+ let room_name = claims.video.room.unwrap();
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ room.client_rooms.remove(&identity).ok_or_else(|| {
+ anyhow!(
+ "{:?} attempted to leave room {:?} before joining it",
+ identity,
+ room_name
+ )
+ })?;
+ Ok(())
+ }
+
+ async fn remove_participant(&self, room_name: String, identity: String) -> Result<()> {
+ // TODO: clear state associated with the `Room`.
+
+ self.executor.simulate_random_delay().await;
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ room.client_rooms.remove(&identity).ok_or_else(|| {
+ anyhow!(
+ "participant {:?} did not join room {:?}",
+ identity,
+ room_name
+ )
+ })?;
+ Ok(())
+ }
+
+ pub async fn disconnect_client(&self, client_identity: String) {
+ self.executor.simulate_random_delay().await;
+ let mut server_rooms = self.rooms.lock();
+ for room in server_rooms.values_mut() {
+ if let Some(room) = room.client_rooms.remove(&client_identity) {
+ *room.0.lock().connection.0.borrow_mut() = ConnectionState::Disconnected;
+ }
+ }
+ }
+
+ async fn publish_video_track(&self, token: String, local_track: LocalVideoTrack) -> Result<()> {
+ self.executor.simulate_random_delay().await;
+ let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let identity = claims.sub.unwrap().to_string();
+ let room_name = claims.video.room.unwrap();
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+
+ let track = Arc::new(RemoteVideoTrack {
+ sid: nanoid::nanoid!(17),
+ publisher_id: identity.clone(),
+ frames_rx: local_track.frames_rx.clone(),
+ });
+
+ room.video_tracks.push(track.clone());
+
+ for (id, client_room) in &room.client_rooms {
+ if *id != identity {
+ let _ = client_room
+ .0
+ .lock()
+ .video_track_updates
+ .0
+ .try_broadcast(RemoteVideoTrackUpdate::Subscribed(track.clone()))
+ .unwrap();
+ }
+ }
+
+ Ok(())
+ }
+
+ async fn publish_audio_track(
+ &self,
+ token: String,
+ _local_track: &LocalAudioTrack,
+ ) -> Result<()> {
+ self.executor.simulate_random_delay().await;
+ let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let identity = claims.sub.unwrap().to_string();
+ let room_name = claims.video.room.unwrap();
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+
+ let track = Arc::new(RemoteAudioTrack {
+ sid: nanoid::nanoid!(17),
+ publisher_id: identity.clone(),
+ });
+
+ let publication = Arc::new(RemoteTrackPublication);
+
+ room.audio_tracks.push(track.clone());
+
+ for (id, client_room) in &room.client_rooms {
+ if *id != identity {
+ let _ = client_room
+ .0
+ .lock()
+ .audio_track_updates
+ .0
+ .try_broadcast(RemoteAudioTrackUpdate::Subscribed(
+ track.clone(),
+ publication.clone(),
+ ))
+ .unwrap();
+ }
+ }
+
+ Ok(())
+ }
+
+ fn video_tracks(&self, token: String) -> Result<Vec<Arc<RemoteVideoTrack>>> {
+ let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let room_name = claims.video.room.unwrap();
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ Ok(room.video_tracks.clone())
+ }
+
+ fn audio_tracks(&self, token: String) -> Result<Vec<Arc<RemoteAudioTrack>>> {
+ let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let room_name = claims.video.room.unwrap();
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ Ok(room.audio_tracks.clone())
+ }
+}
+
+#[derive(Default)]
+struct TestServerRoom {
+ client_rooms: HashMap<Sid, Arc<Room>>,
+ video_tracks: Vec<Arc<RemoteVideoTrack>>,
+ audio_tracks: Vec<Arc<RemoteAudioTrack>>,
+}
+
+impl TestServerRoom {}
+
+pub struct TestApiClient {
+ url: String,
+}
+
+#[async_trait]
+impl live_kit_server::api::Client for TestApiClient {
+ fn url(&self) -> &str {
+ &self.url
+ }
+
+ async fn create_room(&self, name: String) -> Result<()> {
+ let server = TestServer::get(&self.url)?;
+ server.create_room(name).await?;
+ Ok(())
+ }
+
+ async fn delete_room(&self, name: String) -> Result<()> {
+ let server = TestServer::get(&self.url)?;
+ server.delete_room(name).await?;
+ Ok(())
+ }
+
+ async fn remove_participant(&self, room: String, identity: String) -> Result<()> {
+ let server = TestServer::get(&self.url)?;
+ server.remove_participant(room, identity).await?;
+ Ok(())
+ }
+
+ fn room_token(&self, room: &str, identity: &str) -> Result<String> {
+ let server = TestServer::get(&self.url)?;
+ token::create(
+ &server.api_key,
+ &server.secret_key,
+ Some(identity),
+ token::VideoGrant::to_join(room),
+ )
+ }
+
+ fn guest_token(&self, room: &str, identity: &str) -> Result<String> {
+ let server = TestServer::get(&self.url)?;
+ token::create(
+ &server.api_key,
+ &server.secret_key,
+ Some(identity),
+ token::VideoGrant::for_guest(room),
+ )
+ }
+}
+
+pub type Sid = String;
+
+struct RoomState {
+ connection: (
+ watch::Sender<ConnectionState>,
+ watch::Receiver<ConnectionState>,
+ ),
+ display_sources: Vec<MacOSDisplay>,
+ audio_track_updates: (
+ async_broadcast::Sender<RemoteAudioTrackUpdate>,
+ async_broadcast::Receiver<RemoteAudioTrackUpdate>,
+ ),
+ video_track_updates: (
+ async_broadcast::Sender<RemoteVideoTrackUpdate>,
+ async_broadcast::Receiver<RemoteVideoTrackUpdate>,
+ ),
+}
+
+#[derive(Clone, Eq, PartialEq)]
+pub enum ConnectionState {
+ Disconnected,
+ Connected { url: String, token: String },
+}
+
+pub struct Room(Mutex<RoomState>);
+
+impl Room {
+ pub fn new() -> Arc<Self> {
+ Arc::new(Self(Mutex::new(RoomState {
+ connection: watch::channel_with(ConnectionState::Disconnected),
+ display_sources: Default::default(),
+ video_track_updates: async_broadcast::broadcast(128),
+ audio_track_updates: async_broadcast::broadcast(128),
+ })))
+ }
+
+ pub fn status(&self) -> watch::Receiver<ConnectionState> {
+ self.0.lock().connection.1.clone()
+ }
+
+ pub fn connect(self: &Arc<Self>, url: &str, token: &str) -> impl Future<Output = Result<()>> {
+ let this = self.clone();
+ let url = url.to_string();
+ let token = token.to_string();
+ async move {
+ let server = TestServer::get(&url)?;
+ server
+ .join_room(token.clone(), this.clone())
+ .await
+ .context("room join")?;
+ *this.0.lock().connection.0.borrow_mut() = ConnectionState::Connected { url, token };
+ Ok(())
+ }
+ }
+
+ pub fn display_sources(self: &Arc<Self>) -> impl Future<Output = Result<Vec<MacOSDisplay>>> {
+ let this = self.clone();
+ async move {
+ let server = this.test_server();
+ server.executor.simulate_random_delay().await;
+ Ok(this.0.lock().display_sources.clone())
+ }
+ }
+
+ pub fn publish_video_track(
+ self: &Arc<Self>,
+ track: LocalVideoTrack,
+ ) -> impl Future<Output = Result<LocalTrackPublication>> {
+ let this = self.clone();
+ let track = track.clone();
+ async move {
+ this.test_server()
+ .publish_video_track(this.token(), track)
+ .await?;
+ Ok(LocalTrackPublication)
+ }
+ }
+ pub fn publish_audio_track(
+ self: &Arc<Self>,
+ track: LocalAudioTrack,
+ ) -> impl Future<Output = Result<LocalTrackPublication>> {
+ let this = self.clone();
+ let track = track.clone();
+ async move {
+ this.test_server()
+ .publish_audio_track(this.token(), &track)
+ .await?;
+ Ok(LocalTrackPublication)
+ }
+ }
+
+ pub fn unpublish_track(&self, _publication: LocalTrackPublication) {}
+
+ pub fn remote_audio_tracks(&self, publisher_id: &str) -> Vec<Arc<RemoteAudioTrack>> {
+ if !self.is_connected() {
+ return Vec::new();
+ }
+
+ self.test_server()
+ .audio_tracks(self.token())
+ .unwrap()
+ .into_iter()
+ .filter(|track| track.publisher_id() == publisher_id)
+ .collect()
+ }
+
+ pub fn remote_audio_track_publications(
+ &self,
+ publisher_id: &str,
+ ) -> Vec<Arc<RemoteTrackPublication>> {
+ if !self.is_connected() {
+ return Vec::new();
+ }
+
+ self.test_server()
+ .audio_tracks(self.token())
+ .unwrap()
+ .into_iter()
+ .filter(|track| track.publisher_id() == publisher_id)
+ .map(|_track| Arc::new(RemoteTrackPublication {}))
+ .collect()
+ }
+
+ pub fn remote_video_tracks(&self, publisher_id: &str) -> Vec<Arc<RemoteVideoTrack>> {
+ if !self.is_connected() {
+ return Vec::new();
+ }
+
+ self.test_server()
+ .video_tracks(self.token())
+ .unwrap()
+ .into_iter()
+ .filter(|track| track.publisher_id() == publisher_id)
+ .collect()
+ }
+
+ pub fn remote_audio_track_updates(&self) -> impl Stream<Item = RemoteAudioTrackUpdate> {
+ self.0.lock().audio_track_updates.1.clone()
+ }
+
+ pub fn remote_video_track_updates(&self) -> impl Stream<Item = RemoteVideoTrackUpdate> {
+ self.0.lock().video_track_updates.1.clone()
+ }
+
+ pub fn set_display_sources(&self, sources: Vec<MacOSDisplay>) {
+ self.0.lock().display_sources = sources;
+ }
+
+ fn test_server(&self) -> Arc<TestServer> {
+ match self.0.lock().connection.1.borrow().clone() {
+ ConnectionState::Disconnected => panic!("must be connected to call this method"),
+ ConnectionState::Connected { url, .. } => TestServer::get(&url).unwrap(),
+ }
+ }
+
+ fn token(&self) -> String {
+ match self.0.lock().connection.1.borrow().clone() {
+ ConnectionState::Disconnected => panic!("must be connected to call this method"),
+ ConnectionState::Connected { token, .. } => token,
+ }
+ }
+
+ fn is_connected(&self) -> bool {
+ match *self.0.lock().connection.1.borrow() {
+ ConnectionState::Disconnected => false,
+ ConnectionState::Connected { .. } => true,
+ }
+ }
+}
+
+impl Drop for Room {
+ fn drop(&mut self) {
+ if let ConnectionState::Connected { token, .. } = mem::replace(
+ &mut *self.0.lock().connection.0.borrow_mut(),
+ ConnectionState::Disconnected,
+ ) {
+ if let Ok(server) = TestServer::get(&token) {
+ let executor = server.executor.clone();
+ executor
+ .spawn(async move { server.leave_room(token).await.unwrap() })
+ .detach();
+ }
+ }
+ }
+}
+
+pub struct LocalTrackPublication;
+
+impl LocalTrackPublication {
+ pub fn set_mute(&self, _mute: bool) -> impl Future<Output = Result<()>> {
+ async { Ok(()) }
+ }
+}
+
+pub struct RemoteTrackPublication;
+
+impl RemoteTrackPublication {
+ pub fn set_enabled(&self, _enabled: bool) -> impl Future<Output = Result<()>> {
+ async { Ok(()) }
+ }
+
+ pub fn is_muted(&self) -> bool {
+ false
+ }
+
+ pub fn sid(&self) -> String {
+ "".to_string()
+ }
+}
+
+#[derive(Clone)]
+pub struct LocalVideoTrack {
+ frames_rx: async_broadcast::Receiver<Frame>,
+}
+
+impl LocalVideoTrack {
+ pub fn screen_share_for_display(display: &MacOSDisplay) -> Self {
+ Self {
+ frames_rx: display.frames.1.clone(),
+ }
+ }
+}
+
+#[derive(Clone)]
+pub struct LocalAudioTrack;
+
+impl LocalAudioTrack {
+ pub fn create() -> Self {
+ Self
+ }
+}
+
+#[derive(Debug)]
+pub struct RemoteVideoTrack {
+ sid: Sid,
+ publisher_id: Sid,
+ frames_rx: async_broadcast::Receiver<Frame>,
+}
+
+impl RemoteVideoTrack {
+ pub fn sid(&self) -> &str {
+ &self.sid
+ }
+
+ pub fn publisher_id(&self) -> &str {
+ &self.publisher_id
+ }
+
+ pub fn frames(&self) -> async_broadcast::Receiver<Frame> {
+ self.frames_rx.clone()
+ }
+}
+
+#[derive(Debug)]
+pub struct RemoteAudioTrack {
+ sid: Sid,
+ publisher_id: Sid,
+}
+
+impl RemoteAudioTrack {
+ pub fn sid(&self) -> &str {
+ &self.sid
+ }
+
+ pub fn publisher_id(&self) -> &str {
+ &self.publisher_id
+ }
+
+ pub fn enable(&self) -> impl Future<Output = Result<()>> {
+ async { Ok(()) }
+ }
+
+ pub fn disable(&self) -> impl Future<Output = Result<()>> {
+ async { Ok(()) }
+ }
+}
+
+#[derive(Clone)]
+pub enum RemoteVideoTrackUpdate {
+ Subscribed(Arc<RemoteVideoTrack>),
+ Unsubscribed { publisher_id: Sid, track_id: Sid },
+}
+
+#[derive(Clone)]
+pub enum RemoteAudioTrackUpdate {
+ ActiveSpeakersChanged { speakers: Vec<Sid> },
+ MuteChanged { track_id: Sid, muted: bool },
+ Subscribed(Arc<RemoteAudioTrack>, Arc<RemoteTrackPublication>),
+ Unsubscribed { publisher_id: Sid, track_id: Sid },
+}
+
+#[derive(Clone)]
+pub struct MacOSDisplay {
+ frames: (
+ async_broadcast::Sender<Frame>,
+ async_broadcast::Receiver<Frame>,
+ ),
+}
+
+impl MacOSDisplay {
+ pub fn new() -> Self {
+ Self {
+ frames: async_broadcast::broadcast(128),
+ }
+ }
+
+ pub fn send_frame(&self, frame: Frame) {
+ self.frames.0.try_broadcast(frame).unwrap();
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct Frame {
+ pub label: String,
+ pub width: usize,
+ pub height: usize,
+}
+
+impl Frame {
+ pub fn width(&self) -> usize {
+ self.width
+ }
+
+ pub fn height(&self) -> usize {
+ self.height
+ }
+
+ pub fn image(&self) -> CVImageBuffer {
+ unimplemented!("you can't call this in test mode")
+ }
+}
@@ -0,0 +1,78 @@
+[package]
+name = "multi_buffer2"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/multi_buffer2.rs"
+doctest = false
+
+[features]
+test-support = [
+ "copilot2/test-support",
+ "text/test-support",
+ "language2/test-support",
+ "gpui2/test-support",
+ "util/test-support",
+ "tree-sitter-rust",
+ "tree-sitter-typescript"
+]
+
+[dependencies]
+client2 = { path = "../client2" }
+clock = { path = "../clock" }
+collections = { path = "../collections" }
+git = { path = "../git" }
+gpui2 = { path = "../gpui2" }
+language2 = { path = "../language2" }
+lsp2 = { path = "../lsp2" }
+rich_text = { path = "../rich_text" }
+settings2 = { path = "../settings2" }
+snippet = { path = "../snippet" }
+sum_tree = { path = "../sum_tree" }
+text = { path = "../text" }
+theme2 = { path = "../theme2" }
+util = { path = "../util" }
+
+aho-corasick = "1.1"
+anyhow.workspace = true
+convert_case = "0.6.0"
+futures.workspace = true
+indoc = "1.0.4"
+itertools = "0.10"
+lazy_static.workspace = true
+log.workspace = true
+ordered-float.workspace = true
+parking_lot.workspace = true
+postage.workspace = true
+pulldown-cmark = { version = "0.9.2", default-features = false }
+rand.workspace = true
+schemars.workspace = true
+serde.workspace = true
+serde_derive.workspace = true
+smallvec.workspace = true
+smol.workspace = true
+
+tree-sitter-rust = { workspace = true, optional = true }
+tree-sitter-html = { workspace = true, optional = true }
+tree-sitter-typescript = { workspace = true, optional = true }
+
+[dev-dependencies]
+copilot2 = { path = "../copilot2", features = ["test-support"] }
+text = { path = "../text", features = ["test-support"] }
+language2 = { path = "../language2", features = ["test-support"] }
+lsp2 = { path = "../lsp2", features = ["test-support"] }
+gpui2 = { path = "../gpui2", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
+project2 = { path = "../project2", features = ["test-support"] }
+settings2 = { path = "../settings2", features = ["test-support"] }
+
+ctor.workspace = true
+env_logger.workspace = true
+rand.workspace = true
+unindent.workspace = true
+tree-sitter.workspace = true
+tree-sitter-rust.workspace = true
+tree-sitter-html.workspace = true
+tree-sitter-typescript.workspace = true
@@ -0,0 +1,138 @@
+use super::{ExcerptId, MultiBufferSnapshot, ToOffset, ToOffsetUtf16, ToPoint};
+use language2::{OffsetUtf16, Point, TextDimension};
+use std::{
+ cmp::Ordering,
+ ops::{Range, Sub},
+};
+use sum_tree::Bias;
+
+#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
+pub struct Anchor {
+ pub buffer_id: Option<u64>,
+ pub excerpt_id: ExcerptId,
+ pub text_anchor: text::Anchor,
+}
+
+impl Anchor {
+ pub fn min() -> Self {
+ Self {
+ buffer_id: None,
+ excerpt_id: ExcerptId::min(),
+ text_anchor: text::Anchor::MIN,
+ }
+ }
+
+ pub fn max() -> Self {
+ Self {
+ buffer_id: None,
+ excerpt_id: ExcerptId::max(),
+ text_anchor: text::Anchor::MAX,
+ }
+ }
+
+ pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
+ let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id, snapshot);
+ if excerpt_id_cmp.is_eq() {
+ if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() {
+ Ordering::Equal
+ } else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
+ self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer)
+ } else {
+ Ordering::Equal
+ }
+ } else {
+ excerpt_id_cmp
+ }
+ }
+
+ pub fn bias(&self) -> Bias {
+ self.text_anchor.bias
+ }
+
+ pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
+ if self.text_anchor.bias != Bias::Left {
+ if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
+ return Self {
+ buffer_id: self.buffer_id,
+ excerpt_id: self.excerpt_id.clone(),
+ text_anchor: self.text_anchor.bias_left(&excerpt.buffer),
+ };
+ }
+ }
+ self.clone()
+ }
+
+ pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
+ if self.text_anchor.bias != Bias::Right {
+ if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
+ return Self {
+ buffer_id: self.buffer_id,
+ excerpt_id: self.excerpt_id.clone(),
+ text_anchor: self.text_anchor.bias_right(&excerpt.buffer),
+ };
+ }
+ }
+ self.clone()
+ }
+
+ pub fn summary<D>(&self, snapshot: &MultiBufferSnapshot) -> D
+ where
+ D: TextDimension + Ord + Sub<D, Output = D>,
+ {
+ snapshot.summary_for_anchor(self)
+ }
+
+ pub fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool {
+ if *self == Anchor::min() || *self == Anchor::max() {
+ true
+ } else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
+ excerpt.contains(self)
+ && (self.text_anchor == excerpt.range.context.start
+ || self.text_anchor == excerpt.range.context.end
+ || self.text_anchor.is_valid(&excerpt.buffer))
+ } else {
+ false
+ }
+ }
+}
+
+impl ToOffset for Anchor {
+ fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize {
+ self.summary(snapshot)
+ }
+}
+
+impl ToOffsetUtf16 for Anchor {
+ fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16 {
+ self.summary(snapshot)
+ }
+}
+
+impl ToPoint for Anchor {
+ fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point {
+ self.summary(snapshot)
+ }
+}
+
+pub trait AnchorRangeExt {
+ fn cmp(&self, b: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> Ordering;
+ fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<usize>;
+ fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point>;
+}
+
+impl AnchorRangeExt for Range<Anchor> {
+ fn cmp(&self, other: &Range<Anchor>, buffer: &MultiBufferSnapshot) -> Ordering {
+ match self.start.cmp(&other.start, buffer) {
+ Ordering::Equal => other.end.cmp(&self.end, buffer),
+ ord => ord,
+ }
+ }
+
+ fn to_offset(&self, content: &MultiBufferSnapshot) -> Range<usize> {
+ self.start.to_offset(content)..self.end.to_offset(content)
+ }
+
+ fn to_point(&self, content: &MultiBufferSnapshot) -> Range<Point> {
+ self.start.to_point(content)..self.end.to_point(content)
+ }
+}
@@ -0,0 +1,5393 @@
+mod anchor;
+
+pub use anchor::{Anchor, AnchorRangeExt};
+use anyhow::{anyhow, Result};
+use clock::ReplicaId;
+use collections::{BTreeMap, Bound, HashMap, HashSet};
+use futures::{channel::mpsc, SinkExt};
+use git::diff::DiffHunk;
+use gpui2::{AppContext, EventEmitter, Model, ModelContext};
+pub use language2::Completion;
+use language2::{
+ char_kind,
+ language_settings::{language_settings, LanguageSettings},
+ AutoindentMode, Buffer, BufferChunks, BufferSnapshot, CharKind, Chunk, CursorShape,
+ DiagnosticEntry, File, IndentSize, Language, LanguageScope, OffsetRangeExt, OffsetUtf16,
+ Outline, OutlineItem, Point, PointUtf16, Selection, TextDimension, ToOffset as _,
+ ToOffsetUtf16 as _, ToPoint as _, ToPointUtf16 as _, TransactionId, Unclipped,
+};
+use std::{
+ borrow::Cow,
+ cell::{Ref, RefCell},
+ cmp, fmt,
+ future::Future,
+ io,
+ iter::{self, FromIterator},
+ mem,
+ ops::{Range, RangeBounds, Sub},
+ str,
+ sync::Arc,
+ time::{Duration, Instant},
+};
+use sum_tree::{Bias, Cursor, SumTree};
+use text::{
+ locator::Locator,
+ subscription::{Subscription, Topic},
+ Edit, TextSummary,
+};
+use theme2::SyntaxTheme;
+use util::post_inc;
+
+#[cfg(any(test, feature = "test-support"))]
+use gpui2::Context;
+
+const NEWLINES: &[u8] = &[b'\n'; u8::MAX as usize];
+
+#[derive(Debug, Default, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]
+pub struct ExcerptId(usize);
+
+pub struct MultiBuffer {
+ snapshot: RefCell<MultiBufferSnapshot>,
+ buffers: RefCell<HashMap<u64, BufferState>>,
+ next_excerpt_id: usize,
+ subscriptions: Topic,
+ singleton: bool,
+ replica_id: ReplicaId,
+ history: History,
+ title: Option<String>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum Event {
+ ExcerptsAdded {
+ buffer: Model<Buffer>,
+ predecessor: ExcerptId,
+ excerpts: Vec<(ExcerptId, ExcerptRange<language2::Anchor>)>,
+ },
+ ExcerptsRemoved {
+ ids: Vec<ExcerptId>,
+ },
+ ExcerptsEdited {
+ ids: Vec<ExcerptId>,
+ },
+ Edited {
+ sigleton_buffer_edited: bool,
+ },
+ TransactionUndone {
+ transaction_id: TransactionId,
+ },
+ Reloaded,
+ DiffBaseChanged,
+ LanguageChanged,
+ Reparsed,
+ Saved,
+ FileHandleChanged,
+ Closed,
+ DirtyChanged,
+ DiagnosticsUpdated,
+}
+
+#[derive(Clone)]
+struct History {
+ next_transaction_id: TransactionId,
+ undo_stack: Vec<Transaction>,
+ redo_stack: Vec<Transaction>,
+ transaction_depth: usize,
+ group_interval: Duration,
+}
+
+#[derive(Clone)]
+struct Transaction {
+ id: TransactionId,
+ buffer_transactions: HashMap<u64, text::TransactionId>,
+ first_edit_at: Instant,
+ last_edit_at: Instant,
+ suppress_grouping: bool,
+}
+
+pub trait ToOffset: 'static + fmt::Debug {
+ fn to_offset(&self, snapshot: &MultiBufferSnapshot) -> usize;
+}
+
+pub trait ToOffsetUtf16: 'static + fmt::Debug {
+ fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16;
+}
+
+pub trait ToPoint: 'static + fmt::Debug {
+ fn to_point(&self, snapshot: &MultiBufferSnapshot) -> Point;
+}
+
+pub trait ToPointUtf16: 'static + fmt::Debug {
+ fn to_point_utf16(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16;
+}
+
+struct BufferState {
+ buffer: Model<Buffer>,
+ last_version: clock::Global,
+ last_parse_count: usize,
+ last_selections_update_count: usize,
+ last_diagnostics_update_count: usize,
+ last_file_update_count: usize,
+ last_git_diff_update_count: usize,
+ excerpts: Vec<Locator>,
+ _subscriptions: [gpui2::Subscription; 2],
+}
+
+#[derive(Clone, Default)]
+pub struct MultiBufferSnapshot {
+ singleton: bool,
+ excerpts: SumTree<Excerpt>,
+ excerpt_ids: SumTree<ExcerptIdMapping>,
+ parse_count: usize,
+ diagnostics_update_count: usize,
+ trailing_excerpt_update_count: usize,
+ git_diff_update_count: usize,
+ edit_count: usize,
+ is_dirty: bool,
+ has_conflict: bool,
+}
+
+pub struct ExcerptBoundary {
+ pub id: ExcerptId,
+ pub row: u32,
+ pub buffer: BufferSnapshot,
+ pub range: ExcerptRange<text::Anchor>,
+ pub starts_new_buffer: bool,
+}
+
+#[derive(Clone)]
+struct Excerpt {
+ id: ExcerptId,
+ locator: Locator,
+ buffer_id: u64,
+ buffer: BufferSnapshot,
+ range: ExcerptRange<text::Anchor>,
+ max_buffer_row: u32,
+ text_summary: TextSummary,
+ has_trailing_newline: bool,
+}
+
+#[derive(Clone, Debug)]
+struct ExcerptIdMapping {
+ id: ExcerptId,
+ locator: Locator,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct ExcerptRange<T> {
+ pub context: Range<T>,
+ pub primary: Option<Range<T>>,
+}
+
+#[derive(Clone, Debug, Default)]
+struct ExcerptSummary {
+ excerpt_id: ExcerptId,
+ excerpt_locator: Locator,
+ max_buffer_row: u32,
+ text: TextSummary,
+}
+
+#[derive(Clone)]
+pub struct MultiBufferRows<'a> {
+ buffer_row_range: Range<u32>,
+ excerpts: Cursor<'a, Excerpt, Point>,
+}
+
+pub struct MultiBufferChunks<'a> {
+ range: Range<usize>,
+ excerpts: Cursor<'a, Excerpt, usize>,
+ excerpt_chunks: Option<ExcerptChunks<'a>>,
+ language_aware: bool,
+}
+
+pub struct MultiBufferBytes<'a> {
+ range: Range<usize>,
+ excerpts: Cursor<'a, Excerpt, usize>,
+ excerpt_bytes: Option<ExcerptBytes<'a>>,
+ chunk: &'a [u8],
+}
+
+pub struct ReversedMultiBufferBytes<'a> {
+ range: Range<usize>,
+ excerpts: Cursor<'a, Excerpt, usize>,
+ excerpt_bytes: Option<ExcerptBytes<'a>>,
+ chunk: &'a [u8],
+}
+
+struct ExcerptChunks<'a> {
+ content_chunks: BufferChunks<'a>,
+ footer_height: usize,
+}
+
+struct ExcerptBytes<'a> {
+ content_bytes: text::Bytes<'a>,
+ footer_height: usize,
+}
+
+impl MultiBuffer {
+ pub fn new(replica_id: ReplicaId) -> Self {
+ Self {
+ snapshot: Default::default(),
+ buffers: Default::default(),
+ next_excerpt_id: 1,
+ subscriptions: Default::default(),
+ singleton: false,
+ replica_id,
+ history: History {
+ next_transaction_id: Default::default(),
+ undo_stack: Default::default(),
+ redo_stack: Default::default(),
+ transaction_depth: 0,
+ group_interval: Duration::from_millis(300),
+ },
+ title: Default::default(),
+ }
+ }
+
+ pub fn clone(&self, new_cx: &mut ModelContext<Self>) -> Self {
+ let mut buffers = HashMap::default();
+ for (buffer_id, buffer_state) in self.buffers.borrow().iter() {
+ buffers.insert(
+ *buffer_id,
+ BufferState {
+ buffer: buffer_state.buffer.clone(),
+ last_version: buffer_state.last_version.clone(),
+ last_parse_count: buffer_state.last_parse_count,
+ last_selections_update_count: buffer_state.last_selections_update_count,
+ last_diagnostics_update_count: buffer_state.last_diagnostics_update_count,
+ last_file_update_count: buffer_state.last_file_update_count,
+ last_git_diff_update_count: buffer_state.last_git_diff_update_count,
+ excerpts: buffer_state.excerpts.clone(),
+ _subscriptions: [
+ new_cx.observe(&buffer_state.buffer, |_, _, cx| cx.notify()),
+ new_cx.subscribe(&buffer_state.buffer, Self::on_buffer_event),
+ ],
+ },
+ );
+ }
+ Self {
+ snapshot: RefCell::new(self.snapshot.borrow().clone()),
+ buffers: RefCell::new(buffers),
+ next_excerpt_id: 1,
+ subscriptions: Default::default(),
+ singleton: self.singleton,
+ replica_id: self.replica_id,
+ history: self.history.clone(),
+ title: self.title.clone(),
+ }
+ }
+
+ pub fn with_title(mut self, title: String) -> Self {
+ self.title = Some(title);
+ self
+ }
+
+ pub fn singleton(buffer: Model<Buffer>, cx: &mut ModelContext<Self>) -> Self {
+ let mut this = Self::new(buffer.read(cx).replica_id());
+ this.singleton = true;
+ this.push_excerpts(
+ buffer,
+ [ExcerptRange {
+ context: text::Anchor::MIN..text::Anchor::MAX,
+ primary: None,
+ }],
+ cx,
+ );
+ this.snapshot.borrow_mut().singleton = true;
+ this
+ }
+
+ pub fn replica_id(&self) -> ReplicaId {
+ self.replica_id
+ }
+
+ pub fn snapshot(&self, cx: &AppContext) -> MultiBufferSnapshot {
+ self.sync(cx);
+ self.snapshot.borrow().clone()
+ }
+
+ pub fn read(&self, cx: &AppContext) -> Ref<MultiBufferSnapshot> {
+ self.sync(cx);
+ self.snapshot.borrow()
+ }
+
+ pub fn as_singleton(&self) -> Option<Model<Buffer>> {
+ if self.singleton {
+ return Some(
+ self.buffers
+ .borrow()
+ .values()
+ .next()
+ .unwrap()
+ .buffer
+ .clone(),
+ );
+ } else {
+ None
+ }
+ }
+
+ pub fn is_singleton(&self) -> bool {
+ self.singleton
+ }
+
+ pub fn subscribe(&mut self) -> Subscription {
+ self.subscriptions.subscribe()
+ }
+
+ pub fn is_dirty(&self, cx: &AppContext) -> bool {
+ self.read(cx).is_dirty()
+ }
+
+ pub fn has_conflict(&self, cx: &AppContext) -> bool {
+ self.read(cx).has_conflict()
+ }
+
+ // The `is_empty` signature doesn't match what clippy expects
+ #[allow(clippy::len_without_is_empty)]
+ pub fn len(&self, cx: &AppContext) -> usize {
+ self.read(cx).len()
+ }
+
+ pub fn is_empty(&self, cx: &AppContext) -> bool {
+ self.len(cx) != 0
+ }
+
+ pub fn symbols_containing<T: ToOffset>(
+ &self,
+ offset: T,
+ theme: Option<&SyntaxTheme>,
+ cx: &AppContext,
+ ) -> Option<(u64, Vec<OutlineItem<Anchor>>)> {
+ self.read(cx).symbols_containing(offset, theme)
+ }
+
+ /// Applies a set of edits, expressed in multi-buffer coordinates, by
+ /// translating each range into per-buffer edits. Ranges are normalized
+ /// (start <= end); a singleton multi-buffer forwards directly to the
+ /// underlying buffer. Ranges that span excerpt boundaries insert the new
+ /// text at the start excerpt and delete the covered text from the other
+ /// excerpts involved.
+ pub fn edit<I, S, T>(
+ &mut self,
+ edits: I,
+ mut autoindent_mode: Option<AutoindentMode>,
+ cx: &mut ModelContext<Self>,
+ ) where
+ I: IntoIterator<Item = (Range<S>, T)>,
+ S: ToOffset,
+ T: Into<Arc<str>>,
+ {
+ if self.buffers.borrow().is_empty() {
+ return;
+ }
+
+ let snapshot = self.read(cx);
+ // Normalize each range so start <= end before resolving excerpts.
+ let edits = edits.into_iter().map(|(range, new_text)| {
+ let mut range = range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot);
+ if range.start > range.end {
+ mem::swap(&mut range.start, &mut range.end);
+ }
+ (range, new_text)
+ });
+
+ // Fast path: a singleton multi-buffer needs no coordinate translation.
+ if let Some(buffer) = self.as_singleton() {
+ return buffer.update(cx, |buffer, cx| {
+ buffer.edit(edits, autoindent_mode, cx);
+ });
+ }
+
+ // Block autoindent carries per-edit original indent columns; take them
+ // out so they can be re-distributed per insertion below.
+ let original_indent_columns = match &mut autoindent_mode {
+ Some(AutoindentMode::Block {
+ original_indent_columns,
+ }) => mem::take(original_indent_columns),
+ _ => Default::default(),
+ };
+
+ struct BufferEdit {
+ range: Range<usize>,
+ new_text: Arc<str>,
+ is_insertion: bool,
+ original_indent_column: u32,
+ }
+ let mut buffer_edits: HashMap<u64, Vec<BufferEdit>> = Default::default();
+ let mut edited_excerpt_ids = Vec::new();
+ let mut cursor = snapshot.excerpts.cursor::<usize>();
+ for (ix, (range, new_text)) in edits.enumerate() {
+ let new_text: Arc<str> = new_text.into();
+ let original_indent_column = original_indent_columns.get(ix).copied().unwrap_or(0);
+ cursor.seek(&range.start, Bias::Right, &());
+ // Seeking past the end lands on no item; step back to the last excerpt.
+ if cursor.item().is_none() && range.start == *cursor.start() {
+ cursor.prev(&());
+ }
+ let start_excerpt = cursor.item().expect("start offset out of bounds");
+ let start_overshoot = range.start - cursor.start();
+ let buffer_start = start_excerpt
+ .range
+ .context
+ .start
+ .to_offset(&start_excerpt.buffer)
+ + start_overshoot;
+ edited_excerpt_ids.push(start_excerpt.id);
+
+ cursor.seek(&range.end, Bias::Right, &());
+ if cursor.item().is_none() && range.end == *cursor.start() {
+ cursor.prev(&());
+ }
+ let end_excerpt = cursor.item().expect("end offset out of bounds");
+ let end_overshoot = range.end - cursor.start();
+ let buffer_end = end_excerpt
+ .range
+ .context
+ .start
+ .to_offset(&end_excerpt.buffer)
+ + end_overshoot;
+
+ if start_excerpt.id == end_excerpt.id {
+ // Entire edit falls within a single excerpt.
+ buffer_edits
+ .entry(start_excerpt.buffer_id)
+ .or_insert(Vec::new())
+ .push(BufferEdit {
+ range: buffer_start..buffer_end,
+ new_text,
+ is_insertion: true,
+ original_indent_column,
+ });
+ } else {
+ // Edit spans excerpts: insert at the start excerpt, delete the
+ // covered tail/head in the start/end excerpts respectively.
+ edited_excerpt_ids.push(end_excerpt.id);
+ let start_excerpt_range = buffer_start
+ ..start_excerpt
+ .range
+ .context
+ .end
+ .to_offset(&start_excerpt.buffer);
+ let end_excerpt_range = end_excerpt
+ .range
+ .context
+ .start
+ .to_offset(&end_excerpt.buffer)
+ ..buffer_end;
+ buffer_edits
+ .entry(start_excerpt.buffer_id)
+ .or_insert(Vec::new())
+ .push(BufferEdit {
+ range: start_excerpt_range,
+ new_text: new_text.clone(),
+ is_insertion: true,
+ original_indent_column,
+ });
+ buffer_edits
+ .entry(end_excerpt.buffer_id)
+ .or_insert(Vec::new())
+ .push(BufferEdit {
+ range: end_excerpt_range,
+ new_text: new_text.clone(),
+ is_insertion: false,
+ original_indent_column,
+ });
+
+ // Excerpts fully covered between start and end are deleted whole.
+ cursor.seek(&range.start, Bias::Right, &());
+ cursor.next(&());
+ while let Some(excerpt) = cursor.item() {
+ if excerpt.id == end_excerpt.id {
+ break;
+ }
+ buffer_edits
+ .entry(excerpt.buffer_id)
+ .or_insert(Vec::new())
+ .push(BufferEdit {
+ range: excerpt.range.context.to_offset(&excerpt.buffer),
+ new_text: new_text.clone(),
+ is_insertion: false,
+ original_indent_column,
+ });
+ edited_excerpt_ids.push(excerpt.id);
+ cursor.next(&());
+ }
+ }
+ }
+
+ drop(cursor);
+ drop(snapshot);
+ // Non-generic part of edit, hoisted out to avoid blowing up LLVM IR.
+ fn tail(
+ this: &mut MultiBuffer,
+ buffer_edits: HashMap<u64, Vec<BufferEdit>>,
+ autoindent_mode: Option<AutoindentMode>,
+ edited_excerpt_ids: Vec<ExcerptId>,
+ cx: &mut ModelContext<MultiBuffer>,
+ ) {
+ for (buffer_id, mut edits) in buffer_edits {
+ edits.sort_unstable_by_key(|edit| edit.range.start);
+ this.buffers.borrow()[&buffer_id]
+ .buffer
+ .update(cx, |buffer, cx| {
+ let mut edits = edits.into_iter().peekable();
+ let mut insertions = Vec::new();
+ let mut original_indent_columns = Vec::new();
+ let mut deletions = Vec::new();
+ let empty_str: Arc<str> = "".into();
+ while let Some(BufferEdit {
+ mut range,
+ new_text,
+ mut is_insertion,
+ original_indent_column,
+ }) = edits.next()
+ {
+ // Coalesce overlapping/adjacent edits; an insertion
+ // absorbed into the run keeps the run an insertion.
+ while let Some(BufferEdit {
+ range: next_range,
+ is_insertion: next_is_insertion,
+ ..
+ }) = edits.peek()
+ {
+ if range.end >= next_range.start {
+ range.end = cmp::max(next_range.end, range.end);
+ is_insertion |= *next_is_insertion;
+ edits.next();
+ } else {
+ break;
+ }
+ }
+
+ if is_insertion {
+ original_indent_columns.push(original_indent_column);
+ insertions.push((
+ buffer.anchor_before(range.start)
+ ..buffer.anchor_before(range.end),
+ new_text.clone(),
+ ));
+ } else if !range.is_empty() {
+ deletions.push((
+ buffer.anchor_before(range.start)
+ ..buffer.anchor_before(range.end),
+ empty_str.clone(),
+ ));
+ }
+ }
+
+ // Deletions autoindent without original columns; only
+ // insertions carry them.
+ let deletion_autoindent_mode =
+ if let Some(AutoindentMode::Block { .. }) = autoindent_mode {
+ Some(AutoindentMode::Block {
+ original_indent_columns: Default::default(),
+ })
+ } else {
+ None
+ };
+ let insertion_autoindent_mode =
+ if let Some(AutoindentMode::Block { .. }) = autoindent_mode {
+ Some(AutoindentMode::Block {
+ original_indent_columns,
+ })
+ } else {
+ None
+ };
+
+ buffer.edit(deletions, deletion_autoindent_mode, cx);
+ buffer.edit(insertions, insertion_autoindent_mode, cx);
+ })
+ }
+
+ cx.emit(Event::ExcerptsEdited {
+ ids: edited_excerpt_ids,
+ });
+ }
+ tail(self, buffer_edits, autoindent_mode, edited_excerpt_ids, cx);
+ }
+
+ /// Starts an undo transaction stamped with the current time.
+ pub fn start_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+ self.start_transaction_at(Instant::now(), cx)
+ }
+
+ /// Starts an undo transaction at `now`. A singleton forwards to its
+ /// buffer; otherwise a transaction is started in every constituent
+ /// buffer and in the multi-buffer's own history.
+ pub fn start_transaction_at(
+ &mut self,
+ now: Instant,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<TransactionId> {
+ if let Some(buffer) = self.as_singleton() {
+ return buffer.update(cx, |buffer, _| buffer.start_transaction_at(now));
+ }
+
+ for BufferState { buffer, .. } in self.buffers.borrow().values() {
+ buffer.update(cx, |buffer, _| buffer.start_transaction_at(now));
+ }
+ self.history.start_transaction(now)
+ }
+
+ /// Ends the current undo transaction, stamped with the current time.
+ pub fn end_transaction(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+ self.end_transaction_at(Instant::now(), cx)
+ }
+
+ /// Ends the current undo transaction at `now`, collecting the per-buffer
+ /// transaction ids that were produced and recording them in the
+ /// multi-buffer history. Returns the grouped transaction id, if any.
+ pub fn end_transaction_at(
+ &mut self,
+ now: Instant,
+ cx: &mut ModelContext<Self>,
+ ) -> Option<TransactionId> {
+ if let Some(buffer) = self.as_singleton() {
+ return buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx));
+ }
+
+ let mut buffer_transactions = HashMap::default();
+ for BufferState { buffer, .. } in self.buffers.borrow().values() {
+ if let Some(transaction_id) =
+ buffer.update(cx, |buffer, cx| buffer.end_transaction_at(now, cx))
+ {
+ buffer_transactions.insert(buffer.read(cx).remote_id(), transaction_id);
+ }
+ }
+
+ if self.history.end_transaction(now, buffer_transactions) {
+ let transaction_id = self.history.group().unwrap();
+ Some(transaction_id)
+ } else {
+ None
+ }
+ }
+
+ /// Merges `transaction` into `destination`. For each constituent buffer:
+ /// if the destination already has a transaction for that buffer, the two
+ /// buffer-level transactions are merged inside the buffer; otherwise the
+ /// buffer transaction is simply moved onto the destination.
+ pub fn merge_transactions(
+ &mut self,
+ transaction: TransactionId,
+ destination: TransactionId,
+ cx: &mut ModelContext<Self>,
+ ) {
+ if let Some(buffer) = self.as_singleton() {
+ buffer.update(cx, |buffer, _| {
+ buffer.merge_transactions(transaction, destination)
+ });
+ } else {
+ // `forget` removes the source transaction from the history.
+ if let Some(transaction) = self.history.forget(transaction) {
+ if let Some(destination) = self.history.transaction_mut(destination) {
+ for (buffer_id, buffer_transaction_id) in transaction.buffer_transactions {
+ if let Some(destination_buffer_transaction_id) =
+ destination.buffer_transactions.get(&buffer_id)
+ {
+ if let Some(state) = self.buffers.borrow().get(&buffer_id) {
+ state.buffer.update(cx, |buffer, _| {
+ buffer.merge_transactions(
+ buffer_transaction_id,
+ *destination_buffer_transaction_id,
+ )
+ });
+ }
+ } else {
+ destination
+ .buffer_transactions
+ .insert(buffer_id, buffer_transaction_id);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ /// Finalizes the most recent transaction in the multi-buffer history and
+ /// in every constituent buffer, so later edits start a new transaction.
+ pub fn finalize_last_transaction(&mut self, cx: &mut ModelContext<Self>) {
+ self.history.finalize_last_transaction();
+ for BufferState { buffer, .. } in self.buffers.borrow().values() {
+ buffer.update(cx, |buffer, _| {
+ buffer.finalize_last_transaction();
+ });
+ }
+ }
+
+ /// Records externally-created buffer transactions as a single finalized
+ /// multi-buffer transaction stamped with the current time.
+ pub fn push_transaction<'a, T>(&mut self, buffer_transactions: T, cx: &mut ModelContext<Self>)
+ where
+ T: IntoIterator<Item = (&'a Model<Buffer>, &'a language2::Transaction)>,
+ {
+ self.history
+ .push_transaction(buffer_transactions, Instant::now(), cx);
+ self.history.finalize_last_transaction();
+ }
+
+ /// Groups all transactions newer than `transaction_id` into it — in the
+ /// underlying buffer for a singleton, otherwise in this history.
+ pub fn group_until_transaction(
+ &mut self,
+ transaction_id: TransactionId,
+ cx: &mut ModelContext<Self>,
+ ) {
+ if let Some(buffer) = self.as_singleton() {
+ buffer.update(cx, |buffer, _| {
+ buffer.group_until_transaction(transaction_id)
+ });
+ } else {
+ self.history.group_until(transaction_id);
+ }
+ }
+
+ /// Distributes multi-buffer selections onto the constituent buffers.
+ /// A selection spanning several excerpts is clipped to each excerpt's
+ /// context range. Buffers with no selections have theirs cleared, and
+ /// each buffer receives its selections sorted and merged (overlapping
+ /// selections are coalesced) before being marked active.
+ pub fn set_active_selections(
+ &mut self,
+ selections: &[Selection<Anchor>],
+ line_mode: bool,
+ cursor_shape: CursorShape,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let mut selections_by_buffer: HashMap<u64, Vec<Selection<text::Anchor>>> =
+ Default::default();
+ let snapshot = self.read(cx);
+ let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>();
+ for selection in selections {
+ let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id);
+ let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id);
+
+ cursor.seek(&Some(start_locator), Bias::Left, &());
+ while let Some(excerpt) = cursor.item() {
+ if excerpt.locator > *end_locator {
+ break;
+ }
+
+ // Clamp the selection to the excerpt's context range, keeping
+ // the real endpoints only in their own excerpts.
+ let mut start = excerpt.range.context.start;
+ let mut end = excerpt.range.context.end;
+ if excerpt.id == selection.start.excerpt_id {
+ start = selection.start.text_anchor;
+ }
+ if excerpt.id == selection.end.excerpt_id {
+ end = selection.end.text_anchor;
+ }
+ selections_by_buffer
+ .entry(excerpt.buffer_id)
+ .or_default()
+ .push(Selection {
+ id: selection.id,
+ start,
+ end,
+ reversed: selection.reversed,
+ goal: selection.goal,
+ });
+
+ cursor.next(&());
+ }
+ }
+
+ // Buffers that received no selections lose their active selections.
+ for (buffer_id, buffer_state) in self.buffers.borrow().iter() {
+ if !selections_by_buffer.contains_key(buffer_id) {
+ buffer_state
+ .buffer
+ .update(cx, |buffer, cx| buffer.remove_active_selections(cx));
+ }
+ }
+
+ for (buffer_id, mut selections) in selections_by_buffer {
+ self.buffers.borrow()[&buffer_id]
+ .buffer
+ .update(cx, |buffer, cx| {
+ selections.sort_unstable_by(|a, b| a.start.cmp(&b.start, buffer));
+ let mut selections = selections.into_iter().peekable();
+ // Merge overlapping selections into one.
+ let merged_selections = Arc::from_iter(iter::from_fn(|| {
+ let mut selection = selections.next()?;
+ while let Some(next_selection) = selections.peek() {
+ if selection.end.cmp(&next_selection.start, buffer).is_ge() {
+ let next_selection = selections.next().unwrap();
+ if next_selection.end.cmp(&selection.end, buffer).is_ge() {
+ selection.end = next_selection.end;
+ }
+ } else {
+ break;
+ }
+ }
+ Some(selection)
+ }));
+ buffer.set_active_selections(merged_selections, line_mode, cursor_shape, cx);
+ });
+ }
+ }
+
+ /// Clears active selections from every constituent buffer.
+ pub fn remove_active_selections(&mut self, cx: &mut ModelContext<Self>) {
+ for buffer in self.buffers.borrow().values() {
+ buffer
+ .buffer
+ .update(cx, |buffer, cx| buffer.remove_active_selections(cx));
+ }
+ }
+
+ /// Undoes the most recent transaction. For a non-singleton, pops history
+ /// entries until one actually undoes something in at least one buffer;
+ /// each stored buffer-transaction id is rewritten to the id currently on
+ /// top of that buffer's undo stack so a later redo targets the right spot.
+ pub fn undo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+ let mut transaction_id = None;
+ if let Some(buffer) = self.as_singleton() {
+ transaction_id = buffer.update(cx, |buffer, cx| buffer.undo(cx));
+ } else {
+ while let Some(transaction) = self.history.pop_undo() {
+ let mut undone = false;
+ for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions {
+ if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(buffer_id) {
+ undone |= buffer.update(cx, |buffer, cx| {
+ let undo_to = *buffer_transaction_id;
+ if let Some(entry) = buffer.peek_undo_stack() {
+ *buffer_transaction_id = entry.transaction_id();
+ }
+ buffer.undo_to_transaction(undo_to, cx)
+ });
+ }
+ }
+
+ if undone {
+ transaction_id = Some(transaction.id);
+ break;
+ }
+ }
+ }
+
+ if let Some(transaction_id) = transaction_id {
+ cx.emit(Event::TransactionUndone { transaction_id });
+ }
+
+ transaction_id
+ }
+
+ /// Redoes the most recently undone transaction; mirror image of `undo`,
+ /// using the redo stack. Note: unlike `undo`, no event is emitted here.
+ pub fn redo(&mut self, cx: &mut ModelContext<Self>) -> Option<TransactionId> {
+ if let Some(buffer) = self.as_singleton() {
+ return buffer.update(cx, |buffer, cx| buffer.redo(cx));
+ }
+
+ while let Some(transaction) = self.history.pop_redo() {
+ let mut redone = false;
+ for (buffer_id, buffer_transaction_id) in &mut transaction.buffer_transactions {
+ if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(buffer_id) {
+ redone |= buffer.update(cx, |buffer, cx| {
+ let redo_to = *buffer_transaction_id;
+ if let Some(entry) = buffer.peek_redo_stack() {
+ *buffer_transaction_id = entry.transaction_id();
+ }
+ buffer.redo_to_transaction(redo_to, cx)
+ });
+ }
+ }
+
+ if redone {
+ return Some(transaction.id);
+ }
+ }
+
+ None
+ }
+
+ /// Undoes a specific transaction by id, removing it from the undo stack
+ /// and undoing its constituent buffer transactions.
+ pub fn undo_transaction(&mut self, transaction_id: TransactionId, cx: &mut ModelContext<Self>) {
+ if let Some(buffer) = self.as_singleton() {
+ buffer.update(cx, |buffer, cx| buffer.undo_transaction(transaction_id, cx));
+ } else if let Some(transaction) = self.history.remove_from_undo(transaction_id) {
+ for (buffer_id, transaction_id) in &transaction.buffer_transactions {
+ if let Some(BufferState { buffer, .. }) = self.buffers.borrow().get(buffer_id) {
+ buffer.update(cx, |buffer, cx| {
+ buffer.undo_transaction(*transaction_id, cx)
+ });
+ }
+ }
+ }
+ }
+
+ /// Asynchronously adds excerpts for `ranges` (expanded by
+ /// `context_line_count` lines of context) and streams back the resulting
+ /// anchor ranges over a channel. Excerpt ranges are computed on a
+ /// background scope, then pushed in batches of 100.
+ pub fn stream_excerpts_with_context_lines(
+ &mut self,
+ buffer: Model<Buffer>,
+ ranges: Vec<Range<text::Anchor>>,
+ context_line_count: u32,
+ cx: &mut ModelContext<Self>,
+ ) -> mpsc::Receiver<Range<Anchor>> {
+ let (buffer_id, buffer_snapshot) =
+ buffer.update(cx, |buffer, _| (buffer.remote_id(), buffer.snapshot()));
+
+ let (mut tx, rx) = mpsc::channel(256);
+ cx.spawn(move |this, mut cx| async move {
+ let mut excerpt_ranges = Vec::new();
+ let mut range_counts = Vec::new();
+ // Compute the expanded excerpt ranges off the main thread.
+ cx.executor()
+ .scoped(|scope| {
+ scope.spawn(async {
+ let (ranges, counts) =
+ build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
+ excerpt_ranges = ranges;
+ range_counts = counts;
+ });
+ })
+ .await;
+
+ let mut ranges = ranges.into_iter();
+ let mut range_counts = range_counts.into_iter();
+ for excerpt_ranges in excerpt_ranges.chunks(100) {
+ // If the multi-buffer was dropped, stop streaming.
+ let excerpt_ids = match this.update(&mut cx, |this, cx| {
+ this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
+ }) {
+ Ok(excerpt_ids) => excerpt_ids,
+ Err(_) => return,
+ };
+
+ // `range_counts[i]` says how many of the original ranges were
+ // merged into excerpt `i`.
+ for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.by_ref())
+ {
+ for range in ranges.by_ref().take(range_count) {
+ let start = Anchor {
+ buffer_id: Some(buffer_id),
+ excerpt_id: excerpt_id.clone(),
+ text_anchor: range.start,
+ };
+ let end = Anchor {
+ buffer_id: Some(buffer_id),
+ excerpt_id: excerpt_id.clone(),
+ text_anchor: range.end,
+ };
+ // NOTE(review): `break` only exits the inner loop when the
+ // receiver is dropped; outer batches keep going — confirm
+ // whether a full `return` was intended.
+ if tx.send(start..end).await.is_err() {
+ break;
+ }
+ }
+ }
+ }
+ })
+ .detach();
+
+ rx
+ }
+
+ /// Appends excerpts of `buffer` at the end of the multi-buffer.
+ pub fn push_excerpts<O>(
+ &mut self,
+ buffer: Model<Buffer>,
+ ranges: impl IntoIterator<Item = ExcerptRange<O>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Vec<ExcerptId>
+ where
+ O: text::ToOffset,
+ {
+ self.insert_excerpts_after(ExcerptId::max(), buffer, ranges, cx)
+ }
+
+ /// Synchronous variant of `stream_excerpts_with_context_lines`: expands
+ /// `ranges` by context lines, pushes the excerpts, and returns an anchor
+ /// range for each original input range.
+ pub fn push_excerpts_with_context_lines<O>(
+ &mut self,
+ buffer: Model<Buffer>,
+ ranges: Vec<Range<O>>,
+ context_line_count: u32,
+ cx: &mut ModelContext<Self>,
+ ) -> Vec<Range<Anchor>>
+ where
+ O: text::ToPoint + text::ToOffset,
+ {
+ let buffer_id = buffer.read(cx).remote_id();
+ let buffer_snapshot = buffer.read(cx).snapshot();
+ let (excerpt_ranges, range_counts) =
+ build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
+
+ let excerpt_ids = self.push_excerpts(buffer, excerpt_ranges, cx);
+
+ let mut anchor_ranges = Vec::new();
+ let mut ranges = ranges.into_iter();
+ // Each excerpt absorbed `range_count` of the original ranges.
+ for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.into_iter()) {
+ anchor_ranges.extend(ranges.by_ref().take(range_count).map(|range| {
+ let start = Anchor {
+ buffer_id: Some(buffer_id),
+ excerpt_id: excerpt_id.clone(),
+ text_anchor: buffer_snapshot.anchor_after(range.start),
+ };
+ let end = Anchor {
+ buffer_id: Some(buffer_id),
+ excerpt_id: excerpt_id.clone(),
+ text_anchor: buffer_snapshot.anchor_after(range.end),
+ };
+ start..end
+ }))
+ }
+ anchor_ranges
+ }
+
+ /// Inserts excerpts of `buffer` immediately after `prev_excerpt_id`,
+ /// allocating fresh excerpt ids for each range and returning them.
+ pub fn insert_excerpts_after<O>(
+ &mut self,
+ prev_excerpt_id: ExcerptId,
+ buffer: Model<Buffer>,
+ ranges: impl IntoIterator<Item = ExcerptRange<O>>,
+ cx: &mut ModelContext<Self>,
+ ) -> Vec<ExcerptId>
+ where
+ O: text::ToOffset,
+ {
+ let mut ids = Vec::new();
+ // Ids are allocated lazily as the iterator is consumed; the counter
+ // itself is advanced inside `insert_excerpts_with_ids_after`.
+ let mut next_excerpt_id = self.next_excerpt_id;
+ self.insert_excerpts_with_ids_after(
+ prev_excerpt_id,
+ buffer,
+ ranges.into_iter().map(|range| {
+ let id = ExcerptId(post_inc(&mut next_excerpt_id));
+ ids.push(id);
+ (id, range)
+ }),
+ cx,
+ );
+ ids
+ }
+
+ /// Inserts excerpts with caller-supplied ids after `prev_excerpt_id`,
+ /// registering the buffer (and observers for it) if it is new to this
+ /// multi-buffer, rebuilding the excerpt SumTree, and emitting
+ /// `Edited`/`ExcerptsAdded` events. Must be called outside a transaction.
+ pub fn insert_excerpts_with_ids_after<O>(
+ &mut self,
+ prev_excerpt_id: ExcerptId,
+ buffer: Model<Buffer>,
+ ranges: impl IntoIterator<Item = (ExcerptId, ExcerptRange<O>)>,
+ cx: &mut ModelContext<Self>,
+ ) where
+ O: text::ToOffset,
+ {
+ assert_eq!(self.history.transaction_depth, 0);
+ let mut ranges = ranges.into_iter().peekable();
+ if ranges.peek().is_none() {
+ // NOTE(review): `Default::default()` for the unit return type;
+ // a bare `return;` would read more clearly.
+ return Default::default();
+ }
+
+ self.sync(cx);
+
+ let buffer_id = buffer.read(cx).remote_id();
+ let buffer_snapshot = buffer.read(cx).snapshot();
+
+ // Register this buffer on first use, capturing baseline counters used
+ // by `sync` to detect subsequent changes.
+ let mut buffers = self.buffers.borrow_mut();
+ let buffer_state = buffers.entry(buffer_id).or_insert_with(|| BufferState {
+ last_version: buffer_snapshot.version().clone(),
+ last_parse_count: buffer_snapshot.parse_count(),
+ last_selections_update_count: buffer_snapshot.selections_update_count(),
+ last_diagnostics_update_count: buffer_snapshot.diagnostics_update_count(),
+ last_file_update_count: buffer_snapshot.file_update_count(),
+ last_git_diff_update_count: buffer_snapshot.git_diff_update_count(),
+ excerpts: Default::default(),
+ _subscriptions: [
+ cx.observe(&buffer, |_, _, cx| cx.notify()),
+ cx.subscribe(&buffer, Self::on_buffer_event),
+ ],
+ buffer: buffer.clone(),
+ });
+
+ let mut snapshot = self.snapshot.borrow_mut();
+
+ let mut prev_locator = snapshot.excerpt_locator_for_id(prev_excerpt_id).clone();
+ let mut new_excerpt_ids = mem::take(&mut snapshot.excerpt_ids);
+ let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>();
+ let mut new_excerpts = cursor.slice(&prev_locator, Bias::Right, &());
+ prev_locator = cursor.start().unwrap_or(Locator::min_ref()).clone();
+
+ let edit_start = new_excerpts.summary().text.len;
+ // The previous excerpt now has a successor, so it gains a trailing
+ // newline separator.
+ new_excerpts.update_last(
+ |excerpt| {
+ excerpt.has_trailing_newline = true;
+ },
+ &(),
+ );
+
+ let next_locator = if let Some(excerpt) = cursor.item() {
+ excerpt.locator.clone()
+ } else {
+ Locator::max()
+ };
+
+ let mut excerpts = Vec::new();
+ while let Some((id, range)) = ranges.next() {
+ // Each new excerpt is ordered between the previous one and the
+ // fixed successor.
+ let locator = Locator::between(&prev_locator, &next_locator);
+ if let Err(ix) = buffer_state.excerpts.binary_search(&locator) {
+ buffer_state.excerpts.insert(ix, locator.clone());
+ }
+ let range = ExcerptRange {
+ context: buffer_snapshot.anchor_before(&range.context.start)
+ ..buffer_snapshot.anchor_after(&range.context.end),
+ primary: range.primary.map(|primary| {
+ buffer_snapshot.anchor_before(&primary.start)
+ ..buffer_snapshot.anchor_after(&primary.end)
+ }),
+ };
+ // Keep the id counter ahead of any caller-supplied id.
+ if id.0 >= self.next_excerpt_id {
+ self.next_excerpt_id = id.0 + 1;
+ }
+ excerpts.push((id, range.clone()));
+ let excerpt = Excerpt::new(
+ id,
+ locator.clone(),
+ buffer_id,
+ buffer_snapshot.clone(),
+ range,
+ ranges.peek().is_some() || cursor.item().is_some(),
+ );
+ new_excerpts.push(excerpt, &());
+ prev_locator = locator.clone();
+ new_excerpt_ids.push(ExcerptIdMapping { id, locator }, &());
+ }
+
+ let edit_end = new_excerpts.summary().text.len;
+
+ let suffix = cursor.suffix(&());
+ let changed_trailing_excerpt = suffix.is_empty();
+ new_excerpts.append(suffix, &());
+ drop(cursor);
+ snapshot.excerpts = new_excerpts;
+ snapshot.excerpt_ids = new_excerpt_ids;
+ if changed_trailing_excerpt {
+ snapshot.trailing_excerpt_update_count += 1;
+ }
+
+ self.subscriptions.publish_mut([Edit {
+ old: edit_start..edit_start,
+ new: edit_start..edit_end,
+ }]);
+ cx.emit(Event::Edited {
+ // NOTE(review): "sigleton" typo originates in the Event definition
+ // elsewhere in this crate; not fixable from this site.
+ sigleton_buffer_edited: false,
+ });
+ cx.emit(Event::ExcerptsAdded {
+ buffer,
+ predecessor: prev_excerpt_id,
+ excerpts,
+ });
+ cx.notify();
+ }
+
+ /// Removes every buffer and excerpt, publishing a single edit replacing
+ /// the entire previous content with the empty range.
+ pub fn clear(&mut self, cx: &mut ModelContext<Self>) {
+ self.sync(cx);
+ let ids = self.excerpt_ids();
+ self.buffers.borrow_mut().clear();
+ let mut snapshot = self.snapshot.borrow_mut();
+ let prev_len = snapshot.len();
+ snapshot.excerpts = Default::default();
+ snapshot.trailing_excerpt_update_count += 1;
+ snapshot.is_dirty = false;
+ snapshot.has_conflict = false;
+
+ self.subscriptions.publish_mut([Edit {
+ old: 0..prev_len,
+ new: 0..0,
+ }]);
+ cx.emit(Event::Edited {
+ sigleton_buffer_edited: false,
+ });
+ cx.emit(Event::ExcerptsRemoved { ids });
+ cx.notify();
+ }
+
+ /// Returns (id, range) for every excerpt that belongs to `buffer`, in
+ /// excerpt order, by walking that buffer's stored locators.
+ pub fn excerpts_for_buffer(
+ &self,
+ buffer: &Model<Buffer>,
+ cx: &AppContext,
+ ) -> Vec<(ExcerptId, ExcerptRange<text::Anchor>)> {
+ let mut excerpts = Vec::new();
+ let snapshot = self.read(cx);
+ let buffers = self.buffers.borrow();
+ let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>();
+ for locator in buffers
+ .get(&buffer.read(cx).remote_id())
+ .map(|state| &state.excerpts)
+ .into_iter()
+ .flatten()
+ {
+ // Locators are sorted, so a forward-only seek suffices.
+ cursor.seek_forward(&Some(locator), Bias::Left, &());
+ if let Some(excerpt) = cursor.item() {
+ if excerpt.locator == *locator {
+ excerpts.push((excerpt.id.clone(), excerpt.range.clone()));
+ }
+ }
+ }
+
+ excerpts
+ }
+
+ /// Ids of all excerpts, in order.
+ pub fn excerpt_ids(&self) -> Vec<ExcerptId> {
+ self.snapshot
+ .borrow()
+ .excerpts
+ .iter()
+ .map(|entry| entry.id)
+ .collect()
+ }
+
+ /// Finds the excerpt containing `position`, falling back to the last
+ /// excerpt when the position is past the end. Returns the excerpt id,
+ /// its owning buffer handle, and its context range.
+ pub fn excerpt_containing(
+ &self,
+ position: impl ToOffset,
+ cx: &AppContext,
+ ) -> Option<(ExcerptId, Model<Buffer>, Range<text::Anchor>)> {
+ let snapshot = self.read(cx);
+ let position = position.to_offset(&snapshot);
+
+ let mut cursor = snapshot.excerpts.cursor::<usize>();
+ cursor.seek(&position, Bias::Right, &());
+ cursor
+ .item()
+ .or_else(|| snapshot.excerpts.last())
+ .map(|excerpt| {
+ (
+ excerpt.id.clone(),
+ self.buffers
+ .borrow()
+ .get(&excerpt.buffer_id)
+ .unwrap()
+ .buffer
+ .clone(),
+ excerpt.range.context.clone(),
+ )
+ })
+ }
+
+ // If point is at the end of the buffer, the last excerpt is returned
+ /// Translates a multi-buffer position into (buffer, buffer offset,
+ /// excerpt id) for the excerpt containing it.
+ pub fn point_to_buffer_offset<T: ToOffset>(
+ &self,
+ point: T,
+ cx: &AppContext,
+ ) -> Option<(Model<Buffer>, usize, ExcerptId)> {
+ let snapshot = self.read(cx);
+ let offset = point.to_offset(&snapshot);
+ let mut cursor = snapshot.excerpts.cursor::<usize>();
+ cursor.seek(&offset, Bias::Right, &());
+ if cursor.item().is_none() {
+ cursor.prev(&());
+ }
+
+ cursor.item().map(|excerpt| {
+ let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ // Offset within the excerpt, re-based onto the buffer.
+ let buffer_point = excerpt_start + offset - *cursor.start();
+ let buffer = self.buffers.borrow()[&excerpt.buffer_id].buffer.clone();
+
+ (buffer, buffer_point, excerpt.id)
+ })
+ }
+
+ /// Splits a multi-buffer range into per-buffer ranges, one entry per
+ /// excerpt the range overlaps, each clipped to the excerpt (excluding
+ /// any synthetic trailing newline).
+ pub fn range_to_buffer_ranges<T: ToOffset>(
+ &self,
+ range: Range<T>,
+ cx: &AppContext,
+ ) -> Vec<(Model<Buffer>, Range<usize>, ExcerptId)> {
+ let snapshot = self.read(cx);
+ let start = range.start.to_offset(&snapshot);
+ let end = range.end.to_offset(&snapshot);
+
+ let mut result = Vec::new();
+ let mut cursor = snapshot.excerpts.cursor::<usize>();
+ cursor.seek(&start, Bias::Right, &());
+ if cursor.item().is_none() {
+ cursor.prev(&());
+ }
+
+ while let Some(excerpt) = cursor.item() {
+ if *cursor.start() > end {
+ break;
+ }
+
+ // The trailing newline is a separator, not buffer content.
+ let mut end_before_newline = cursor.end(&());
+ if excerpt.has_trailing_newline {
+ end_before_newline -= 1;
+ }
+ let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let start = excerpt_start + (cmp::max(start, *cursor.start()) - *cursor.start());
+ let end = excerpt_start + (cmp::min(end, end_before_newline) - *cursor.start());
+ let buffer = self.buffers.borrow()[&excerpt.buffer_id].buffer.clone();
+ result.push((buffer, start..end, excerpt.id));
+ cursor.next(&());
+ }
+
+ result
+ }
+
+ /// Removes the given excerpts, rebuilding the excerpt tree and publishing
+ /// one edit per contiguous run of removed excerpts. Buffers left with no
+ /// excerpts are dropped from the buffer map.
+ pub fn remove_excerpts(
+ &mut self,
+ excerpt_ids: impl IntoIterator<Item = ExcerptId>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ self.sync(cx);
+ let ids = excerpt_ids.into_iter().collect::<Vec<_>>();
+ if ids.is_empty() {
+ return;
+ }
+
+ let mut buffers = self.buffers.borrow_mut();
+ let mut snapshot = self.snapshot.borrow_mut();
+ let mut new_excerpts = SumTree::new();
+ let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>();
+ let mut edits = Vec::new();
+ let mut excerpt_ids = ids.iter().copied().peekable();
+
+ while let Some(excerpt_id) = excerpt_ids.next() {
+ // Seek to the next excerpt to remove, preserving any preceding excerpts.
+ let locator = snapshot.excerpt_locator_for_id(excerpt_id);
+ new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
+
+ if let Some(mut excerpt) = cursor.item() {
+ // Ignore ids that don't resolve to the excerpt at this locator.
+ if excerpt.id != excerpt_id {
+ continue;
+ }
+ let mut old_start = cursor.start().1;
+
+ // Skip over the removed excerpt.
+ 'remove_excerpts: loop {
+ if let Some(buffer_state) = buffers.get_mut(&excerpt.buffer_id) {
+ buffer_state.excerpts.retain(|l| l != &excerpt.locator);
+ if buffer_state.excerpts.is_empty() {
+ buffers.remove(&excerpt.buffer_id);
+ }
+ }
+ cursor.next(&());
+
+ // Skip over any subsequent excerpts that are also removed.
+ while let Some(&next_excerpt_id) = excerpt_ids.peek() {
+ let next_locator = snapshot.excerpt_locator_for_id(next_excerpt_id);
+ if let Some(next_excerpt) = cursor.item() {
+ if next_excerpt.locator == *next_locator {
+ excerpt_ids.next();
+ excerpt = next_excerpt;
+ continue 'remove_excerpts;
+ }
+ }
+ break;
+ }
+
+ break;
+ }
+
+ // When removing the last excerpt, remove the trailing newline from
+ // the previous excerpt.
+ if cursor.item().is_none() && old_start > 0 {
+ old_start -= 1;
+ new_excerpts.update_last(|e| e.has_trailing_newline = false, &());
+ }
+
+ // Push an edit for the removal of this run of excerpts.
+ let old_end = cursor.start().1;
+ let new_start = new_excerpts.summary().text.len;
+ edits.push(Edit {
+ old: old_start..old_end,
+ new: new_start..new_start,
+ });
+ }
+ }
+ let suffix = cursor.suffix(&());
+ let changed_trailing_excerpt = suffix.is_empty();
+ new_excerpts.append(suffix, &());
+ drop(cursor);
+ snapshot.excerpts = new_excerpts;
+
+ if changed_trailing_excerpt {
+ snapshot.trailing_excerpt_update_count += 1;
+ }
+
+ self.subscriptions.publish_mut(edits);
+ cx.emit(Event::Edited {
+ sigleton_buffer_edited: false,
+ });
+ cx.emit(Event::ExcerptsRemoved { ids });
+ cx.notify();
+ }
+
+ /// Returns a future that resolves once every anchor's underlying buffer
+ /// position is available. Fails with an error if any anchor refers to a
+ /// buffer not in this multi-buffer; the error is surfaced when awaited.
+ pub fn wait_for_anchors<'a>(
+ &self,
+ anchors: impl 'a + Iterator<Item = Anchor>,
+ cx: &mut ModelContext<Self>,
+ ) -> impl 'static + Future<Output = Result<()>> {
+ let borrow = self.buffers.borrow();
+ let mut error = None;
+ let mut futures = Vec::new();
+ for anchor in anchors {
+ if let Some(buffer_id) = anchor.buffer_id {
+ if let Some(buffer) = borrow.get(&buffer_id) {
+ buffer.buffer.update(cx, |buffer, _| {
+ futures.push(buffer.wait_for_anchors([anchor.text_anchor]))
+ });
+ } else {
+ error = Some(anyhow!(
+ "buffer {buffer_id} is not part of this multi-buffer"
+ ));
+ break;
+ }
+ }
+ }
+ // The returned future owns the collected futures, so it is 'static.
+ async move {
+ if let Some(error) = error {
+ Err(error)?;
+ }
+ for future in futures {
+ future.await?;
+ }
+ Ok(())
+ }
+ }
+
+ /// Converts a multi-buffer position into (buffer, buffer-local anchor).
+ pub fn text_anchor_for_position<T: ToOffset>(
+ &self,
+ position: T,
+ cx: &AppContext,
+ ) -> Option<(Model<Buffer>, language2::Anchor)> {
+ let snapshot = self.read(cx);
+ let anchor = snapshot.anchor_before(position);
+ let buffer = self
+ .buffers
+ .borrow()
+ .get(&anchor.buffer_id?)?
+ .buffer
+ .clone();
+ Some((buffer, anchor.text_anchor))
+ }
+
+ /// Forwards constituent-buffer events as multi-buffer events, mostly
+ /// one-to-one. Operation events are swallowed (replication is handled
+ /// elsewhere, not by this multi-buffer).
+ fn on_buffer_event(
+ &mut self,
+ _: Model<Buffer>,
+ event: &language2::Event,
+ cx: &mut ModelContext<Self>,
+ ) {
+ cx.emit(match event {
+ language2::Event::Edited => Event::Edited {
+ sigleton_buffer_edited: true,
+ },
+ language2::Event::DirtyChanged => Event::DirtyChanged,
+ language2::Event::Saved => Event::Saved,
+ language2::Event::FileHandleChanged => Event::FileHandleChanged,
+ language2::Event::Reloaded => Event::Reloaded,
+ language2::Event::DiffBaseChanged => Event::DiffBaseChanged,
+ language2::Event::LanguageChanged => Event::LanguageChanged,
+ language2::Event::Reparsed => Event::Reparsed,
+ language2::Event::DiagnosticsUpdated => Event::DiagnosticsUpdated,
+ language2::Event::Closed => Event::Closed,
+
+ //
+ language2::Event::Operation(_) => return,
+ });
+ }
+
+ /// Handles to every constituent buffer (deduplicated by the set).
+ pub fn all_buffers(&self) -> HashSet<Model<Buffer>> {
+ self.buffers
+ .borrow()
+ .values()
+ .map(|state| state.buffer.clone())
+ .collect()
+ }
+
+ /// Looks up a buffer handle by remote id.
+ pub fn buffer(&self, buffer_id: u64) -> Option<Model<Buffer>> {
+ self.buffers
+ .borrow()
+ .get(&buffer_id)
+ .map(|state| state.buffer.clone())
+ }
+
+ /// Whether typing `text` at `position` should trigger completions: true
+ /// for a single word character (per the language scope at that point),
+ /// or when the owning buffer lists `text` among its completion triggers.
+ /// Multi-character input never triggers.
+ pub fn is_completion_trigger(&self, position: Anchor, text: &str, cx: &AppContext) -> bool {
+ let mut chars = text.chars();
+ let char = if let Some(char) = chars.next() {
+ char
+ } else {
+ return false;
+ };
+ // Reject anything longer than one character.
+ if chars.next().is_some() {
+ return false;
+ }
+
+ let snapshot = self.snapshot(cx);
+ let position = position.to_offset(&snapshot);
+ let scope = snapshot.language_scope_at(position);
+ if char_kind(&scope, char) == CharKind::Word {
+ return true;
+ }
+
+ let anchor = snapshot.anchor_before(position);
+ anchor
+ .buffer_id
+ .and_then(|buffer_id| {
+ let buffer = self.buffers.borrow().get(&buffer_id)?.buffer.clone();
+ Some(
+ buffer
+ .read(cx)
+ .completion_triggers()
+ .iter()
+ .any(|string| string == text),
+ )
+ })
+ .unwrap_or(false)
+ }
+
+ /// Language of the buffer containing `point`, if any.
+ pub fn language_at<'a, T: ToOffset>(
+ &self,
+ point: T,
+ cx: &'a AppContext,
+ ) -> Option<Arc<Language>> {
+ self.point_to_buffer_offset(point, cx)
+ .and_then(|(buffer, offset, _)| buffer.read(cx).language_at(offset))
+ }
+
+ /// Language settings in effect at `point`, resolved from the containing
+ /// buffer's language and file (global defaults when `point` resolves to
+ /// no buffer).
+ pub fn settings_at<'a, T: ToOffset>(
+ &self,
+ point: T,
+ cx: &'a AppContext,
+ ) -> &'a LanguageSettings {
+ let mut language = None;
+ let mut file = None;
+ if let Some((buffer, offset, _)) = self.point_to_buffer_offset(point, cx) {
+ let buffer = buffer.read(cx);
+ language = buffer.language_at(offset);
+ file = buffer.file();
+ }
+ language_settings(language.as_ref(), file, cx)
+ }
+
+ /// Invokes `f` on each constituent buffer handle.
+ pub fn for_each_buffer(&self, mut f: impl FnMut(&Model<Buffer>)) {
+ self.buffers
+ .borrow()
+ .values()
+ .for_each(|state| f(&state.buffer))
+ }
+
+ /// Display title: the explicit title if set, else a singleton buffer's
+ /// file name, else "untitled".
+ pub fn title<'a>(&'a self, cx: &'a AppContext) -> Cow<'a, str> {
+ if let Some(title) = self.title.as_ref() {
+ return title.into();
+ }
+
+ if let Some(buffer) = self.as_singleton() {
+ if let Some(file) = buffer.read(cx).file() {
+ return file.file_name(cx).to_string_lossy();
+ }
+ }
+
+ "untitled".into()
+ }
+
+ /// Test-only: whether the (singleton) buffer is currently parsing.
+ /// Panics if this multi-buffer is not a singleton.
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn is_parsing(&self, cx: &AppContext) -> bool {
+ self.as_singleton().unwrap().read(cx).is_parsing()
+ }
+
+ /// Brings the cached snapshot up to date with every constituent buffer.
+ /// Compares each buffer's version/update counters against the values
+ /// recorded at the last sync, rebuilds the excerpts of changed buffers,
+ /// bumps the corresponding snapshot counters, and publishes the
+ /// accumulated text edits to subscribers.
+ fn sync(&self, cx: &AppContext) {
+ let mut snapshot = self.snapshot.borrow_mut();
+ let mut excerpts_to_edit = Vec::new();
+ let mut reparsed = false;
+ let mut diagnostics_updated = false;
+ let mut git_diff_updated = false;
+ let mut is_dirty = false;
+ let mut has_conflict = false;
+ let mut edited = false;
+ let mut buffers = self.buffers.borrow_mut();
+ for buffer_state in buffers.values_mut() {
+ let buffer = buffer_state.buffer.read(cx);
+ let version = buffer.version();
+ let parse_count = buffer.parse_count();
+ let selections_update_count = buffer.selections_update_count();
+ let diagnostics_update_count = buffer.diagnostics_update_count();
+ let file_update_count = buffer.file_update_count();
+ let git_diff_update_count = buffer.git_diff_update_count();
+
+ // Detect what changed since the last sync via monotonic counters.
+ let buffer_edited = version.changed_since(&buffer_state.last_version);
+ let buffer_reparsed = parse_count > buffer_state.last_parse_count;
+ let buffer_selections_updated =
+ selections_update_count > buffer_state.last_selections_update_count;
+ let buffer_diagnostics_updated =
+ diagnostics_update_count > buffer_state.last_diagnostics_update_count;
+ let buffer_file_updated = file_update_count > buffer_state.last_file_update_count;
+ let buffer_git_diff_updated =
+ git_diff_update_count > buffer_state.last_git_diff_update_count;
+ if buffer_edited
+ || buffer_reparsed
+ || buffer_selections_updated
+ || buffer_diagnostics_updated
+ || buffer_file_updated
+ || buffer_git_diff_updated
+ {
+ buffer_state.last_version = version;
+ buffer_state.last_parse_count = parse_count;
+ buffer_state.last_selections_update_count = selections_update_count;
+ buffer_state.last_diagnostics_update_count = diagnostics_update_count;
+ buffer_state.last_file_update_count = file_update_count;
+ buffer_state.last_git_diff_update_count = git_diff_update_count;
+ excerpts_to_edit.extend(
+ buffer_state
+ .excerpts
+ .iter()
+ .map(|locator| (locator, buffer_state.buffer.clone(), buffer_edited)),
+ );
+ }
+
+ edited |= buffer_edited;
+ reparsed |= buffer_reparsed;
+ diagnostics_updated |= buffer_diagnostics_updated;
+ git_diff_updated |= buffer_git_diff_updated;
+ is_dirty |= buffer.is_dirty();
+ has_conflict |= buffer.has_conflict();
+ }
+ if edited {
+ snapshot.edit_count += 1;
+ }
+ if reparsed {
+ snapshot.parse_count += 1;
+ }
+ if diagnostics_updated {
+ snapshot.diagnostics_update_count += 1;
+ }
+ if git_diff_updated {
+ snapshot.git_diff_update_count += 1;
+ }
+ snapshot.is_dirty = is_dirty;
+ snapshot.has_conflict = has_conflict;
+
+ // Rebuild changed excerpts in tree order.
+ excerpts_to_edit.sort_unstable_by_key(|(locator, _, _)| *locator);
+
+ let mut edits = Vec::new();
+ let mut new_excerpts = SumTree::new();
+ let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>();
+
+ for (locator, buffer, buffer_edited) in excerpts_to_edit {
+ new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
+ let old_excerpt = cursor.item().unwrap();
+ let buffer = buffer.read(cx);
+ let buffer_id = buffer.remote_id();
+
+ let mut new_excerpt;
+ if buffer_edited {
+ // Translate the buffer's edits within this excerpt into
+ // multi-buffer coordinates.
+ edits.extend(
+ buffer
+ .edits_since_in_range::<usize>(
+ old_excerpt.buffer.version(),
+ old_excerpt.range.context.clone(),
+ )
+ .map(|mut edit| {
+ let excerpt_old_start = cursor.start().1;
+ let excerpt_new_start = new_excerpts.summary().text.len;
+ edit.old.start += excerpt_old_start;
+ edit.old.end += excerpt_old_start;
+ edit.new.start += excerpt_new_start;
+ edit.new.end += excerpt_new_start;
+ edit
+ }),
+ );
+
+ new_excerpt = Excerpt::new(
+ old_excerpt.id,
+ locator.clone(),
+ buffer_id,
+ buffer.snapshot(),
+ old_excerpt.range.clone(),
+ old_excerpt.has_trailing_newline,
+ );
+ } else {
+ // Text unchanged: keep the excerpt, refresh its snapshot.
+ new_excerpt = old_excerpt.clone();
+ new_excerpt.buffer = buffer.snapshot();
+ }
+
+ new_excerpts.push(new_excerpt, &());
+ cursor.next(&());
+ }
+ new_excerpts.append(cursor.suffix(&()), &());
+
+ drop(cursor);
+ snapshot.excerpts = new_excerpts;
+
+ self.subscriptions.publish(edits);
+ }
+}
+
+// Test-only construction and randomized-mutation helpers for `MultiBuffer`.
+#[cfg(any(test, feature = "test-support"))]
+impl MultiBuffer {
+ // Builds a singleton multibuffer wrapping one freshly-created buffer
+ // containing `text`.
+ pub fn build_simple(text: &str, cx: &mut gpui2::AppContext) -> Model<Self> {
+ let buffer = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
+ cx.build_model(|cx| Self::singleton(buffer, cx))
+ }
+
+ // Builds a multibuffer with one new buffer per `(text, ranges)` pair,
+ // pushing an excerpt for each range in that buffer.
+ pub fn build_multi<const COUNT: usize>(
+ excerpts: [(&str, Vec<Range<Point>>); COUNT],
+ cx: &mut gpui2::AppContext,
+ ) -> Model<Self> {
+ let multi = cx.build_model(|_| Self::new(0));
+ for (text, ranges) in excerpts {
+ let buffer = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text));
+ let excerpt_ranges = ranges.into_iter().map(|range| ExcerptRange {
+ context: range,
+ primary: None,
+ });
+ multi.update(cx, |multi, cx| {
+ multi.push_excerpts(buffer, excerpt_ranges, cx)
+ });
+ }
+
+ multi
+ }
+
+ // Wraps an existing buffer handle in a singleton multibuffer.
+ pub fn build_from_buffer(buffer: Model<Buffer>, cx: &mut gpui2::AppContext) -> Model<Self> {
+ cx.build_model(|cx| Self::singleton(buffer, cx))
+ }
+
+ // Builds a multibuffer populated by 1..=5 random excerpt mutations.
+ pub fn build_random(rng: &mut impl rand::Rng, cx: &mut gpui2::AppContext) -> Model<Self> {
+ cx.build_model(|cx| {
+ let mut multibuffer = MultiBuffer::new(0);
+ let mutation_count = rng.gen_range(1..=5);
+ multibuffer.randomly_edit_excerpts(rng, mutation_count, cx);
+ multibuffer
+ })
+ }
+
+ // Applies up to `edit_count` random, non-overlapping text edits (each edit
+ // starts past the previous edit's end; ~20% are emitted as reversed ranges).
+ pub fn randomly_edit(
+ &mut self,
+ rng: &mut impl rand::Rng,
+ edit_count: usize,
+ cx: &mut ModelContext<Self>,
+ ) {
+ use util::RandomCharIter;
+
+ let snapshot = self.read(cx);
+ let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
+ let mut last_end = None;
+ for _ in 0..edit_count {
+ if last_end.map_or(false, |last_end| last_end >= snapshot.len()) {
+ break;
+ }
+
+ let new_start = last_end.map_or(0, |last_end| last_end + 1);
+ let end = snapshot.clip_offset(rng.gen_range(new_start..=snapshot.len()), Bias::Right);
+ let start = snapshot.clip_offset(rng.gen_range(new_start..=end), Bias::Right);
+ last_end = Some(end);
+
+ let mut range = start..end;
+ if rng.gen_bool(0.2) {
+ mem::swap(&mut range.start, &mut range.end);
+ }
+
+ let new_text_len = rng.gen_range(0..10);
+ let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
+
+ edits.push((range, new_text.into()));
+ }
+ log::info!("mutating multi-buffer with {:?}", edits);
+ // The snapshot borrows `self.snapshot`; release it before `edit` re-borrows.
+ drop(snapshot);
+
+ self.edit(edits, None, cx);
+ }
+
+ // Randomly clears the multibuffer, inserts excerpts (from new or existing
+ // buffers, capped by the `MAX_EXCERPTS` env var, default 5), or removes a
+ // random subset of existing excerpts — `mutation_count` times.
+ pub fn randomly_edit_excerpts(
+ &mut self,
+ rng: &mut impl rand::Rng,
+ mutation_count: usize,
+ cx: &mut ModelContext<Self>,
+ ) {
+ use rand::prelude::*;
+ use std::env;
+ use util::RandomCharIter;
+
+ let max_excerpts = env::var("MAX_EXCERPTS")
+ .map(|i| i.parse().expect("invalid `MAX_EXCERPTS` variable"))
+ .unwrap_or(5);
+
+ let mut buffers = Vec::new();
+ for _ in 0..mutation_count {
+ if rng.gen_bool(0.05) {
+ log::info!("Clearing multi-buffer");
+ self.clear(cx);
+ continue;
+ }
+
+ let excerpt_ids = self.excerpt_ids();
+ if excerpt_ids.is_empty() || (rng.gen() && excerpt_ids.len() < max_excerpts) {
+ // Insert: either create a brand-new buffer with random text, or
+ // reuse a random buffer already in the multibuffer.
+ let buffer_handle = if rng.gen() || self.buffers.borrow().is_empty() {
+ let text = RandomCharIter::new(&mut *rng).take(10).collect::<String>();
+ buffers
+ .push(cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), text)));
+ let buffer = buffers.last().unwrap().read(cx);
+ log::info!(
+ "Creating new buffer {} with text: {:?}",
+ buffer.remote_id(),
+ buffer.text()
+ );
+ buffers.last().unwrap().clone()
+ } else {
+ self.buffers
+ .borrow()
+ .values()
+ .choose(rng)
+ .unwrap()
+ .buffer
+ .clone()
+ };
+
+ let buffer = buffer_handle.read(cx);
+ let buffer_text = buffer.text();
+ let ranges = (0..rng.gen_range(0..5))
+ .map(|_| {
+ let end_ix =
+ buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right);
+ let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+ ExcerptRange {
+ context: start_ix..end_ix,
+ primary: None,
+ }
+ })
+ .collect::<Vec<_>>();
+ log::info!(
+ "Inserting excerpts from buffer {} and ranges {:?}: {:?}",
+ buffer_handle.read(cx).remote_id(),
+ ranges.iter().map(|r| &r.context).collect::<Vec<_>>(),
+ ranges
+ .iter()
+ .map(|r| &buffer_text[r.context.clone()])
+ .collect::<Vec<_>>()
+ );
+
+ let excerpt_id = self.push_excerpts(buffer_handle.clone(), ranges, cx);
+ log::info!("Inserted with ids: {:?}", excerpt_id);
+ } else {
+ // Remove: pick a random subset of excerpt ids, sorted into
+ // multibuffer order as `remove_excerpts` expects.
+ let remove_count = rng.gen_range(1..=excerpt_ids.len());
+ let mut excerpts_to_remove = excerpt_ids
+ .choose_multiple(rng, remove_count)
+ .cloned()
+ .collect::<Vec<_>>();
+ let snapshot = self.snapshot.borrow();
+ excerpts_to_remove.sort_unstable_by(|a, b| a.cmp(b, &*snapshot));
+ drop(snapshot);
+ log::info!("Removing excerpts {:?}", excerpts_to_remove);
+ self.remove_excerpts(excerpts_to_remove, cx);
+ }
+ }
+ }
+
+ // Randomly mutates either buffer contents (70% of the time, or always for
+ // singletons) or the excerpt structure, then verifies invariants.
+ pub fn randomly_mutate(
+ &mut self,
+ rng: &mut impl rand::Rng,
+ mutation_count: usize,
+ cx: &mut ModelContext<Self>,
+ ) {
+ use rand::prelude::*;
+
+ if rng.gen_bool(0.7) || self.singleton {
+ let buffer = self
+ .buffers
+ .borrow()
+ .values()
+ .choose(rng)
+ .map(|state| state.buffer.clone());
+
+ if let Some(buffer) = buffer {
+ buffer.update(cx, |buffer, cx| {
+ if rng.gen() {
+ buffer.randomly_edit(rng, mutation_count, cx);
+ } else {
+ buffer.randomly_undo_redo(rng, cx);
+ }
+ });
+ } else {
+ self.randomly_edit(rng, mutation_count, cx);
+ }
+ } else {
+ self.randomly_edit_excerpts(rng, mutation_count, cx);
+ }
+
+ self.check_invariants(cx);
+ }
+
+ // Panics if excerpt locators or excerpt ids are out of order, or if the
+ // first entry does not sort strictly after the reserved minimum sentinel.
+ fn check_invariants(&self, cx: &mut ModelContext<Self>) {
+ let snapshot = self.read(cx);
+ let excerpts = snapshot.excerpts.items(&());
+ let excerpt_ids = snapshot.excerpt_ids.items(&());
+
+ for (ix, excerpt) in excerpts.iter().enumerate() {
+ if ix == 0 {
+ if excerpt.locator <= Locator::min() {
+ panic!("invalid first excerpt locator {:?}", excerpt.locator);
+ }
+ } else {
+ if excerpt.locator <= excerpts[ix - 1].locator {
+ panic!("excerpts are out-of-order: {:?}", excerpts);
+ }
+ }
+ }
+
+ for (ix, entry) in excerpt_ids.iter().enumerate() {
+ if ix == 0 {
+ if entry.id.cmp(&ExcerptId::min(), &*snapshot).is_le() {
+ panic!("invalid first excerpt id {:?}", entry.id);
+ }
+ } else {
+ if entry.id <= excerpt_ids[ix - 1].id {
+ panic!("excerpt ids are out-of-order: {:?}", excerpt_ids);
+ }
+ }
+ }
+ }
+}
+
+// `MultiBuffer` emits the crate-level `Event` type through gpui's event system.
+impl EventEmitter for MultiBuffer {
+ type Event = Event;
+}
+
+impl MultiBufferSnapshot {
+ /// Concatenates every chunk of the multibuffer into one owned `String`.
+ pub fn text(&self) -> String {
+ let mut text = String::new();
+ for chunk in self.chunks(0..self.len(), false) {
+ text.push_str(chunk.text);
+ }
+ text
+ }
+
+ // Iterates characters backward from `position` toward the start of the
+ // multibuffer, crossing excerpt boundaries and synthesizing the trailing
+ // newline that separates excerpts.
+ pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
+ let mut offset = position.to_offset(self);
+ let mut cursor = self.excerpts.cursor::<usize>();
+ cursor.seek(&offset, Bias::Left, &());
+ // Start a reversed byte-chunk iterator over the portion of the current
+ // excerpt that lies before `offset` (excluding the trailing newline).
+ let mut excerpt_chunks = cursor.item().map(|excerpt| {
+ let end_before_footer = cursor.start() + excerpt.text_summary.len;
+ let start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let end = start + (cmp::min(offset, end_before_footer) - cursor.start());
+ excerpt.buffer.reversed_chunks_in_range(start..end)
+ });
+ iter::from_fn(move || {
+ // Reached the start of the current excerpt: step back to the
+ // previous one and restart its reversed chunk iterator.
+ if offset == *cursor.start() {
+ cursor.prev(&());
+ let excerpt = cursor.item()?;
+ excerpt_chunks = Some(
+ excerpt
+ .buffer
+ .reversed_chunks_in_range(excerpt.range.context.clone()),
+ );
+ }
+
+ let excerpt = cursor.item().unwrap();
+ // The inter-excerpt newline is not part of any buffer; emit it here.
+ if offset == cursor.end(&()) && excerpt.has_trailing_newline {
+ offset -= 1;
+ Some("\n")
+ } else {
+ let chunk = excerpt_chunks.as_mut().unwrap().next().unwrap();
+ offset -= chunk.len();
+ Some(chunk)
+ }
+ })
+ .flat_map(|c| c.chars().rev())
+ }
+
+ /// Iterates the characters from `position` to the end of the multibuffer.
+ pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
+ let start = position.to_offset(self);
+ let end = self.len();
+ self.text_for_range(start..end).flat_map(str::chars)
+ }
+
+ /// Yields the plain text of `range` as a sequence of `&str` chunks,
+ /// with no syntax-highlighting information attached.
+ pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = &str> + '_ {
+ let chunks = self.chunks(range, false);
+ chunks.map(|chunk| chunk.text)
+ }
+
+ /// Returns true if line `row` is empty or contains only whitespace.
+ pub fn is_line_blank(&self, row: u32) -> bool {
+ // Test the property directly instead of the original roundabout
+ // `matches(|c| !c.is_whitespace()).next().is_none()` formulation.
+ self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
+ .all(|chunk| chunk.chars().all(char::is_whitespace))
+ }
+
+ // Returns true if the text starting at `position` begins with `needle`.
+ // Also requires `position` to be a valid (left-clipped) offset, so a
+ // position in the middle of a multibyte character never matches.
+ pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
+ where
+ T: ToOffset,
+ {
+ let position = position.to_offset(self);
+ position == self.clip_offset(position, Bias::Left)
+ && self
+ .bytes_in_range(position..self.len())
+ .flatten()
+ .copied()
+ .take(needle.len())
+ .eq(needle.bytes())
+ }
+
+ // Expands `start` outward in both directions over characters of the same
+ // `CharKind`, returning the resulting word range and that kind. The kind
+ // is taken as the max of the characters on either side of `start`, and
+ // expansion never crosses a newline.
+ pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
+ let mut start = start.to_offset(self);
+ let mut end = start;
+ let mut next_chars = self.chars_at(start).peekable();
+ let mut prev_chars = self.reversed_chars_at(start).peekable();
+
+ // Classification is language-scope-aware (e.g. which punctuation counts
+ // as part of a word is decided by the language at `start`).
+ let scope = self.language_scope_at(start);
+ let kind = |c| char_kind(&scope, c);
+ let word_kind = cmp::max(
+ prev_chars.peek().copied().map(kind),
+ next_chars.peek().copied().map(kind),
+ );
+
+ for ch in prev_chars {
+ if Some(kind(ch)) == word_kind && ch != '\n' {
+ start -= ch.len_utf8();
+ } else {
+ break;
+ }
+ }
+
+ for ch in next_chars {
+ if Some(kind(ch)) == word_kind && ch != '\n' {
+ end += ch.len_utf8();
+ } else {
+ break;
+ }
+ }
+
+ (start..end, word_kind)
+ }
+
+ /// When this snapshot wraps exactly one buffer, returns its sole excerpt's
+ /// id, the buffer's remote id, and the buffer snapshot; otherwise `None`.
+ pub fn as_singleton(&self) -> Option<(&ExcerptId, u64, &BufferSnapshot)> {
+ if !self.singleton {
+ return None;
+ }
+ let excerpt = self.excerpts.iter().next()?;
+ Some((&excerpt.id, excerpt.buffer_id, &excerpt.buffer))
+ }
+
+ // Total length of the multibuffer in bytes (from the excerpt tree summary).
+ pub fn len(&self) -> usize {
+ self.excerpts.summary().text.len
+ }
+
+ /// Whether the multibuffer contains no text at all.
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ // Largest underlying-buffer row covered by any excerpt, per the tree summary.
+ pub fn max_buffer_row(&self) -> u32 {
+ self.excerpts.summary().max_buffer_row
+ }
+
+ // Clips `offset` to a valid position (e.g. not inside a multibyte char) by
+ // delegating to the containing excerpt's buffer; offsets past the end clip
+ // to the last excerpt boundary.
+ pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
+ // Singleton fast path: coordinates map 1:1 onto the single buffer.
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.clip_offset(offset, bias);
+ }
+
+ let mut cursor = self.excerpts.cursor::<usize>();
+ cursor.seek(&offset, Bias::Right, &());
+ let overshoot = if let Some(excerpt) = cursor.item() {
+ let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let buffer_offset = excerpt
+ .buffer
+ .clip_offset(excerpt_start + (offset - cursor.start()), bias);
+ // saturating_sub: clipping may move the position before the
+ // excerpt's start, which maps to overshoot 0.
+ buffer_offset.saturating_sub(excerpt_start)
+ } else {
+ 0
+ };
+ cursor.start() + overshoot
+ }
+
+ // Point-coordinate counterpart of `clip_offset`: clips `point` to a valid
+ // row/column via the containing excerpt's buffer.
+ pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.clip_point(point, bias);
+ }
+
+ let mut cursor = self.excerpts.cursor::<Point>();
+ cursor.seek(&point, Bias::Right, &());
+ let overshoot = if let Some(excerpt) = cursor.item() {
+ let excerpt_start = excerpt.range.context.start.to_point(&excerpt.buffer);
+ let buffer_point = excerpt
+ .buffer
+ .clip_point(excerpt_start + (point - cursor.start()), bias);
+ buffer_point.saturating_sub(excerpt_start)
+ } else {
+ Point::zero()
+ };
+ *cursor.start() + overshoot
+ }
+
+ // UTF-16 offset counterpart of `clip_offset`.
+ pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.clip_offset_utf16(offset, bias);
+ }
+
+ let mut cursor = self.excerpts.cursor::<OffsetUtf16>();
+ cursor.seek(&offset, Bias::Right, &());
+ let overshoot = if let Some(excerpt) = cursor.item() {
+ let excerpt_start = excerpt.range.context.start.to_offset_utf16(&excerpt.buffer);
+ let buffer_offset = excerpt
+ .buffer
+ .clip_offset_utf16(excerpt_start + (offset - cursor.start()), bias);
+ OffsetUtf16(buffer_offset.0.saturating_sub(excerpt_start.0))
+ } else {
+ OffsetUtf16(0)
+ };
+ *cursor.start() + overshoot
+ }
+
+ // UTF-16 point counterpart of `clip_point`; accepts an `Unclipped` input
+ // that may point inside a surrogate pair or past a line end.
+ pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.clip_point_utf16(point, bias);
+ }
+
+ let mut cursor = self.excerpts.cursor::<PointUtf16>();
+ cursor.seek(&point.0, Bias::Right, &());
+ let overshoot = if let Some(excerpt) = cursor.item() {
+ let excerpt_start = excerpt
+ .buffer
+ .offset_to_point_utf16(excerpt.range.context.start.to_offset(&excerpt.buffer));
+ let buffer_point = excerpt
+ .buffer
+ .clip_point_utf16(Unclipped(excerpt_start + (point.0 - cursor.start())), bias);
+ buffer_point.saturating_sub(excerpt_start)
+ } else {
+ PointUtf16::zero()
+ };
+ *cursor.start() + overshoot
+ }
+
+ // Returns a forward byte iterator over `range`, primed with the first
+ // chunk of the excerpt containing `range.start`.
+ pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> MultiBufferBytes {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+ let mut excerpts = self.excerpts.cursor::<usize>();
+ excerpts.seek(&range.start, Bias::Right, &());
+
+ let mut chunk = &[][..];
+ let excerpt_bytes = if let Some(excerpt) = excerpts.item() {
+ let mut excerpt_bytes = excerpt
+ .bytes_in_range(range.start - excerpts.start()..range.end - excerpts.start());
+ chunk = excerpt_bytes.next().unwrap_or(&[][..]);
+ Some(excerpt_bytes)
+ } else {
+ None
+ };
+ MultiBufferBytes {
+ range,
+ excerpts,
+ excerpt_bytes,
+ chunk,
+ }
+ }
+
+ // Returns a reverse byte iterator over `range`, primed with the last
+ // chunk of the excerpt containing `range.end`.
+ pub fn reversed_bytes_in_range<T: ToOffset>(
+ &self,
+ range: Range<T>,
+ ) -> ReversedMultiBufferBytes {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+ let mut excerpts = self.excerpts.cursor::<usize>();
+ excerpts.seek(&range.end, Bias::Left, &());
+
+ let mut chunk = &[][..];
+ let excerpt_bytes = if let Some(excerpt) = excerpts.item() {
+ // NOTE(review): `range.start - excerpts.start()` assumes the range
+ // starts within this excerpt; for ranges spanning excerpts, confirm
+ // `ReversedMultiBufferBytes::next` handles the earlier excerpts.
+ let mut excerpt_bytes = excerpt.reversed_bytes_in_range(
+ range.start - excerpts.start()..range.end - excerpts.start(),
+ );
+ chunk = excerpt_bytes.next().unwrap_or(&[][..]);
+ Some(excerpt_bytes)
+ } else {
+ None
+ };
+
+ ReversedMultiBufferBytes {
+ range,
+ excerpts,
+ excerpt_bytes,
+ chunk,
+ }
+ }
+
+ // Returns an iterator over underlying-buffer row numbers, starting at
+ // multibuffer row `start_row`.
+ pub fn buffer_rows(&self, start_row: u32) -> MultiBufferRows {
+ let mut result = MultiBufferRows {
+ buffer_row_range: 0..0,
+ excerpts: self.excerpts.cursor(),
+ };
+ result.seek(start_row);
+ result
+ }
+
+ // Returns a chunk iterator over `range`; when `language_aware` is true the
+ // chunks carry syntax-derived metadata in addition to text.
+ pub fn chunks<T: ToOffset>(&self, range: Range<T>, language_aware: bool) -> MultiBufferChunks {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+ let mut chunks = MultiBufferChunks {
+ range: range.clone(),
+ excerpts: self.excerpts.cursor(),
+ excerpt_chunks: None,
+ language_aware,
+ };
+ chunks.seek(range.start);
+ chunks
+ }
+
+ // Converts a multibuffer byte offset into a multibuffer `Point` by adding
+ // the in-excerpt conversion result to the excerpt's starting point.
+ pub fn offset_to_point(&self, offset: usize) -> Point {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.offset_to_point(offset);
+ }
+
+ let mut cursor = self.excerpts.cursor::<(usize, Point)>();
+ cursor.seek(&offset, Bias::Right, &());
+ if let Some(excerpt) = cursor.item() {
+ let (start_offset, start_point) = cursor.start();
+ let overshoot = offset - start_offset;
+ let excerpt_start_offset = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let excerpt_start_point = excerpt.range.context.start.to_point(&excerpt.buffer);
+ let buffer_point = excerpt
+ .buffer
+ .offset_to_point(excerpt_start_offset + overshoot);
+ *start_point + (buffer_point - excerpt_start_point)
+ } else {
+ // Past the last excerpt: clamp to the end of the multibuffer.
+ self.excerpts.summary().text.lines
+ }
+ }
+
+ // Converts a multibuffer byte offset into a UTF-16 point; same structure
+ // as `offset_to_point`.
+ pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.offset_to_point_utf16(offset);
+ }
+
+ let mut cursor = self.excerpts.cursor::<(usize, PointUtf16)>();
+ cursor.seek(&offset, Bias::Right, &());
+ if let Some(excerpt) = cursor.item() {
+ let (start_offset, start_point) = cursor.start();
+ let overshoot = offset - start_offset;
+ let excerpt_start_offset = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let excerpt_start_point = excerpt.range.context.start.to_point_utf16(&excerpt.buffer);
+ let buffer_point = excerpt
+ .buffer
+ .offset_to_point_utf16(excerpt_start_offset + overshoot);
+ *start_point + (buffer_point - excerpt_start_point)
+ } else {
+ self.excerpts.summary().text.lines_utf16()
+ }
+ }
+
+ // Converts a multibuffer `Point` into a UTF-16 point; same structure as
+ // the other coordinate conversions.
+ pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.point_to_point_utf16(point);
+ }
+
+ let mut cursor = self.excerpts.cursor::<(Point, PointUtf16)>();
+ cursor.seek(&point, Bias::Right, &());
+ if let Some(excerpt) = cursor.item() {
+ // NOTE(review): `start_offset` is a misnomer — it is the excerpt's
+ // starting `Point`, not a byte offset.
+ let (start_offset, start_point) = cursor.start();
+ let overshoot = point - start_offset;
+ let excerpt_start_point = excerpt.range.context.start.to_point(&excerpt.buffer);
+ let excerpt_start_point_utf16 =
+ excerpt.range.context.start.to_point_utf16(&excerpt.buffer);
+ let buffer_point = excerpt
+ .buffer
+ .point_to_point_utf16(excerpt_start_point + overshoot);
+ *start_point + (buffer_point - excerpt_start_point_utf16)
+ } else {
+ self.excerpts.summary().text.lines_utf16()
+ }
+ }
+
+ // Converts a multibuffer `Point` into a multibuffer byte offset.
+ pub fn point_to_offset(&self, point: Point) -> usize {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.point_to_offset(point);
+ }
+
+ let mut cursor = self.excerpts.cursor::<(Point, usize)>();
+ cursor.seek(&point, Bias::Right, &());
+ if let Some(excerpt) = cursor.item() {
+ let (start_point, start_offset) = cursor.start();
+ let overshoot = point - start_point;
+ let excerpt_start_offset = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let excerpt_start_point = excerpt.range.context.start.to_point(&excerpt.buffer);
+ let buffer_offset = excerpt
+ .buffer
+ .point_to_offset(excerpt_start_point + overshoot);
+ *start_offset + buffer_offset - excerpt_start_offset
+ } else {
+ self.excerpts.summary().text.len
+ }
+ }
+
+ // Converts a UTF-16 offset into a byte offset.
+ pub fn offset_utf16_to_offset(&self, offset_utf16: OffsetUtf16) -> usize {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.offset_utf16_to_offset(offset_utf16);
+ }
+
+ let mut cursor = self.excerpts.cursor::<(OffsetUtf16, usize)>();
+ cursor.seek(&offset_utf16, Bias::Right, &());
+ if let Some(excerpt) = cursor.item() {
+ let (start_offset_utf16, start_offset) = cursor.start();
+ let overshoot = offset_utf16 - start_offset_utf16;
+ let excerpt_start_offset = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let excerpt_start_offset_utf16 =
+ excerpt.buffer.offset_to_offset_utf16(excerpt_start_offset);
+ let buffer_offset = excerpt
+ .buffer
+ .offset_utf16_to_offset(excerpt_start_offset_utf16 + overshoot);
+ *start_offset + (buffer_offset - excerpt_start_offset)
+ } else {
+ self.excerpts.summary().text.len
+ }
+ }
+
+ // Converts a byte offset into a UTF-16 offset.
+ pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.offset_to_offset_utf16(offset);
+ }
+
+ let mut cursor = self.excerpts.cursor::<(usize, OffsetUtf16)>();
+ cursor.seek(&offset, Bias::Right, &());
+ if let Some(excerpt) = cursor.item() {
+ let (start_offset, start_offset_utf16) = cursor.start();
+ let overshoot = offset - start_offset;
+ let excerpt_start_offset_utf16 =
+ excerpt.range.context.start.to_offset_utf16(&excerpt.buffer);
+ let excerpt_start_offset = excerpt
+ .buffer
+ .offset_utf16_to_offset(excerpt_start_offset_utf16);
+ let buffer_offset_utf16 = excerpt
+ .buffer
+ .offset_to_offset_utf16(excerpt_start_offset + overshoot);
+ *start_offset_utf16 + (buffer_offset_utf16 - excerpt_start_offset_utf16)
+ } else {
+ self.excerpts.summary().text.len_utf16
+ }
+ }
+
+ // Converts a UTF-16 point into a byte offset.
+ pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer.point_utf16_to_offset(point);
+ }
+
+ let mut cursor = self.excerpts.cursor::<(PointUtf16, usize)>();
+ cursor.seek(&point, Bias::Right, &());
+ if let Some(excerpt) = cursor.item() {
+ let (start_point, start_offset) = cursor.start();
+ let overshoot = point - start_point;
+ let excerpt_start_offset = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let excerpt_start_point = excerpt
+ .buffer
+ .offset_to_point_utf16(excerpt.range.context.start.to_offset(&excerpt.buffer));
+ let buffer_offset = excerpt
+ .buffer
+ .point_utf16_to_offset(excerpt_start_point + overshoot);
+ *start_offset + (buffer_offset - excerpt_start_offset)
+ } else {
+ self.excerpts.summary().text.len
+ }
+ }
+
+ // Maps a multibuffer position to the underlying buffer snapshot containing
+ // it plus the corresponding buffer-local byte offset; `None` only when
+ // there are no excerpts at all.
+ pub fn point_to_buffer_offset<T: ToOffset>(
+ &self,
+ point: T,
+ ) -> Option<(&BufferSnapshot, usize)> {
+ let offset = point.to_offset(&self);
+ let mut cursor = self.excerpts.cursor::<usize>();
+ cursor.seek(&offset, Bias::Right, &());
+ // At the very end of the multibuffer the cursor lands past the last
+ // excerpt; step back so the position resolves into that excerpt.
+ if cursor.item().is_none() {
+ cursor.prev(&());
+ }
+
+ cursor.item().map(|excerpt| {
+ let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let buffer_point = excerpt_start + offset - *cursor.start();
+ (&excerpt.buffer, buffer_point)
+ })
+ }
+
+ // Computes language-suggested indentation for the given multibuffer rows.
+ // Rows are grouped by the excerpt that contains them, translated into
+ // buffer rows, passed to the buffer's indent suggestion logic, and the
+ // results translated back into multibuffer rows.
+ pub fn suggested_indents(
+ &self,
+ rows: impl IntoIterator<Item = u32>,
+ cx: &AppContext,
+ ) -> BTreeMap<u32, IndentSize> {
+ let mut result = BTreeMap::new();
+
+ let mut rows_for_excerpt = Vec::new();
+ let mut cursor = self.excerpts.cursor::<Point>();
+ let mut rows = rows.into_iter().peekable();
+ let mut prev_row = u32::MAX;
+ let mut prev_language_indent_size = IndentSize::default();
+
+ while let Some(row) = rows.next() {
+ cursor.seek(&Point::new(row, 0), Bias::Right, &());
+ let excerpt = match cursor.item() {
+ Some(excerpt) => excerpt,
+ _ => continue,
+ };
+
+ // Retrieve the language and indent size once for each disjoint region being indented.
+ let single_indent_size = if row.saturating_sub(1) == prev_row {
+ prev_language_indent_size
+ } else {
+ excerpt
+ .buffer
+ .language_indent_size_at(Point::new(row, 0), cx)
+ };
+ prev_language_indent_size = single_indent_size;
+ prev_row = row;
+
+ let start_buffer_row = excerpt.range.context.start.to_point(&excerpt.buffer).row;
+ let start_multibuffer_row = cursor.start().row;
+
+ // Consume all subsequent requested rows that fall inside this same
+ // excerpt so the buffer is consulted once per excerpt.
+ rows_for_excerpt.push(row);
+ while let Some(next_row) = rows.peek().copied() {
+ if cursor.end(&()).row > next_row {
+ rows_for_excerpt.push(next_row);
+ rows.next();
+ } else {
+ break;
+ }
+ }
+
+ let buffer_rows = rows_for_excerpt
+ .drain(..)
+ .map(|row| start_buffer_row + row - start_multibuffer_row);
+ let buffer_indents = excerpt
+ .buffer
+ .suggested_indents(buffer_rows, single_indent_size);
+ let multibuffer_indents = buffer_indents
+ .into_iter()
+ .map(|(row, indent)| (start_multibuffer_row + row - start_buffer_row, indent));
+ result.extend(multibuffer_indents);
+ }
+
+ result
+ }
+
+ // Indentation of multibuffer line `row`, clipped to the portion of the
+ // line that actually lies inside its excerpt; rows outside any excerpt
+ // report zero spaces.
+ pub fn indent_size_for_line(&self, row: u32) -> IndentSize {
+ if let Some((buffer, range)) = self.buffer_line_for_row(row) {
+ let mut size = buffer.indent_size_for_line(range.start.row);
+ // Clamp the indent to the excerpt's column window on this line.
+ size.len = size
+ .len
+ .min(range.end.column)
+ .saturating_sub(range.start.column);
+ size
+ } else {
+ IndentSize::spaces(0)
+ }
+ }
+
+ /// Scans upward from `row` (exclusive) and returns the closest preceding
+ /// row containing a non-whitespace character, if any.
+ pub fn prev_non_blank_row(&self, row: u32) -> Option<u32> {
+ (0..row).rev().find(|&row| !self.is_line_blank(row))
+ }
+
+ /// Length in columns of multibuffer line `row`, restricted to the columns
+ /// that fall inside its excerpt; rows outside any excerpt report 0.
+ pub fn line_len(&self, row: u32) -> u32 {
+ match self.buffer_line_for_row(row) {
+ Some((_, range)) => range.end.column - range.start.column,
+ None => 0,
+ }
+ }
+
+ // Resolves multibuffer row `row` to the underlying buffer snapshot and
+ // the buffer-coordinate range of that line, clipped to the excerpt's
+ // context range.
+ pub fn buffer_line_for_row(&self, row: u32) -> Option<(&BufferSnapshot, Range<Point>)> {
+ let mut cursor = self.excerpts.cursor::<Point>();
+ let point = Point::new(row, 0);
+ cursor.seek(&point, Bias::Right, &());
+ // The last row of the multibuffer lands just past the final excerpt;
+ // step back so it resolves into that excerpt.
+ if cursor.item().is_none() && *cursor.start() == point {
+ cursor.prev(&());
+ }
+ if let Some(excerpt) = cursor.item() {
+ let overshoot = row - cursor.start().row;
+ let excerpt_start = excerpt.range.context.start.to_point(&excerpt.buffer);
+ let excerpt_end = excerpt.range.context.end.to_point(&excerpt.buffer);
+ let buffer_row = excerpt_start.row + overshoot;
+ let line_start = Point::new(buffer_row, 0);
+ let line_end = Point::new(buffer_row, excerpt.buffer.line_len(buffer_row));
+ return Some((
+ &excerpt.buffer,
+ line_start.max(excerpt_start)..line_end.min(excerpt_end),
+ ));
+ }
+ None
+ }
+
+ // Last valid `Point` in the multibuffer (end of the final line).
+ pub fn max_point(&self) -> Point {
+ self.text_summary().lines
+ }
+
+ // Aggregate text summary (length, lines, etc.) of the whole multibuffer.
+ pub fn text_summary(&self) -> TextSummary {
+ self.excerpts.summary().text.clone()
+ }
+
+ // Computes a text summary (in dimension `D`) for an arbitrary multibuffer
+ // range, stitched together from three parts: the tail of the first
+ // excerpt, the fully-covered middle excerpts (from the cursor's seek
+ // summary), and the head of the final excerpt.
+ pub fn text_summary_for_range<D, O>(&self, range: Range<O>) -> D
+ where
+ D: TextDimension,
+ O: ToOffset,
+ {
+ let mut summary = D::default();
+ let mut range = range.start.to_offset(self)..range.end.to_offset(self);
+ let mut cursor = self.excerpts.cursor::<usize>();
+ cursor.seek(&range.start, Bias::Right, &());
+ if let Some(excerpt) = cursor.item() {
+ // The synthetic trailing newline is not part of the buffer, so it
+ // is summarized separately below.
+ let mut end_before_newline = cursor.end(&());
+ if excerpt.has_trailing_newline {
+ end_before_newline -= 1;
+ }
+
+ let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let start_in_excerpt = excerpt_start + (range.start - cursor.start());
+ let end_in_excerpt =
+ excerpt_start + (cmp::min(end_before_newline, range.end) - cursor.start());
+ summary.add_assign(
+ &excerpt
+ .buffer
+ .text_summary_for_range(start_in_excerpt..end_in_excerpt),
+ );
+
+ if range.end > end_before_newline {
+ summary.add_assign(&D::from_text_summary(&TextSummary::from("\n")));
+ }
+
+ cursor.next(&());
+ }
+
+ if range.end > *cursor.start() {
+ // Summarize all excerpts fully contained in the range in one seek.
+ summary.add_assign(&D::from_text_summary(&cursor.summary::<_, TextSummary>(
+ &range.end,
+ Bias::Right,
+ &(),
+ )));
+ if let Some(excerpt) = cursor.item() {
+ range.end = cmp::max(*cursor.start(), range.end);
+
+ let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let end_in_excerpt = excerpt_start + (range.end - cursor.start());
+ summary.add_assign(
+ &excerpt
+ .buffer
+ .text_summary_for_range(excerpt_start..end_in_excerpt),
+ );
+ }
+ }
+
+ summary
+ }
+
+ // Resolves an `Anchor` to a position in dimension `D`: the summary up to
+ // the anchor's excerpt plus the anchor's offset within it, clamped to the
+ // excerpt's context range. Anchors whose excerpt no longer exists resolve
+ // to the boundary where the excerpt used to be.
+ pub fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
+ where
+ D: TextDimension + Ord + Sub<D, Output = D>,
+ {
+ let mut cursor = self.excerpts.cursor::<ExcerptSummary>();
+ let locator = self.excerpt_locator_for_id(anchor.excerpt_id);
+
+ cursor.seek(locator, Bias::Left, &());
+ if cursor.item().is_none() {
+ cursor.next(&());
+ }
+
+ let mut position = D::from_text_summary(&cursor.start().text);
+ if let Some(excerpt) = cursor.item() {
+ if excerpt.id == anchor.excerpt_id {
+ let excerpt_buffer_start =
+ excerpt.range.context.start.summary::<D>(&excerpt.buffer);
+ let excerpt_buffer_end = excerpt.range.context.end.summary::<D>(&excerpt.buffer);
+ // Clamp to the excerpt end: the anchor's buffer position may have
+ // drifted outside the excerpt after buffer edits.
+ let buffer_position = cmp::min(
+ excerpt_buffer_end,
+ anchor.text_anchor.summary::<D>(&excerpt.buffer),
+ );
+ if buffer_position > excerpt_buffer_start {
+ position.add_assign(&(buffer_position - excerpt_buffer_start));
+ }
+ }
+ }
+ position
+ }
+
+ // Batch form of `summary_for_anchor`: resolves many anchors in a single
+ // forward cursor pass. Anchors must arrive grouped by excerpt in
+ // multibuffer order (`seek_forward` never rewinds).
+ pub fn summaries_for_anchors<'a, D, I>(&'a self, anchors: I) -> Vec<D>
+ where
+ D: TextDimension + Ord + Sub<D, Output = D>,
+ I: 'a + IntoIterator<Item = &'a Anchor>,
+ {
+ if let Some((_, _, buffer)) = self.as_singleton() {
+ return buffer
+ .summaries_for_anchors(anchors.into_iter().map(|a| &a.text_anchor))
+ .collect();
+ }
+
+ let mut anchors = anchors.into_iter().peekable();
+ let mut cursor = self.excerpts.cursor::<ExcerptSummary>();
+ let mut summaries = Vec::new();
+ while let Some(anchor) = anchors.peek() {
+ let excerpt_id = anchor.excerpt_id;
+ // Lazily drain the run of consecutive anchors sharing this excerpt.
+ let excerpt_anchors = iter::from_fn(|| {
+ let anchor = anchors.peek()?;
+ if anchor.excerpt_id == excerpt_id {
+ Some(&anchors.next().unwrap().text_anchor)
+ } else {
+ None
+ }
+ });
+
+ let locator = self.excerpt_locator_for_id(excerpt_id);
+ cursor.seek_forward(locator, Bias::Left, &());
+ if cursor.item().is_none() {
+ cursor.next(&());
+ }
+
+ let position = D::from_text_summary(&cursor.start().text);
+ if let Some(excerpt) = cursor.item() {
+ if excerpt.id == excerpt_id {
+ let excerpt_buffer_start =
+ excerpt.range.context.start.summary::<D>(&excerpt.buffer);
+ let excerpt_buffer_end =
+ excerpt.range.context.end.summary::<D>(&excerpt.buffer);
+ summaries.extend(
+ excerpt
+ .buffer
+ .summaries_for_anchors::<D, _>(excerpt_anchors)
+ .map(move |summary| {
+ // Clamp to the excerpt range, mirroring
+ // `summary_for_anchor`.
+ let summary = cmp::min(excerpt_buffer_end.clone(), summary);
+ let mut position = position.clone();
+ let excerpt_buffer_start = excerpt_buffer_start.clone();
+ if summary > excerpt_buffer_start {
+ position.add_assign(&(summary - excerpt_buffer_start));
+ }
+ position
+ }),
+ );
+ continue;
+ }
+ }
+
+ // Excerpt no longer exists: every anchor in the run resolves to the
+ // boundary position where the excerpt used to be.
+ summaries.extend(excerpt_anchors.map(|_| position.clone()));
+ }
+
+ summaries
+ }
+
+ // Re-resolves anchors after excerpts were added/removed. Returns, for each
+ // input anchor, its original index, the (possibly relocated) anchor, and
+ // whether the anchor kept its exact position. Anchors must arrive sorted
+ // by excerpt in multibuffer order (`seek_forward` only moves forward).
+ pub fn refresh_anchors<'a, I>(&'a self, anchors: I) -> Vec<(usize, Anchor, bool)>
+ where
+ I: 'a + IntoIterator<Item = &'a Anchor>,
+ {
+ let mut anchors = anchors.into_iter().enumerate().peekable();
+ let mut cursor = self.excerpts.cursor::<Option<&Locator>>();
+ cursor.next(&());
+
+ let mut result = Vec::new();
+
+ while let Some((_, anchor)) = anchors.peek() {
+ let old_excerpt_id = anchor.excerpt_id;
+
+ // Find the location where this anchor's excerpt should be.
+ let old_locator = self.excerpt_locator_for_id(old_excerpt_id);
+ cursor.seek_forward(&Some(old_locator), Bias::Left, &());
+
+ if cursor.item().is_none() {
+ cursor.next(&());
+ }
+
+ let next_excerpt = cursor.item();
+ let prev_excerpt = cursor.prev_item();
+
+ // Process all of the anchors for this excerpt.
+ while let Some((_, anchor)) = anchors.peek() {
+ if anchor.excerpt_id != old_excerpt_id {
+ break;
+ }
+ let (anchor_ix, anchor) = anchors.next().unwrap();
+ let mut anchor = *anchor;
+
+ // Leave min and max anchors unchanged if invalid or
+ // if the old excerpt still exists at this location
+ let mut kept_position = next_excerpt
+ .map_or(false, |e| e.id == old_excerpt_id && e.contains(&anchor))
+ || old_excerpt_id == ExcerptId::max()
+ || old_excerpt_id == ExcerptId::min();
+
+ // If the old excerpt no longer exists at this location, then attempt to
+ // find an equivalent position for this anchor in an adjacent excerpt.
+ if !kept_position {
+ for excerpt in [next_excerpt, prev_excerpt].iter().filter_map(|e| *e) {
+ if excerpt.contains(&anchor) {
+ anchor.excerpt_id = excerpt.id.clone();
+ kept_position = true;
+ break;
+ }
+ }
+ }
+
+ // If there's no adjacent excerpt that contains the anchor's position,
+ // then report that the anchor has lost its position.
+ if !kept_position {
+ // Snap to the nearest excerpt boundary, preserving the
+ // anchor's bias and clamping into the excerpt's context range.
+ anchor = if let Some(excerpt) = next_excerpt {
+ let mut text_anchor = excerpt
+ .range
+ .context
+ .start
+ .bias(anchor.text_anchor.bias, &excerpt.buffer);
+ if text_anchor
+ .cmp(&excerpt.range.context.end, &excerpt.buffer)
+ .is_gt()
+ {
+ text_anchor = excerpt.range.context.end;
+ }
+ Anchor {
+ buffer_id: Some(excerpt.buffer_id),
+ excerpt_id: excerpt.id.clone(),
+ text_anchor,
+ }
+ } else if let Some(excerpt) = prev_excerpt {
+ let mut text_anchor = excerpt
+ .range
+ .context
+ .end
+ .bias(anchor.text_anchor.bias, &excerpt.buffer);
+ if text_anchor
+ .cmp(&excerpt.range.context.start, &excerpt.buffer)
+ .is_lt()
+ {
+ text_anchor = excerpt.range.context.start;
+ }
+ Anchor {
+ buffer_id: Some(excerpt.buffer_id),
+ excerpt_id: excerpt.id.clone(),
+ text_anchor,
+ }
+ } else if anchor.text_anchor.bias == Bias::Left {
+ Anchor::min()
+ } else {
+ Anchor::max()
+ };
+ }
+
+ result.push((anchor_ix, anchor, kept_position));
+ }
+ }
+ // Relocation may reorder anchors relative to each other; restore order.
+ result.sort_unstable_by(|a, b| a.1.cmp(&b.1, self));
+ result
+ }
+
+ pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
+ self.anchor_at(position, Bias::Left)
+ }
+
+ pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
+ self.anchor_at(position, Bias::Right)
+ }
+
+ pub fn anchor_at<T: ToOffset>(&self, position: T, mut bias: Bias) -> Anchor {
+ let offset = position.to_offset(self);
+ if let Some((excerpt_id, buffer_id, buffer)) = self.as_singleton() {
+ return Anchor {
+ buffer_id: Some(buffer_id),
+ excerpt_id: excerpt_id.clone(),
+ text_anchor: buffer.anchor_at(offset, bias),
+ };
+ }
+
+ let mut cursor = self.excerpts.cursor::<(usize, Option<ExcerptId>)>();
+ cursor.seek(&offset, Bias::Right, &());
+ if cursor.item().is_none() && offset == cursor.start().0 && bias == Bias::Left {
+ cursor.prev(&());
+ }
+ if let Some(excerpt) = cursor.item() {
+ let mut overshoot = offset.saturating_sub(cursor.start().0);
+ if excerpt.has_trailing_newline && offset == cursor.end(&()).0 {
+ overshoot -= 1;
+ bias = Bias::Right;
+ }
+
+ let buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let text_anchor =
+ excerpt.clip_anchor(excerpt.buffer.anchor_at(buffer_start + overshoot, bias));
+ Anchor {
+ buffer_id: Some(excerpt.buffer_id),
+ excerpt_id: excerpt.id.clone(),
+ text_anchor,
+ }
+ } else if offset == 0 && bias == Bias::Left {
+ Anchor::min()
+ } else {
+ Anchor::max()
+ }
+ }
+
+ pub fn anchor_in_excerpt(&self, excerpt_id: ExcerptId, text_anchor: text::Anchor) -> Anchor {
+ let locator = self.excerpt_locator_for_id(excerpt_id);
+ let mut cursor = self.excerpts.cursor::<Option<&Locator>>();
+ cursor.seek(locator, Bias::Left, &());
+ if let Some(excerpt) = cursor.item() {
+ if excerpt.id == excerpt_id {
+ let text_anchor = excerpt.clip_anchor(text_anchor);
+ drop(cursor);
+ return Anchor {
+ buffer_id: Some(excerpt.buffer_id),
+ excerpt_id,
+ text_anchor,
+ };
+ }
+ }
+ panic!("excerpt not found");
+ }
+
+ pub fn can_resolve(&self, anchor: &Anchor) -> bool {
+ if anchor.excerpt_id == ExcerptId::min() || anchor.excerpt_id == ExcerptId::max() {
+ true
+ } else if let Some(excerpt) = self.excerpt(anchor.excerpt_id) {
+ excerpt.buffer.can_resolve(&anchor.text_anchor)
+ } else {
+ false
+ }
+ }
+
+ pub fn excerpts(
+ &self,
+ ) -> impl Iterator<Item = (ExcerptId, &BufferSnapshot, ExcerptRange<text::Anchor>)> {
+ self.excerpts
+ .iter()
+ .map(|excerpt| (excerpt.id, &excerpt.buffer, excerpt.range.clone()))
+ }
+
+ pub fn excerpt_boundaries_in_range<R, T>(
+ &self,
+ range: R,
+ ) -> impl Iterator<Item = ExcerptBoundary> + '_
+ where
+ R: RangeBounds<T>,
+ T: ToOffset,
+ {
+ let start_offset;
+ let start = match range.start_bound() {
+ Bound::Included(start) => {
+ start_offset = start.to_offset(self);
+ Bound::Included(start_offset)
+ }
+ Bound::Excluded(start) => {
+ start_offset = start.to_offset(self);
+ Bound::Excluded(start_offset)
+ }
+ Bound::Unbounded => {
+ start_offset = 0;
+ Bound::Unbounded
+ }
+ };
+ let end = match range.end_bound() {
+ Bound::Included(end) => Bound::Included(end.to_offset(self)),
+ Bound::Excluded(end) => Bound::Excluded(end.to_offset(self)),
+ Bound::Unbounded => Bound::Unbounded,
+ };
+ let bounds = (start, end);
+
+ let mut cursor = self.excerpts.cursor::<(usize, Point)>();
+ cursor.seek(&start_offset, Bias::Right, &());
+ if cursor.item().is_none() {
+ cursor.prev(&());
+ }
+ if !bounds.contains(&cursor.start().0) {
+ cursor.next(&());
+ }
+
+ let mut prev_buffer_id = cursor.prev_item().map(|excerpt| excerpt.buffer_id);
+ std::iter::from_fn(move || {
+ if self.singleton {
+ None
+ } else if bounds.contains(&cursor.start().0) {
+ let excerpt = cursor.item()?;
+ let starts_new_buffer = Some(excerpt.buffer_id) != prev_buffer_id;
+ let boundary = ExcerptBoundary {
+ id: excerpt.id.clone(),
+ row: cursor.start().1.row,
+ buffer: excerpt.buffer.clone(),
+ range: excerpt.range.clone(),
+ starts_new_buffer,
+ };
+
+ prev_buffer_id = Some(excerpt.buffer_id);
+ cursor.next(&());
+ Some(boundary)
+ } else {
+ None
+ }
+ })
+ }
+
+ pub fn edit_count(&self) -> usize {
+ self.edit_count
+ }
+
+ pub fn parse_count(&self) -> usize {
+ self.parse_count
+ }
+
+ /// Returns the smallest enclosing bracket ranges containing the given range or
+ /// None if no brackets contain range or the range is not contained in a single
+ /// excerpt
+ pub fn innermost_enclosing_bracket_ranges<T: ToOffset>(
+ &self,
+ range: Range<T>,
+ ) -> Option<(Range<usize>, Range<usize>)> {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+
+ // Get the ranges of the innermost pair of brackets.
+ let mut result: Option<(Range<usize>, Range<usize>)> = None;
+
+ let Some(enclosing_bracket_ranges) = self.enclosing_bracket_ranges(range.clone()) else {
+ return None;
+ };
+
+ for (open, close) in enclosing_bracket_ranges {
+ let len = close.end - open.start;
+
+ if let Some((existing_open, existing_close)) = &result {
+ let existing_len = existing_close.end - existing_open.start;
+ if len > existing_len {
+ continue;
+ }
+ }
+
+ result = Some((open, close));
+ }
+
+ result
+ }
+
+ /// Returns enclosing bracket ranges containing the given range or returns None if the range is
+ /// not contained in a single excerpt
+ pub fn enclosing_bracket_ranges<'a, T: ToOffset>(
+ &'a self,
+ range: Range<T>,
+ ) -> Option<impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a> {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+
+ self.bracket_ranges(range.clone()).map(|range_pairs| {
+ range_pairs
+ .filter(move |(open, close)| open.start <= range.start && close.end >= range.end)
+ })
+ }
+
+ /// Returns bracket range pairs overlapping the given `range` or returns None if the `range` is
+ /// not contained in a single excerpt
+ pub fn bracket_ranges<'a, T: ToOffset>(
+ &'a self,
+ range: Range<T>,
+ ) -> Option<impl Iterator<Item = (Range<usize>, Range<usize>)> + 'a> {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+ let excerpt = self.excerpt_containing(range.clone());
+ excerpt.map(|(excerpt, excerpt_offset)| {
+ let excerpt_buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let excerpt_buffer_end = excerpt_buffer_start + excerpt.text_summary.len;
+
+ let start_in_buffer = excerpt_buffer_start + range.start.saturating_sub(excerpt_offset);
+ let end_in_buffer = excerpt_buffer_start + range.end.saturating_sub(excerpt_offset);
+
+ excerpt
+ .buffer
+ .bracket_ranges(start_in_buffer..end_in_buffer)
+ .filter_map(move |(start_bracket_range, end_bracket_range)| {
+ if start_bracket_range.start < excerpt_buffer_start
+ || end_bracket_range.end > excerpt_buffer_end
+ {
+ return None;
+ }
+
+ let mut start_bracket_range = start_bracket_range.clone();
+ start_bracket_range.start =
+ excerpt_offset + (start_bracket_range.start - excerpt_buffer_start);
+ start_bracket_range.end =
+ excerpt_offset + (start_bracket_range.end - excerpt_buffer_start);
+
+ let mut end_bracket_range = end_bracket_range.clone();
+ end_bracket_range.start =
+ excerpt_offset + (end_bracket_range.start - excerpt_buffer_start);
+ end_bracket_range.end =
+ excerpt_offset + (end_bracket_range.end - excerpt_buffer_start);
+ Some((start_bracket_range, end_bracket_range))
+ })
+ })
+ }
+
+ pub fn diagnostics_update_count(&self) -> usize {
+ self.diagnostics_update_count
+ }
+
+ pub fn git_diff_update_count(&self) -> usize {
+ self.git_diff_update_count
+ }
+
+ pub fn trailing_excerpt_update_count(&self) -> usize {
+ self.trailing_excerpt_update_count
+ }
+
+ pub fn file_at<'a, T: ToOffset>(&'a self, point: T) -> Option<&'a Arc<dyn File>> {
+ self.point_to_buffer_offset(point)
+ .and_then(|(buffer, _)| buffer.file())
+ }
+
+ pub fn language_at<'a, T: ToOffset>(&'a self, point: T) -> Option<&'a Arc<Language>> {
+ self.point_to_buffer_offset(point)
+ .and_then(|(buffer, offset)| buffer.language_at(offset))
+ }
+
+ pub fn settings_at<'a, T: ToOffset>(
+ &'a self,
+ point: T,
+ cx: &'a AppContext,
+ ) -> &'a LanguageSettings {
+ let mut language = None;
+ let mut file = None;
+ if let Some((buffer, offset)) = self.point_to_buffer_offset(point) {
+ language = buffer.language_at(offset);
+ file = buffer.file();
+ }
+ language_settings(language, file, cx)
+ }
+
+ pub fn language_scope_at<'a, T: ToOffset>(&'a self, point: T) -> Option<LanguageScope> {
+ self.point_to_buffer_offset(point)
+ .and_then(|(buffer, offset)| buffer.language_scope_at(offset))
+ }
+
+ pub fn language_indent_size_at<T: ToOffset>(
+ &self,
+ position: T,
+ cx: &AppContext,
+ ) -> Option<IndentSize> {
+ let (buffer_snapshot, offset) = self.point_to_buffer_offset(position)?;
+ Some(buffer_snapshot.language_indent_size_at(offset, cx))
+ }
+
+ pub fn is_dirty(&self) -> bool {
+ self.is_dirty
+ }
+
+ pub fn has_conflict(&self) -> bool {
+ self.has_conflict
+ }
+
+ pub fn diagnostic_group<'a, O>(
+ &'a self,
+ group_id: usize,
+ ) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
+ where
+ O: text::FromAnchor + 'a,
+ {
+ self.as_singleton()
+ .into_iter()
+ .flat_map(move |(_, _, buffer)| buffer.diagnostic_group(group_id))
+ }
+
+ pub fn diagnostics_in_range<'a, T, O>(
+ &'a self,
+ range: Range<T>,
+ reversed: bool,
+ ) -> impl Iterator<Item = DiagnosticEntry<O>> + 'a
+ where
+ T: 'a + ToOffset,
+ O: 'a + text::FromAnchor + Ord,
+ {
+ self.as_singleton()
+ .into_iter()
+ .flat_map(move |(_, _, buffer)| {
+ buffer.diagnostics_in_range(
+ range.start.to_offset(self)..range.end.to_offset(self),
+ reversed,
+ )
+ })
+ }
+
+ pub fn has_git_diffs(&self) -> bool {
+ for excerpt in self.excerpts.iter() {
+ if !excerpt.buffer.git_diff.is_empty() {
+ return true;
+ }
+ }
+ false
+ }
+
+ pub fn git_diff_hunks_in_range_rev<'a>(
+ &'a self,
+ row_range: Range<u32>,
+ ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ let mut cursor = self.excerpts.cursor::<Point>();
+
+ cursor.seek(&Point::new(row_range.end, 0), Bias::Left, &());
+ if cursor.item().is_none() {
+ cursor.prev(&());
+ }
+
+ std::iter::from_fn(move || {
+ let excerpt = cursor.item()?;
+ let multibuffer_start = *cursor.start();
+ let multibuffer_end = multibuffer_start + excerpt.text_summary.lines;
+ if multibuffer_start.row >= row_range.end {
+ return None;
+ }
+
+ let mut buffer_start = excerpt.range.context.start;
+ let mut buffer_end = excerpt.range.context.end;
+ let excerpt_start_point = buffer_start.to_point(&excerpt.buffer);
+ let excerpt_end_point = excerpt_start_point + excerpt.text_summary.lines;
+
+ if row_range.start > multibuffer_start.row {
+ let buffer_start_point =
+ excerpt_start_point + Point::new(row_range.start - multibuffer_start.row, 0);
+ buffer_start = excerpt.buffer.anchor_before(buffer_start_point);
+ }
+
+ if row_range.end < multibuffer_end.row {
+ let buffer_end_point =
+ excerpt_start_point + Point::new(row_range.end - multibuffer_start.row, 0);
+ buffer_end = excerpt.buffer.anchor_before(buffer_end_point);
+ }
+
+ let buffer_hunks = excerpt
+ .buffer
+ .git_diff_hunks_intersecting_range_rev(buffer_start..buffer_end)
+ .filter_map(move |hunk| {
+ let start = multibuffer_start.row
+ + hunk
+ .buffer_range
+ .start
+ .saturating_sub(excerpt_start_point.row);
+ let end = multibuffer_start.row
+ + hunk
+ .buffer_range
+ .end
+ .min(excerpt_end_point.row + 1)
+ .saturating_sub(excerpt_start_point.row);
+
+ Some(DiffHunk {
+ buffer_range: start..end,
+ diff_base_byte_range: hunk.diff_base_byte_range.clone(),
+ })
+ });
+
+ cursor.prev(&());
+
+ Some(buffer_hunks)
+ })
+ .flatten()
+ }
+
+ pub fn git_diff_hunks_in_range<'a>(
+ &'a self,
+ row_range: Range<u32>,
+ ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ let mut cursor = self.excerpts.cursor::<Point>();
+
+ cursor.seek(&Point::new(row_range.start, 0), Bias::Right, &());
+
+ std::iter::from_fn(move || {
+ let excerpt = cursor.item()?;
+ let multibuffer_start = *cursor.start();
+ let multibuffer_end = multibuffer_start + excerpt.text_summary.lines;
+ if multibuffer_start.row >= row_range.end {
+ return None;
+ }
+
+ let mut buffer_start = excerpt.range.context.start;
+ let mut buffer_end = excerpt.range.context.end;
+ let excerpt_start_point = buffer_start.to_point(&excerpt.buffer);
+ let excerpt_end_point = excerpt_start_point + excerpt.text_summary.lines;
+
+ if row_range.start > multibuffer_start.row {
+ let buffer_start_point =
+ excerpt_start_point + Point::new(row_range.start - multibuffer_start.row, 0);
+ buffer_start = excerpt.buffer.anchor_before(buffer_start_point);
+ }
+
+ if row_range.end < multibuffer_end.row {
+ let buffer_end_point =
+ excerpt_start_point + Point::new(row_range.end - multibuffer_start.row, 0);
+ buffer_end = excerpt.buffer.anchor_before(buffer_end_point);
+ }
+
+ let buffer_hunks = excerpt
+ .buffer
+ .git_diff_hunks_intersecting_range(buffer_start..buffer_end)
+ .filter_map(move |hunk| {
+ let start = multibuffer_start.row
+ + hunk
+ .buffer_range
+ .start
+ .saturating_sub(excerpt_start_point.row);
+ let end = multibuffer_start.row
+ + hunk
+ .buffer_range
+ .end
+ .min(excerpt_end_point.row + 1)
+ .saturating_sub(excerpt_start_point.row);
+
+ Some(DiffHunk {
+ buffer_range: start..end,
+ diff_base_byte_range: hunk.diff_base_byte_range.clone(),
+ })
+ });
+
+ cursor.next(&());
+
+ Some(buffer_hunks)
+ })
+ .flatten()
+ }
+
+ pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+
+ self.excerpt_containing(range.clone())
+ .and_then(|(excerpt, excerpt_offset)| {
+ let excerpt_buffer_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let excerpt_buffer_end = excerpt_buffer_start + excerpt.text_summary.len;
+
+ let start_in_buffer =
+ excerpt_buffer_start + range.start.saturating_sub(excerpt_offset);
+ let end_in_buffer = excerpt_buffer_start + range.end.saturating_sub(excerpt_offset);
+ let mut ancestor_buffer_range = excerpt
+ .buffer
+ .range_for_syntax_ancestor(start_in_buffer..end_in_buffer)?;
+ ancestor_buffer_range.start =
+ cmp::max(ancestor_buffer_range.start, excerpt_buffer_start);
+ ancestor_buffer_range.end = cmp::min(ancestor_buffer_range.end, excerpt_buffer_end);
+
+ let start = excerpt_offset + (ancestor_buffer_range.start - excerpt_buffer_start);
+ let end = excerpt_offset + (ancestor_buffer_range.end - excerpt_buffer_start);
+ Some(start..end)
+ })
+ }
+
+ pub fn outline(&self, theme: Option<&SyntaxTheme>) -> Option<Outline<Anchor>> {
+ let (excerpt_id, _, buffer) = self.as_singleton()?;
+ let outline = buffer.outline(theme)?;
+ Some(Outline::new(
+ outline
+ .items
+ .into_iter()
+ .map(|item| OutlineItem {
+ depth: item.depth,
+ range: self.anchor_in_excerpt(excerpt_id.clone(), item.range.start)
+ ..self.anchor_in_excerpt(excerpt_id.clone(), item.range.end),
+ text: item.text,
+ highlight_ranges: item.highlight_ranges,
+ name_ranges: item.name_ranges,
+ })
+ .collect(),
+ ))
+ }
+
+ pub fn symbols_containing<T: ToOffset>(
+ &self,
+ offset: T,
+ theme: Option<&SyntaxTheme>,
+ ) -> Option<(u64, Vec<OutlineItem<Anchor>>)> {
+ let anchor = self.anchor_before(offset);
+ let excerpt_id = anchor.excerpt_id;
+ let excerpt = self.excerpt(excerpt_id)?;
+ Some((
+ excerpt.buffer_id,
+ excerpt
+ .buffer
+ .symbols_containing(anchor.text_anchor, theme)
+ .into_iter()
+ .flatten()
+ .map(|item| OutlineItem {
+ depth: item.depth,
+ range: self.anchor_in_excerpt(excerpt_id, item.range.start)
+ ..self.anchor_in_excerpt(excerpt_id, item.range.end),
+ text: item.text,
+ highlight_ranges: item.highlight_ranges,
+ name_ranges: item.name_ranges,
+ })
+ .collect(),
+ ))
+ }
+
+ fn excerpt_locator_for_id<'a>(&'a self, id: ExcerptId) -> &'a Locator {
+ if id == ExcerptId::min() {
+ Locator::min_ref()
+ } else if id == ExcerptId::max() {
+ Locator::max_ref()
+ } else {
+ let mut cursor = self.excerpt_ids.cursor::<ExcerptId>();
+ cursor.seek(&id, Bias::Left, &());
+ if let Some(entry) = cursor.item() {
+ if entry.id == id {
+ return &entry.locator;
+ }
+ }
+ panic!("invalid excerpt id {:?}", id)
+ }
+ }
+
+ pub fn buffer_id_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<u64> {
+ Some(self.excerpt(excerpt_id)?.buffer_id)
+ }
+
+ pub fn buffer_for_excerpt(&self, excerpt_id: ExcerptId) -> Option<&BufferSnapshot> {
+ Some(&self.excerpt(excerpt_id)?.buffer)
+ }
+
+ fn excerpt<'a>(&'a self, excerpt_id: ExcerptId) -> Option<&'a Excerpt> {
+ let mut cursor = self.excerpts.cursor::<Option<&Locator>>();
+ let locator = self.excerpt_locator_for_id(excerpt_id);
+ cursor.seek(&Some(locator), Bias::Left, &());
+ if let Some(excerpt) = cursor.item() {
+ if excerpt.id == excerpt_id {
+ return Some(excerpt);
+ }
+ }
+ None
+ }
+
+ /// Returns the excerpt containing range and its offset start within the multibuffer or none if `range` spans multiple excerpts
+ fn excerpt_containing<'a, T: ToOffset>(
+ &'a self,
+ range: Range<T>,
+ ) -> Option<(&'a Excerpt, usize)> {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+
+ let mut cursor = self.excerpts.cursor::<usize>();
+ cursor.seek(&range.start, Bias::Right, &());
+ let start_excerpt = cursor.item();
+
+ if range.start == range.end {
+ return start_excerpt.map(|excerpt| (excerpt, *cursor.start()));
+ }
+
+ cursor.seek(&range.end, Bias::Right, &());
+ let end_excerpt = cursor.item();
+
+ start_excerpt
+ .zip(end_excerpt)
+ .and_then(|(start_excerpt, end_excerpt)| {
+ if start_excerpt.id != end_excerpt.id {
+ return None;
+ }
+
+ Some((start_excerpt, *cursor.start()))
+ })
+ }
+
+ pub fn remote_selections_in_range<'a>(
+ &'a self,
+ range: &'a Range<Anchor>,
+ ) -> impl 'a + Iterator<Item = (ReplicaId, bool, CursorShape, Selection<Anchor>)> {
+ let mut cursor = self.excerpts.cursor::<ExcerptSummary>();
+ let start_locator = self.excerpt_locator_for_id(range.start.excerpt_id);
+ let end_locator = self.excerpt_locator_for_id(range.end.excerpt_id);
+ cursor.seek(start_locator, Bias::Left, &());
+ cursor
+ .take_while(move |excerpt| excerpt.locator <= *end_locator)
+ .flat_map(move |excerpt| {
+ let mut query_range = excerpt.range.context.start..excerpt.range.context.end;
+ if excerpt.id == range.start.excerpt_id {
+ query_range.start = range.start.text_anchor;
+ }
+ if excerpt.id == range.end.excerpt_id {
+ query_range.end = range.end.text_anchor;
+ }
+
+ excerpt
+ .buffer
+ .remote_selections_in_range(query_range)
+ .flat_map(move |(replica_id, line_mode, cursor_shape, selections)| {
+ selections.map(move |selection| {
+ let mut start = Anchor {
+ buffer_id: Some(excerpt.buffer_id),
+ excerpt_id: excerpt.id.clone(),
+ text_anchor: selection.start,
+ };
+ let mut end = Anchor {
+ buffer_id: Some(excerpt.buffer_id),
+ excerpt_id: excerpt.id.clone(),
+ text_anchor: selection.end,
+ };
+ if range.start.cmp(&start, self).is_gt() {
+ start = range.start.clone();
+ }
+ if range.end.cmp(&end, self).is_lt() {
+ end = range.end.clone();
+ }
+
+ (
+ replica_id,
+ line_mode,
+ cursor_shape,
+ Selection {
+ id: selection.id,
+ start,
+ end,
+ reversed: selection.reversed,
+ goal: selection.goal,
+ },
+ )
+ })
+ })
+ })
+ }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl MultiBufferSnapshot {
+ pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
+ let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right);
+ let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right);
+ start..end
+ }
+}
+
+impl History {
+ fn start_transaction(&mut self, now: Instant) -> Option<TransactionId> {
+ self.transaction_depth += 1;
+ if self.transaction_depth == 1 {
+ let id = self.next_transaction_id.tick();
+ self.undo_stack.push(Transaction {
+ id,
+ buffer_transactions: Default::default(),
+ first_edit_at: now,
+ last_edit_at: now,
+ suppress_grouping: false,
+ });
+ Some(id)
+ } else {
+ None
+ }
+ }
+
+ fn end_transaction(
+ &mut self,
+ now: Instant,
+ buffer_transactions: HashMap<u64, TransactionId>,
+ ) -> bool {
+ assert_ne!(self.transaction_depth, 0);
+ self.transaction_depth -= 1;
+ if self.transaction_depth == 0 {
+ if buffer_transactions.is_empty() {
+ self.undo_stack.pop();
+ false
+ } else {
+ self.redo_stack.clear();
+ let transaction = self.undo_stack.last_mut().unwrap();
+ transaction.last_edit_at = now;
+ for (buffer_id, transaction_id) in buffer_transactions {
+ transaction
+ .buffer_transactions
+ .entry(buffer_id)
+ .or_insert(transaction_id);
+ }
+ true
+ }
+ } else {
+ false
+ }
+ }
+
+ fn push_transaction<'a, T>(
+ &mut self,
+ buffer_transactions: T,
+ now: Instant,
+ cx: &mut ModelContext<MultiBuffer>,
+ ) where
+ T: IntoIterator<Item = (&'a Model<Buffer>, &'a language2::Transaction)>,
+ {
+ assert_eq!(self.transaction_depth, 0);
+ let transaction = Transaction {
+ id: self.next_transaction_id.tick(),
+ buffer_transactions: buffer_transactions
+ .into_iter()
+ .map(|(buffer, transaction)| (buffer.read(cx).remote_id(), transaction.id))
+ .collect(),
+ first_edit_at: now,
+ last_edit_at: now,
+ suppress_grouping: false,
+ };
+ if !transaction.buffer_transactions.is_empty() {
+ self.undo_stack.push(transaction);
+ self.redo_stack.clear();
+ }
+ }
+
+ fn finalize_last_transaction(&mut self) {
+ if let Some(transaction) = self.undo_stack.last_mut() {
+ transaction.suppress_grouping = true;
+ }
+ }
+
+ fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
+ if let Some(ix) = self
+ .undo_stack
+ .iter()
+ .rposition(|transaction| transaction.id == transaction_id)
+ {
+ Some(self.undo_stack.remove(ix))
+ } else if let Some(ix) = self
+ .redo_stack
+ .iter()
+ .rposition(|transaction| transaction.id == transaction_id)
+ {
+ Some(self.redo_stack.remove(ix))
+ } else {
+ None
+ }
+ }
+
+ fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
+ self.undo_stack
+ .iter_mut()
+ .find(|transaction| transaction.id == transaction_id)
+ .or_else(|| {
+ self.redo_stack
+ .iter_mut()
+ .find(|transaction| transaction.id == transaction_id)
+ })
+ }
+
+ fn pop_undo(&mut self) -> Option<&mut Transaction> {
+ assert_eq!(self.transaction_depth, 0);
+ if let Some(transaction) = self.undo_stack.pop() {
+ self.redo_stack.push(transaction);
+ self.redo_stack.last_mut()
+ } else {
+ None
+ }
+ }
+
+ fn pop_redo(&mut self) -> Option<&mut Transaction> {
+ assert_eq!(self.transaction_depth, 0);
+ if let Some(transaction) = self.redo_stack.pop() {
+ self.undo_stack.push(transaction);
+ self.undo_stack.last_mut()
+ } else {
+ None
+ }
+ }
+
+ fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&Transaction> {
+ let ix = self
+ .undo_stack
+ .iter()
+ .rposition(|transaction| transaction.id == transaction_id)?;
+ let transaction = self.undo_stack.remove(ix);
+ self.redo_stack.push(transaction);
+ self.redo_stack.last()
+ }
+
+ fn group(&mut self) -> Option<TransactionId> {
+ let mut count = 0;
+ let mut transactions = self.undo_stack.iter();
+ if let Some(mut transaction) = transactions.next_back() {
+ while let Some(prev_transaction) = transactions.next_back() {
+ if !prev_transaction.suppress_grouping
+ && transaction.first_edit_at - prev_transaction.last_edit_at
+ <= self.group_interval
+ {
+ transaction = prev_transaction;
+ count += 1;
+ } else {
+ break;
+ }
+ }
+ }
+ self.group_trailing(count)
+ }
+
+ fn group_until(&mut self, transaction_id: TransactionId) {
+ let mut count = 0;
+ for transaction in self.undo_stack.iter().rev() {
+ if transaction.id == transaction_id {
+ self.group_trailing(count);
+ break;
+ } else if transaction.suppress_grouping {
+ break;
+ } else {
+ count += 1;
+ }
+ }
+ }
+
+ fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
+ let new_len = self.undo_stack.len() - n;
+ let (transactions_to_keep, transactions_to_merge) = self.undo_stack.split_at_mut(new_len);
+ if let Some(last_transaction) = transactions_to_keep.last_mut() {
+ if let Some(transaction) = transactions_to_merge.last() {
+ last_transaction.last_edit_at = transaction.last_edit_at;
+ }
+ for to_merge in transactions_to_merge {
+ for (buffer_id, transaction_id) in &to_merge.buffer_transactions {
+ last_transaction
+ .buffer_transactions
+ .entry(*buffer_id)
+ .or_insert(*transaction_id);
+ }
+ }
+ }
+
+ self.undo_stack.truncate(new_len);
+ self.undo_stack.last().map(|t| t.id)
+ }
+}
+
+impl Excerpt {
+ fn new(
+ id: ExcerptId,
+ locator: Locator,
+ buffer_id: u64,
+ buffer: BufferSnapshot,
+ range: ExcerptRange<text::Anchor>,
+ has_trailing_newline: bool,
+ ) -> Self {
+ Excerpt {
+ id,
+ locator,
+ max_buffer_row: range.context.end.to_point(&buffer).row,
+ text_summary: buffer
+ .text_summary_for_range::<TextSummary, _>(range.context.to_offset(&buffer)),
+ buffer_id,
+ buffer,
+ range,
+ has_trailing_newline,
+ }
+ }
+
+ fn chunks_in_range(&self, range: Range<usize>, language_aware: bool) -> ExcerptChunks {
+ let content_start = self.range.context.start.to_offset(&self.buffer);
+ let chunks_start = content_start + range.start;
+ let chunks_end = content_start + cmp::min(range.end, self.text_summary.len);
+
+ let footer_height = if self.has_trailing_newline
+ && range.start <= self.text_summary.len
+ && range.end > self.text_summary.len
+ {
+ 1
+ } else {
+ 0
+ };
+
+ let content_chunks = self.buffer.chunks(chunks_start..chunks_end, language_aware);
+
+ ExcerptChunks {
+ content_chunks,
+ footer_height,
+ }
+ }
+
+ fn bytes_in_range(&self, range: Range<usize>) -> ExcerptBytes {
+ let content_start = self.range.context.start.to_offset(&self.buffer);
+ let bytes_start = content_start + range.start;
+ let bytes_end = content_start + cmp::min(range.end, self.text_summary.len);
+ let footer_height = if self.has_trailing_newline
+ && range.start <= self.text_summary.len
+ && range.end > self.text_summary.len
+ {
+ 1
+ } else {
+ 0
+ };
+ let content_bytes = self.buffer.bytes_in_range(bytes_start..bytes_end);
+
+ ExcerptBytes {
+ content_bytes,
+ footer_height,
+ }
+ }
+
+ fn reversed_bytes_in_range(&self, range: Range<usize>) -> ExcerptBytes {
+ let content_start = self.range.context.start.to_offset(&self.buffer);
+ let bytes_start = content_start + range.start;
+ let bytes_end = content_start + cmp::min(range.end, self.text_summary.len);
+ let footer_height = if self.has_trailing_newline
+ && range.start <= self.text_summary.len
+ && range.end > self.text_summary.len
+ {
+ 1
+ } else {
+ 0
+ };
+ let content_bytes = self.buffer.reversed_bytes_in_range(bytes_start..bytes_end);
+
+ ExcerptBytes {
+ content_bytes,
+ footer_height,
+ }
+ }
+
+ fn clip_anchor(&self, text_anchor: text::Anchor) -> text::Anchor {
+ if text_anchor
+ .cmp(&self.range.context.start, &self.buffer)
+ .is_lt()
+ {
+ self.range.context.start
+ } else if text_anchor
+ .cmp(&self.range.context.end, &self.buffer)
+ .is_gt()
+ {
+ self.range.context.end
+ } else {
+ text_anchor
+ }
+ }
+
+ fn contains(&self, anchor: &Anchor) -> bool {
+ Some(self.buffer_id) == anchor.buffer_id
+ && self
+ .range
+ .context
+ .start
+ .cmp(&anchor.text_anchor, &self.buffer)
+ .is_le()
+ && self
+ .range
+ .context
+ .end
+ .cmp(&anchor.text_anchor, &self.buffer)
+ .is_ge()
+ }
+}
+
+impl ExcerptId {
+ pub fn min() -> Self {
+ Self(0)
+ }
+
+ pub fn max() -> Self {
+ Self(usize::MAX)
+ }
+
+ pub fn to_proto(&self) -> u64 {
+ self.0 as _
+ }
+
+ pub fn from_proto(proto: u64) -> Self {
+ Self(proto as _)
+ }
+
+ pub fn cmp(&self, other: &Self, snapshot: &MultiBufferSnapshot) -> cmp::Ordering {
+ let a = snapshot.excerpt_locator_for_id(*self);
+ let b = snapshot.excerpt_locator_for_id(*other);
+ a.cmp(&b).then_with(|| self.0.cmp(&other.0))
+ }
+}
+
+impl Into<usize> for ExcerptId {
+ fn into(self) -> usize {
+ self.0
+ }
+}
+
+impl fmt::Debug for Excerpt {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Excerpt")
+ .field("id", &self.id)
+ .field("locator", &self.locator)
+ .field("buffer_id", &self.buffer_id)
+ .field("range", &self.range)
+ .field("text_summary", &self.text_summary)
+ .field("has_trailing_newline", &self.has_trailing_newline)
+ .finish()
+ }
+}
+
+impl sum_tree::Item for Excerpt {
+ type Summary = ExcerptSummary;
+
+ fn summary(&self) -> Self::Summary {
+ let mut text = self.text_summary.clone();
+ if self.has_trailing_newline {
+ text += TextSummary::from("\n");
+ }
+ ExcerptSummary {
+ excerpt_id: self.id,
+ excerpt_locator: self.locator.clone(),
+ max_buffer_row: self.max_buffer_row,
+ text,
+ }
+ }
+}
+
+impl sum_tree::Item for ExcerptIdMapping {
+ type Summary = ExcerptId;
+
+ fn summary(&self) -> Self::Summary {
+ self.id
+ }
+}
+
+impl sum_tree::KeyedItem for ExcerptIdMapping {
+ type Key = ExcerptId;
+
+ fn key(&self) -> Self::Key {
+ self.id
+ }
+}
+
+impl sum_tree::Summary for ExcerptId {
+ type Context = ();
+
+ fn add_summary(&mut self, other: &Self, _: &()) {
+ *self = *other;
+ }
+}
+
+impl sum_tree::Summary for ExcerptSummary {
+ type Context = ();
+
+ fn add_summary(&mut self, summary: &Self, _: &()) {
+ debug_assert!(summary.excerpt_locator > self.excerpt_locator);
+ self.excerpt_locator = summary.excerpt_locator.clone();
+ self.text.add_summary(&summary.text, &());
+ self.max_buffer_row = cmp::max(self.max_buffer_row, summary.max_buffer_row);
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for TextSummary {
+ fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+ *self += &summary.text;
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for usize {
+ fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+ *self += summary.text.len;
+ }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for usize {
+ fn cmp(&self, cursor_location: &ExcerptSummary, _: &()) -> cmp::Ordering {
+ Ord::cmp(self, &cursor_location.text.len)
+ }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, Option<&'a Locator>> for Locator {
+ fn cmp(&self, cursor_location: &Option<&'a Locator>, _: &()) -> cmp::Ordering {
+ Ord::cmp(&Some(self), cursor_location)
+ }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, ExcerptSummary, ExcerptSummary> for Locator {
+ fn cmp(&self, cursor_location: &ExcerptSummary, _: &()) -> cmp::Ordering {
+ Ord::cmp(self, &cursor_location.excerpt_locator)
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for OffsetUtf16 {
+ fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+ *self += summary.text.len_utf16;
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Point {
+ fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+ *self += summary.text.lines;
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for PointUtf16 {
+ fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+ *self += summary.text.lines_utf16()
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<&'a Locator> {
+ fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+ *self = Some(&summary.excerpt_locator);
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, ExcerptSummary> for Option<ExcerptId> {
+ fn add_summary(&mut self, summary: &'a ExcerptSummary, _: &()) {
+ *self = Some(summary.excerpt_id);
+ }
+}
+
impl<'a> MultiBufferRows<'a> {
    /// Repositions the iterator so that the next item corresponds to
    /// multi-buffer row `row`.
    pub fn seek(&mut self, row: u32) {
        // Clear any pending rows; they are re-derived below.
        self.buffer_row_range = 0..0;

        self.excerpts
            .seek_forward(&Point::new(row, 0), Bias::Right, &());
        if self.excerpts.item().is_none() {
            // Overshot past the last excerpt; step back onto it.
            self.excerpts.prev(&());

            // An empty multi-buffer still reports a single row 0.
            if self.excerpts.item().is_none() && row == 0 {
                self.buffer_row_range = 0..1;
                return;
            }
        }

        if let Some(excerpt) = self.excerpts.item() {
            // Translate the multi-buffer row into the underlying buffer's row
            // coordinates: the excerpt's starting buffer row plus how far past
            // the excerpt's start we sought.
            let overshoot = row - self.excerpts.start().row;
            let excerpt_start = excerpt.range.context.start.to_point(&excerpt.buffer).row;
            self.buffer_row_range.start = excerpt_start + overshoot;
            self.buffer_row_range.end = excerpt_start + excerpt.text_summary.lines.row + 1;
        }
    }
}
+
impl<'a> Iterator for MultiBufferRows<'a> {
    // Each item is the buffer row backing the next multi-buffer row; the code
    // below only ever yields `Some(row)`.
    type Item = Option<u32>;

    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // Drain the rows of the current excerpt first.
            if !self.buffer_row_range.is_empty() {
                let row = Some(self.buffer_row_range.start);
                self.buffer_row_range.start += 1;
                return Some(row);
            }
            // Stop if the cursor is already past the last excerpt; otherwise
            // advance to the next excerpt and load its row range.
            self.excerpts.item()?;
            self.excerpts.next(&());
            let excerpt = self.excerpts.item()?;
            self.buffer_row_range.start = excerpt.range.context.start.to_point(&excerpt.buffer).row;
            self.buffer_row_range.end =
                self.buffer_row_range.start + excerpt.text_summary.lines.row + 1;
        }
    }
}
+
impl<'a> MultiBufferChunks<'a> {
    /// Current position of the iterator, in bytes from the start of the
    /// multi-buffer.
    pub fn offset(&self) -> usize {
        self.range.start
    }

    /// Repositions the iterator at `offset`, rebuilding the chunk iterator
    /// for whichever excerpt contains that offset.
    pub fn seek(&mut self, offset: usize) {
        self.range.start = offset;
        self.excerpts.seek(&offset, Bias::Right, &());
        if let Some(excerpt) = self.excerpts.item() {
            // Convert the absolute byte range into excerpt-relative bytes.
            self.excerpt_chunks = Some(excerpt.chunks_in_range(
                self.range.start - self.excerpts.start()..self.range.end - self.excerpts.start(),
                self.language_aware,
            ));
        } else {
            self.excerpt_chunks = None;
        }
    }
}
+
+impl<'a> Iterator for MultiBufferChunks<'a> {
+ type Item = Chunk<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if self.range.is_empty() {
+ None
+ } else if let Some(chunk) = self.excerpt_chunks.as_mut()?.next() {
+ self.range.start += chunk.text.len();
+ Some(chunk)
+ } else {
+ self.excerpts.next(&());
+ let excerpt = self.excerpts.item()?;
+ self.excerpt_chunks = Some(excerpt.chunks_in_range(
+ 0..self.range.end - self.excerpts.start(),
+ self.language_aware,
+ ));
+ self.next()
+ }
+ }
+}
+
impl<'a> MultiBufferBytes<'a> {
    // Advances past `len` bytes of the current chunk, refilling `chunk` from
    // the current excerpt's byte iterator — or from the next excerpt — when
    // the chunk runs dry while bytes remain in `range`.
    fn consume(&mut self, len: usize) {
        self.range.start += len;
        self.chunk = &self.chunk[len..];

        if !self.range.is_empty() && self.chunk.is_empty() {
            if let Some(chunk) = self.excerpt_bytes.as_mut().and_then(|bytes| bytes.next()) {
                self.chunk = chunk;
            } else {
                self.excerpts.next(&());
                if let Some(excerpt) = self.excerpts.item() {
                    let mut excerpt_bytes =
                        excerpt.bytes_in_range(0..self.range.end - self.excerpts.start());
                    // NOTE(review): assumes a freshly entered excerpt always
                    // yields at least one chunk — confirm this invariant.
                    self.chunk = excerpt_bytes.next().unwrap();
                    self.excerpt_bytes = Some(excerpt_bytes);
                }
            }
        }
    }
}
+
+impl<'a> Iterator for MultiBufferBytes<'a> {
+ type Item = &'a [u8];
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let chunk = self.chunk;
+ if chunk.is_empty() {
+ None
+ } else {
+ self.consume(chunk.len());
+ Some(chunk)
+ }
+ }
+}
+
+impl<'a> io::Read for MultiBufferBytes<'a> {
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ let len = cmp::min(buf.len(), self.chunk.len());
+ buf[..len].copy_from_slice(&self.chunk[..len]);
+ if len > 0 {
+ self.consume(len);
+ }
+ Ok(len)
+ }
+}
+
impl<'a> ReversedMultiBufferBytes<'a> {
    // Consumes `len` bytes from the *end* of the current chunk (the iterator
    // walks the multi-buffer back to front), refilling `chunk` when it runs
    // dry while bytes remain in `range`.
    fn consume(&mut self, len: usize) {
        self.range.end -= len;
        self.chunk = &self.chunk[..self.chunk.len() - len];

        if !self.range.is_empty() && self.chunk.is_empty() {
            if let Some(chunk) = self.excerpt_bytes.as_mut().and_then(|bytes| bytes.next()) {
                self.chunk = chunk;
            } else {
                // NOTE(review): this refill path mirrors the forward iterator
                // verbatim — it advances the cursor with `next` and slices the
                // excerpt from its start. For reversed iteration one would
                // expect to step to the *previous* excerpt and consume from
                // its end; confirm against the callers before relying on
                // multi-excerpt reversed reads.
                self.excerpts.next(&());
                if let Some(excerpt) = self.excerpts.item() {
                    let mut excerpt_bytes =
                        excerpt.bytes_in_range(0..self.range.end - self.excerpts.start());
                    self.chunk = excerpt_bytes.next().unwrap();
                    self.excerpt_bytes = Some(excerpt_bytes);
                }
            }
        }
    }
}
+
+impl<'a> io::Read for ReversedMultiBufferBytes<'a> {
+ fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
+ let len = cmp::min(buf.len(), self.chunk.len());
+ buf[..len].copy_from_slice(&self.chunk[..len]);
+ buf[..len].reverse();
+ if len > 0 {
+ self.consume(len);
+ }
+ Ok(len)
+ }
+}
+impl<'a> Iterator for ExcerptBytes<'a> {
+ type Item = &'a [u8];
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(chunk) = self.content_bytes.next() {
+ if !chunk.is_empty() {
+ return Some(chunk);
+ }
+ }
+
+ if self.footer_height > 0 {
+ let result = &NEWLINES[..self.footer_height];
+ self.footer_height = 0;
+ return Some(result);
+ }
+
+ None
+ }
+}
+
+impl<'a> Iterator for ExcerptChunks<'a> {
+ type Item = Chunk<'a>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ if let Some(chunk) = self.content_chunks.next() {
+ return Some(chunk);
+ }
+
+ if self.footer_height > 0 {
+ let text = unsafe { str::from_utf8_unchecked(&NEWLINES[..self.footer_height]) };
+ self.footer_height = 0;
+ return Some(Chunk {
+ text,
+ ..Default::default()
+ });
+ }
+
+ None
+ }
+}
+
// Row/column coordinates → byte offset, via the snapshot.
impl ToOffset for Point {
    fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
        snapshot.point_to_offset(*self)
    }
}
+
// A raw byte offset is already an offset; only validated.
impl ToOffset for usize {
    // Panics if the offset exceeds the snapshot's length.
    fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
        assert!(*self <= snapshot.len(), "offset is out of range");
        *self
    }
}
+
// UTF-16 offset → byte offset, via the snapshot.
impl ToOffset for OffsetUtf16 {
    fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
        snapshot.offset_utf16_to_offset(*self)
    }
}
+
// UTF-16 row/column coordinates → byte offset, via the snapshot.
impl ToOffset for PointUtf16 {
    fn to_offset<'a>(&self, snapshot: &MultiBufferSnapshot) -> usize {
        snapshot.point_utf16_to_offset(*self)
    }
}
+
// Identity conversion: already a UTF-16 offset.
impl ToOffsetUtf16 for OffsetUtf16 {
    fn to_offset_utf16(&self, _snapshot: &MultiBufferSnapshot) -> OffsetUtf16 {
        *self
    }
}
+
// Byte offset → UTF-16 offset, via the snapshot.
impl ToOffsetUtf16 for usize {
    fn to_offset_utf16(&self, snapshot: &MultiBufferSnapshot) -> OffsetUtf16 {
        snapshot.offset_to_offset_utf16(*self)
    }
}
+
// Byte offset → row/column coordinates, via the snapshot.
impl ToPoint for usize {
    fn to_point<'a>(&self, snapshot: &MultiBufferSnapshot) -> Point {
        snapshot.offset_to_point(*self)
    }
}
+
// Identity conversion: already row/column coordinates.
impl ToPoint for Point {
    fn to_point<'a>(&self, _: &MultiBufferSnapshot) -> Point {
        *self
    }
}
+
// Byte offset → UTF-16 row/column coordinates, via the snapshot.
impl ToPointUtf16 for usize {
    fn to_point_utf16<'a>(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16 {
        snapshot.offset_to_point_utf16(*self)
    }
}
+
// Byte-based row/column → UTF-16 row/column coordinates, via the snapshot.
impl ToPointUtf16 for Point {
    fn to_point_utf16<'a>(&self, snapshot: &MultiBufferSnapshot) -> PointUtf16 {
        snapshot.point_to_point_utf16(*self)
    }
}
+
// Identity conversion: already UTF-16 row/column coordinates.
impl ToPointUtf16 for PointUtf16 {
    fn to_point_utf16<'a>(&self, _: &MultiBufferSnapshot) -> PointUtf16 {
        *self
    }
}
+
// Expands each input range by `context_line_count` lines on both sides and
// merges expansions that would touch or overlap into a single excerpt.
//
// Returns the merged excerpt ranges — each carrying the first input range of
// its group as the `primary` range — plus, for each excerpt, the number of
// input ranges it absorbed.
fn build_excerpt_ranges<T>(
    buffer: &BufferSnapshot,
    ranges: &[Range<T>],
    context_line_count: u32,
) -> (Vec<ExcerptRange<Point>>, Vec<usize>)
where
    T: text::ToPoint,
{
    let max_point = buffer.max_point();
    let mut range_counts = Vec::new();
    let mut excerpt_ranges = Vec::new();
    let mut range_iter = ranges
        .iter()
        .map(|range| range.start.to_point(buffer)..range.end.to_point(buffer))
        .peekable();
    while let Some(range) = range_iter.next() {
        // Start `context_line_count` lines above the range (clamped at row 0)
        // and end the same distance below it (clamped to the buffer's end).
        let excerpt_start = Point::new(range.start.row.saturating_sub(context_line_count), 0);
        let mut excerpt_end = Point::new(range.end.row + 1 + context_line_count, 0).min(max_point);
        let mut ranges_in_excerpt = 1;

        // Greedily fold in subsequent ranges whose own context would overlap
        // this excerpt, extending the excerpt's end as we go.
        while let Some(next_range) = range_iter.peek() {
            if next_range.start.row <= excerpt_end.row + context_line_count {
                excerpt_end =
                    Point::new(next_range.end.row + 1 + context_line_count, 0).min(max_point);
                ranges_in_excerpt += 1;
                range_iter.next();
            } else {
                break;
            }
        }

        excerpt_ranges.push(ExcerptRange {
            context: excerpt_start..excerpt_end,
            primary: Some(range),
        });
        range_counts.push(ranges_in_excerpt);
    }

    (excerpt_ranges, range_counts)
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use futures::StreamExt;
+ use gpui2::{AppContext, Context, TestAppContext};
+ use language2::{Buffer, Rope};
+ use parking_lot::RwLock;
+ use rand::prelude::*;
+ use settings2::SettingsStore;
+ use std::env;
+ use util::test::sample_text;
+
    // A singleton multi-buffer must mirror its one buffer's text and buffer
    // rows, both before and after an edit.
    #[gpui2::test]
    fn test_singleton(cx: &mut AppContext) {
        let buffer =
            cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(6, 6, 'a')));
        let multibuffer = cx.build_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));

        let snapshot = multibuffer.read(cx).snapshot(cx);
        assert_eq!(snapshot.text(), buffer.read(cx).text());

        assert_eq!(
            snapshot.buffer_rows(0).collect::<Vec<_>>(),
            (0..buffer.read(cx).row_count())
                .map(Some)
                .collect::<Vec<_>>()
        );

        // Edit the underlying buffer; the multi-buffer must stay in sync.
        buffer.update(cx, |buffer, cx| buffer.edit([(1..3, "XXX\n")], None, cx));
        let snapshot = multibuffer.read(cx).snapshot(cx);

        assert_eq!(snapshot.text(), buffer.read(cx).text());
        assert_eq!(
            snapshot.buffer_rows(0).collect::<Vec<_>>(),
            (0..buffer.read(cx).row_count())
                .map(Some)
                .collect::<Vec<_>>()
        );
    }
+
    // A multi-buffer wrapping a replica buffer (reconstructed from proto
    // state + serialized operations) must reflect edits made on the replica.
    #[gpui2::test]
    fn test_remote(cx: &mut AppContext) {
        let host_buffer = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "a"));
        let guest_buffer = cx.build_model(|cx| {
            // Rebuild the host buffer on the "guest" side from its protobuf
            // state and replayed operations.
            let state = host_buffer.read(cx).to_proto();
            let ops = cx
                .executor()
                .block(host_buffer.read(cx).serialize_ops(None, cx));
            let mut buffer = Buffer::from_proto(1, state, None).unwrap();
            buffer
                .apply_ops(
                    ops.into_iter()
                        .map(|op| language2::proto::deserialize_operation(op).unwrap()),
                    cx,
                )
                .unwrap();
            buffer
        });
        let multibuffer = cx.build_model(|cx| MultiBuffer::singleton(guest_buffer.clone(), cx));
        let snapshot = multibuffer.read(cx).snapshot(cx);
        assert_eq!(snapshot.text(), "a");

        guest_buffer.update(cx, |buffer, cx| buffer.edit([(1..1, "b")], None, cx));
        let snapshot = multibuffer.read(cx).snapshot(cx);
        assert_eq!(snapshot.text(), "ab");

        guest_buffer.update(cx, |buffer, cx| buffer.edit([(2..2, "c")], None, cx));
        let snapshot = multibuffer.read(cx).snapshot(cx);
        assert_eq!(snapshot.text(), "abc");
    }
+
    // Exercises excerpt insertion/removal across two buffers: edit events,
    // edit subscriptions, buffer-row iteration, excerpt boundary queries,
    // and point clipping at excerpt edges.
    #[gpui2::test]
    fn test_excerpt_boundaries_and_clipping(cx: &mut AppContext) {
        let buffer_1 =
            cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(6, 6, 'a')));
        let buffer_2 =
            cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(6, 6, 'g')));
        let multibuffer = cx.build_model(|_| MultiBuffer::new(0));

        // Record every Edited event the multi-buffer emits.
        let events = Arc::new(RwLock::new(Vec::<Event>::new()));
        multibuffer.update(cx, |_, cx| {
            let events = events.clone();
            cx.subscribe(&multibuffer, move |_, _, event, _| {
                if let Event::Edited { .. } = event {
                    events.write().push(event.clone())
                }
            })
            .detach();
        });

        let subscription = multibuffer.update(cx, |multibuffer, cx| {
            let subscription = multibuffer.subscribe();
            multibuffer.push_excerpts(
                buffer_1.clone(),
                [ExcerptRange {
                    context: Point::new(1, 2)..Point::new(2, 5),
                    primary: None,
                }],
                cx,
            );
            assert_eq!(
                subscription.consume().into_inner(),
                [Edit {
                    old: 0..0,
                    new: 0..10
                }]
            );

            multibuffer.push_excerpts(
                buffer_1.clone(),
                [ExcerptRange {
                    context: Point::new(3, 3)..Point::new(4, 4),
                    primary: None,
                }],
                cx,
            );
            multibuffer.push_excerpts(
                buffer_2.clone(),
                [ExcerptRange {
                    context: Point::new(3, 1)..Point::new(3, 3),
                    primary: None,
                }],
                cx,
            );
            assert_eq!(
                subscription.consume().into_inner(),
                [Edit {
                    old: 10..10,
                    new: 10..22
                }]
            );

            subscription
        });

        // Adding excerpts emits an edited event.
        // NOTE(review): "sigleton" is a typo in the `Event::Edited` field
        // name; the field must match the enum definition elsewhere in this
        // file, so it cannot be corrected in the test alone.
        assert_eq!(
            events.read().as_slice(),
            &[
                Event::Edited {
                    sigleton_buffer_edited: false
                },
                Event::Edited {
                    sigleton_buffer_edited: false
                },
                Event::Edited {
                    sigleton_buffer_edited: false
                }
            ]
        );

        let snapshot = multibuffer.read(cx).snapshot(cx);
        assert_eq!(
            snapshot.text(),
            concat!(
                "bbbb\n", // Preserve newlines
                "ccccc\n", //
                "ddd\n", //
                "eeee\n", //
                "jj" //
            )
        );
        assert_eq!(
            snapshot.buffer_rows(0).collect::<Vec<_>>(),
            [Some(1), Some(2), Some(3), Some(4), Some(3)]
        );
        assert_eq!(
            snapshot.buffer_rows(2).collect::<Vec<_>>(),
            [Some(3), Some(4), Some(3)]
        );
        assert_eq!(snapshot.buffer_rows(4).collect::<Vec<_>>(), [Some(3)]);
        assert_eq!(snapshot.buffer_rows(5).collect::<Vec<_>>(), []);

        // Boundary queries: (row, excerpt text, starts_new_buffer).
        assert_eq!(
            boundaries_in_range(Point::new(0, 0)..Point::new(4, 2), &snapshot),
            &[
                (0, "bbbb\nccccc".to_string(), true),
                (2, "ddd\neeee".to_string(), false),
                (4, "jj".to_string(), true),
            ]
        );
        assert_eq!(
            boundaries_in_range(Point::new(0, 0)..Point::new(2, 0), &snapshot),
            &[(0, "bbbb\nccccc".to_string(), true)]
        );
        assert_eq!(
            boundaries_in_range(Point::new(1, 0)..Point::new(1, 5), &snapshot),
            &[]
        );
        assert_eq!(
            boundaries_in_range(Point::new(1, 0)..Point::new(2, 0), &snapshot),
            &[]
        );
        assert_eq!(
            boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot),
            &[(2, "ddd\neeee".to_string(), false)]
        );
        assert_eq!(
            boundaries_in_range(Point::new(1, 0)..Point::new(4, 0), &snapshot),
            &[(2, "ddd\neeee".to_string(), false)]
        );
        assert_eq!(
            boundaries_in_range(Point::new(2, 0)..Point::new(3, 0), &snapshot),
            &[(2, "ddd\neeee".to_string(), false)]
        );
        assert_eq!(
            boundaries_in_range(Point::new(4, 0)..Point::new(4, 2), &snapshot),
            &[(4, "jj".to_string(), true)]
        );
        assert_eq!(
            boundaries_in_range(Point::new(4, 2)..Point::new(4, 2), &snapshot),
            &[]
        );

        // Edit the first buffer; the multi-buffer must re-render and report
        // the change through the subscription.
        buffer_1.update(cx, |buffer, cx| {
            let text = "\n";
            buffer.edit(
                [
                    (Point::new(0, 0)..Point::new(0, 0), text),
                    (Point::new(2, 1)..Point::new(2, 3), text),
                ],
                None,
                cx,
            );
        });

        let snapshot = multibuffer.read(cx).snapshot(cx);
        assert_eq!(
            snapshot.text(),
            concat!(
                "bbbb\n", // Preserve newlines
                "c\n", //
                "cc\n", //
                "ddd\n", //
                "eeee\n", //
                "jj" //
            )
        );

        assert_eq!(
            subscription.consume().into_inner(),
            [Edit {
                old: 6..8,
                new: 6..7
            }]
        );

        // Points past an excerpt's end clip back inside it.
        let snapshot = multibuffer.read(cx).snapshot(cx);
        assert_eq!(
            snapshot.clip_point(Point::new(0, 5), Bias::Left),
            Point::new(0, 4)
        );
        assert_eq!(
            snapshot.clip_point(Point::new(0, 5), Bias::Right),
            Point::new(0, 4)
        );
        assert_eq!(
            snapshot.clip_point(Point::new(5, 1), Bias::Right),
            Point::new(5, 1)
        );
        assert_eq!(
            snapshot.clip_point(Point::new(5, 2), Bias::Right),
            Point::new(5, 2)
        );
        assert_eq!(
            snapshot.clip_point(Point::new(5, 3), Bias::Right),
            Point::new(5, 2)
        );

        // Removing buffer 2's excerpt drops its text from the snapshot.
        let snapshot = multibuffer.update(cx, |multibuffer, cx| {
            let (buffer_2_excerpt_id, _) =
                multibuffer.excerpts_for_buffer(&buffer_2, cx)[0].clone();
            multibuffer.remove_excerpts([buffer_2_excerpt_id], cx);
            multibuffer.snapshot(cx)
        });

        assert_eq!(
            snapshot.text(),
            concat!(
                "bbbb\n", // Preserve newlines
                "c\n", //
                "cc\n", //
                "ddd\n", //
                "eeee", //
            )
        );

        // Helper: collect (row, excerpt text, starts_new_buffer) for each
        // excerpt boundary within `range`.
        fn boundaries_in_range(
            range: Range<Point>,
            snapshot: &MultiBufferSnapshot,
        ) -> Vec<(u32, String, bool)> {
            snapshot
                .excerpt_boundaries_in_range(range)
                .map(|boundary| {
                    (
                        boundary.row,
                        boundary
                            .buffer
                            .text_for_range(boundary.range.context)
                            .collect::<String>(),
                        boundary.starts_new_buffer,
                    )
                })
                .collect::<Vec<_>>()
        }
    }
+
    // A follower multi-buffer that replays the leader's ExcerptsAdded /
    // ExcerptsRemoved events must stay textually identical to the leader,
    // and empty add/remove batches must be no-ops.
    #[gpui2::test]
    fn test_excerpt_events(cx: &mut AppContext) {
        let buffer_1 =
            cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(10, 3, 'a')));
        let buffer_2 =
            cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(10, 3, 'm')));

        let leader_multibuffer = cx.build_model(|_| MultiBuffer::new(0));
        let follower_multibuffer = cx.build_model(|_| MultiBuffer::new(0));
        let follower_edit_event_count = Arc::new(RwLock::new(0));

        // Mirror the leader's excerpt events onto the follower and count the
        // Edited events the follower observes.
        follower_multibuffer.update(cx, |_, cx| {
            let follower_edit_event_count = follower_edit_event_count.clone();
            cx.subscribe(
                &leader_multibuffer,
                move |follower, _, event, cx| match event.clone() {
                    Event::ExcerptsAdded {
                        buffer,
                        predecessor,
                        excerpts,
                    } => follower.insert_excerpts_with_ids_after(predecessor, buffer, excerpts, cx),
                    Event::ExcerptsRemoved { ids } => follower.remove_excerpts(ids, cx),
                    Event::Edited { .. } => {
                        *follower_edit_event_count.write() += 1;
                    }
                    _ => {}
                },
            )
            .detach();
        });

        leader_multibuffer.update(cx, |leader, cx| {
            leader.push_excerpts(
                buffer_1.clone(),
                [
                    ExcerptRange {
                        context: 0..8,
                        primary: None,
                    },
                    ExcerptRange {
                        context: 12..16,
                        primary: None,
                    },
                ],
                cx,
            );
            leader.insert_excerpts_after(
                leader.excerpt_ids()[0],
                buffer_2.clone(),
                [
                    ExcerptRange {
                        context: 0..5,
                        primary: None,
                    },
                    ExcerptRange {
                        context: 10..15,
                        primary: None,
                    },
                ],
                cx,
            )
        });
        assert_eq!(
            leader_multibuffer.read(cx).snapshot(cx).text(),
            follower_multibuffer.read(cx).snapshot(cx).text(),
        );
        assert_eq!(*follower_edit_event_count.read(), 2);

        leader_multibuffer.update(cx, |leader, cx| {
            let excerpt_ids = leader.excerpt_ids();
            leader.remove_excerpts([excerpt_ids[1], excerpt_ids[3]], cx);
        });
        assert_eq!(
            leader_multibuffer.read(cx).snapshot(cx).text(),
            follower_multibuffer.read(cx).snapshot(cx).text(),
        );
        assert_eq!(*follower_edit_event_count.read(), 3);

        // Removing an empty set of excerpts is a noop.
        leader_multibuffer.update(cx, |leader, cx| {
            leader.remove_excerpts([], cx);
        });
        assert_eq!(
            leader_multibuffer.read(cx).snapshot(cx).text(),
            follower_multibuffer.read(cx).snapshot(cx).text(),
        );
        assert_eq!(*follower_edit_event_count.read(), 3);

        // Adding an empty set of excerpts is a noop.
        leader_multibuffer.update(cx, |leader, cx| {
            leader.push_excerpts::<usize>(buffer_2.clone(), [], cx);
        });
        assert_eq!(
            leader_multibuffer.read(cx).snapshot(cx).text(),
            follower_multibuffer.read(cx).snapshot(cx).text(),
        );
        assert_eq!(*follower_edit_event_count.read(), 3);

        leader_multibuffer.update(cx, |leader, cx| {
            leader.clear(cx);
        });
        assert_eq!(
            leader_multibuffer.read(cx).snapshot(cx).text(),
            follower_multibuffer.read(cx).snapshot(cx).text(),
        );
        assert_eq!(*follower_edit_event_count.read(), 4);
    }
+
    // Pushing excerpts with 2 context lines must merge overlapping context
    // windows and return anchors that resolve to the original ranges.
    #[gpui2::test]
    fn test_push_excerpts_with_context_lines(cx: &mut AppContext) {
        let buffer =
            cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(20, 3, 'a')));
        let multibuffer = cx.build_model(|_| MultiBuffer::new(0));
        let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
            multibuffer.push_excerpts_with_context_lines(
                buffer.clone(),
                vec![
                    Point::new(3, 2)..Point::new(4, 2),
                    Point::new(7, 1)..Point::new(7, 3),
                    Point::new(15, 0)..Point::new(15, 0),
                ],
                2,
                cx,
            )
        });

        // The first two ranges' context windows overlap and merge into one
        // excerpt; the third becomes its own excerpt.
        let snapshot = multibuffer.read(cx).snapshot(cx);
        assert_eq!(
            snapshot.text(),
            "bbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\n\nnnn\nooo\nppp\nqqq\nrrr\n"
        );

        assert_eq!(
            anchor_ranges
                .iter()
                .map(|range| range.to_point(&snapshot))
                .collect::<Vec<_>>(),
            vec![
                Point::new(2, 2)..Point::new(3, 2),
                Point::new(6, 1)..Point::new(6, 3),
                Point::new(12, 0)..Point::new(12, 0)
            ]
        );
    }
+
    // Async variant of the context-lines test: streaming excerpts must yield
    // the same merged text and resolvable anchors as the synchronous path.
    #[gpui2::test]
    async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) {
        let buffer =
            cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), sample_text(20, 3, 'a')));
        let multibuffer = cx.build_model(|_| MultiBuffer::new(0));
        let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
            let snapshot = buffer.read(cx);
            let ranges = vec![
                snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)),
                snapshot.anchor_before(Point::new(7, 1))..snapshot.anchor_before(Point::new(7, 3)),
                snapshot.anchor_before(Point::new(15, 0))
                    ..snapshot.anchor_before(Point::new(15, 0)),
            ];
            multibuffer.stream_excerpts_with_context_lines(buffer.clone(), ranges, 2, cx)
        });

        let anchor_ranges = anchor_ranges.collect::<Vec<_>>().await;

        let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx));
        assert_eq!(
            snapshot.text(),
            "bbb\nccc\nddd\neee\nfff\nggg\nhhh\niii\njjj\n\nnnn\nooo\nppp\nqqq\nrrr\n"
        );

        assert_eq!(
            anchor_ranges
                .iter()
                .map(|range| range.to_point(&snapshot))
                .collect::<Vec<_>>(),
            vec![
                Point::new(2, 2)..Point::new(3, 2),
                Point::new(6, 1)..Point::new(6, 3),
                Point::new(12, 0)..Point::new(12, 0)
            ]
        );
    }
+
    // An empty multi-buffer still reports empty text and a single row 0.
    #[gpui2::test]
    fn test_empty_multibuffer(cx: &mut AppContext) {
        let multibuffer = cx.build_model(|_| MultiBuffer::new(0));

        let snapshot = multibuffer.read(cx).snapshot(cx);
        assert_eq!(snapshot.text(), "");
        assert_eq!(snapshot.buffer_rows(0).collect::<Vec<_>>(), &[Some(0)]);
        assert_eq!(snapshot.buffer_rows(1).collect::<Vec<_>>(), &[]);
    }
+
    // Anchors taken from an old singleton snapshot must resolve to the
    // correct, bias-adjusted positions in a newer snapshot after edits.
    #[gpui2::test]
    fn test_singleton_multibuffer_anchors(cx: &mut AppContext) {
        let buffer = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcd"));
        let multibuffer = cx.build_model(|cx| MultiBuffer::singleton(buffer.clone(), cx));
        let old_snapshot = multibuffer.read(cx).snapshot(cx);
        buffer.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "X")], None, cx);
            buffer.edit([(5..5, "Y")], None, cx);
        });
        let new_snapshot = multibuffer.read(cx).snapshot(cx);

        assert_eq!(old_snapshot.text(), "abcd");
        assert_eq!(new_snapshot.text(), "XabcdY");

        // Left-biased anchors stay before the insertions; right-biased
        // anchors move past them.
        assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0);
        assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1);
        assert_eq!(old_snapshot.anchor_before(4).to_offset(&new_snapshot), 5);
        assert_eq!(old_snapshot.anchor_after(4).to_offset(&new_snapshot), 6);
    }
+
    // Like the singleton anchor test, but across two excerpts from two
    // buffers — anchors must account for edits in both buffers plus the
    // excerpt separator newline.
    #[gpui2::test]
    fn test_multibuffer_anchors(cx: &mut AppContext) {
        let buffer_1 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcd"));
        let buffer_2 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "efghi"));
        let multibuffer = cx.build_model(|cx| {
            let mut multibuffer = MultiBuffer::new(0);
            multibuffer.push_excerpts(
                buffer_1.clone(),
                [ExcerptRange {
                    context: 0..4,
                    primary: None,
                }],
                cx,
            );
            multibuffer.push_excerpts(
                buffer_2.clone(),
                [ExcerptRange {
                    context: 0..5,
                    primary: None,
                }],
                cx,
            );
            multibuffer
        });
        let old_snapshot = multibuffer.read(cx).snapshot(cx);

        // NOTE(review): the min/max assertions below are duplicated verbatim.
        assert_eq!(old_snapshot.anchor_before(0).to_offset(&old_snapshot), 0);
        assert_eq!(old_snapshot.anchor_after(0).to_offset(&old_snapshot), 0);
        assert_eq!(Anchor::min().to_offset(&old_snapshot), 0);
        assert_eq!(Anchor::min().to_offset(&old_snapshot), 0);
        assert_eq!(Anchor::max().to_offset(&old_snapshot), 10);
        assert_eq!(Anchor::max().to_offset(&old_snapshot), 10);

        buffer_1.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "W")], None, cx);
            buffer.edit([(5..5, "X")], None, cx);
        });
        buffer_2.update(cx, |buffer, cx| {
            buffer.edit([(0..0, "Y")], None, cx);
            buffer.edit([(6..6, "Z")], None, cx);
        });
        let new_snapshot = multibuffer.read(cx).snapshot(cx);

        assert_eq!(old_snapshot.text(), "abcd\nefghi");
        assert_eq!(new_snapshot.text(), "WabcdX\nYefghiZ");

        assert_eq!(old_snapshot.anchor_before(0).to_offset(&new_snapshot), 0);
        assert_eq!(old_snapshot.anchor_after(0).to_offset(&new_snapshot), 1);
        assert_eq!(old_snapshot.anchor_before(1).to_offset(&new_snapshot), 2);
        assert_eq!(old_snapshot.anchor_after(1).to_offset(&new_snapshot), 2);
        assert_eq!(old_snapshot.anchor_before(2).to_offset(&new_snapshot), 3);
        assert_eq!(old_snapshot.anchor_after(2).to_offset(&new_snapshot), 3);
        assert_eq!(old_snapshot.anchor_before(5).to_offset(&new_snapshot), 7);
        assert_eq!(old_snapshot.anchor_after(5).to_offset(&new_snapshot), 8);
        assert_eq!(old_snapshot.anchor_before(10).to_offset(&new_snapshot), 13);
        assert_eq!(old_snapshot.anchor_after(10).to_offset(&new_snapshot), 14);
    }
+
    // Anchors whose excerpts are removed or replaced must degrade gracefully:
    // they resolve to a predecessor position, and `refresh_anchors` reports
    // whether each anchor kept its excerpt.
    #[gpui2::test]
    fn test_resolving_anchors_after_replacing_their_excerpts(cx: &mut AppContext) {
        let buffer_1 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abcd"));
        let buffer_2 =
            cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "ABCDEFGHIJKLMNOP"));
        let multibuffer = cx.build_model(|_| MultiBuffer::new(0));

        // Create an insertion id in buffer 1 that doesn't exist in buffer 2.
        // Add an excerpt from buffer 1 that spans this new insertion.
        buffer_1.update(cx, |buffer, cx| buffer.edit([(4..4, "123")], None, cx));
        let excerpt_id_1 = multibuffer.update(cx, |multibuffer, cx| {
            multibuffer
                .push_excerpts(
                    buffer_1.clone(),
                    [ExcerptRange {
                        context: 0..7,
                        primary: None,
                    }],
                    cx,
                )
                .pop()
                .unwrap()
        });

        let snapshot_1 = multibuffer.read(cx).snapshot(cx);
        assert_eq!(snapshot_1.text(), "abcd123");

        // Replace the buffer 1 excerpt with new excerpts from buffer 2.
        let (excerpt_id_2, excerpt_id_3) = multibuffer.update(cx, |multibuffer, cx| {
            multibuffer.remove_excerpts([excerpt_id_1], cx);
            let mut ids = multibuffer
                .push_excerpts(
                    buffer_2.clone(),
                    [
                        ExcerptRange {
                            context: 0..4,
                            primary: None,
                        },
                        ExcerptRange {
                            context: 6..10,
                            primary: None,
                        },
                        ExcerptRange {
                            context: 12..16,
                            primary: None,
                        },
                    ],
                    cx,
                )
                .into_iter();
            (ids.next().unwrap(), ids.next().unwrap())
        });
        let snapshot_2 = multibuffer.read(cx).snapshot(cx);
        assert_eq!(snapshot_2.text(), "ABCD\nGHIJ\nMNOP");

        // The old excerpt id doesn't get reused.
        assert_ne!(excerpt_id_2, excerpt_id_1);

        // Resolve some anchors from the previous snapshot in the new snapshot.
        // The current excerpts are from a different buffer, so we don't attempt to
        // resolve the old text anchor in the new buffer.
        assert_eq!(
            snapshot_2.summary_for_anchor::<usize>(&snapshot_1.anchor_before(2)),
            0
        );
        assert_eq!(
            snapshot_2.summaries_for_anchors::<usize, _>(&[
                snapshot_1.anchor_before(2),
                snapshot_1.anchor_after(3)
            ]),
            vec![0, 0]
        );

        // Refresh anchors from the old snapshot. The return value indicates that both
        // anchors lost their original excerpt.
        let refresh =
            snapshot_2.refresh_anchors(&[snapshot_1.anchor_before(2), snapshot_1.anchor_after(3)]);
        assert_eq!(
            refresh,
            &[
                (0, snapshot_2.anchor_before(0), false),
                (1, snapshot_2.anchor_after(0), false),
            ]
        );

        // Replace the middle excerpt with a smaller excerpt in buffer 2,
        // that intersects the old excerpt.
        let excerpt_id_5 = multibuffer.update(cx, |multibuffer, cx| {
            multibuffer.remove_excerpts([excerpt_id_3], cx);
            multibuffer
                .insert_excerpts_after(
                    excerpt_id_2,
                    buffer_2.clone(),
                    [ExcerptRange {
                        context: 5..8,
                        primary: None,
                    }],
                    cx,
                )
                .pop()
                .unwrap()
        });

        let snapshot_3 = multibuffer.read(cx).snapshot(cx);
        assert_eq!(snapshot_3.text(), "ABCD\nFGH\nMNOP");
        assert_ne!(excerpt_id_5, excerpt_id_3);

        // Resolve some anchors from the previous snapshot in the new snapshot.
        // The third anchor can't be resolved, since its excerpt has been removed,
        // so it resolves to the same position as its predecessor.
        let anchors = [
            snapshot_2.anchor_before(0),
            snapshot_2.anchor_after(2),
            snapshot_2.anchor_after(6),
            snapshot_2.anchor_after(14),
        ];
        assert_eq!(
            snapshot_3.summaries_for_anchors::<usize, _>(&anchors),
            &[0, 2, 9, 13]
        );

        let new_anchors = snapshot_3.refresh_anchors(&anchors);
        assert_eq!(
            new_anchors.iter().map(|a| (a.0, a.2)).collect::<Vec<_>>(),
            &[(0, true), (1, true), (2, true), (3, true)]
        );
        assert_eq!(
            snapshot_3.summaries_for_anchors::<usize, _>(new_anchors.iter().map(|a| &a.1)),
            &[0, 2, 7, 13]
        );
    }
+
+ #[gpui2::test(iterations = 100)]
+ fn test_random_multibuffer(cx: &mut AppContext, mut rng: StdRng) {
+ let operations = env::var("OPERATIONS")
+ .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+ .unwrap_or(10);
+
+ let mut buffers: Vec<Model<Buffer>> = Vec::new();
+ let multibuffer = cx.build_model(|_| MultiBuffer::new(0));
+ let mut excerpt_ids = Vec::<ExcerptId>::new();
+ let mut expected_excerpts = Vec::<(Model<Buffer>, Range<text::Anchor>)>::new();
+ let mut anchors = Vec::new();
+ let mut old_versions = Vec::new();
+
+ for _ in 0..operations {
+ match rng.gen_range(0..100) {
+ 0..=19 if !buffers.is_empty() => {
+ let buffer = buffers.choose(&mut rng).unwrap();
+ buffer.update(cx, |buf, cx| buf.randomly_edit(&mut rng, 5, cx));
+ }
+ 20..=29 if !expected_excerpts.is_empty() => {
+ let mut ids_to_remove = vec![];
+ for _ in 0..rng.gen_range(1..=3) {
+ if expected_excerpts.is_empty() {
+ break;
+ }
+
+ let ix = rng.gen_range(0..expected_excerpts.len());
+ ids_to_remove.push(excerpt_ids.remove(ix));
+ let (buffer, range) = expected_excerpts.remove(ix);
+ let buffer = buffer.read(cx);
+ log::info!(
+ "Removing excerpt {}: {:?}",
+ ix,
+ buffer
+ .text_for_range(range.to_offset(buffer))
+ .collect::<String>(),
+ );
+ }
+ let snapshot = multibuffer.read(cx).read(cx);
+ ids_to_remove.sort_unstable_by(|a, b| a.cmp(&b, &snapshot));
+ drop(snapshot);
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.remove_excerpts(ids_to_remove, cx)
+ });
+ }
+ 30..=39 if !expected_excerpts.is_empty() => {
+ let multibuffer = multibuffer.read(cx).read(cx);
+ let offset =
+ multibuffer.clip_offset(rng.gen_range(0..=multibuffer.len()), Bias::Left);
+ let bias = if rng.gen() { Bias::Left } else { Bias::Right };
+ log::info!("Creating anchor at {} with bias {:?}", offset, bias);
+ anchors.push(multibuffer.anchor_at(offset, bias));
+ anchors.sort_by(|a, b| a.cmp(b, &multibuffer));
+ }
+ 40..=44 if !anchors.is_empty() => {
+ let multibuffer = multibuffer.read(cx).read(cx);
+ let prev_len = anchors.len();
+ anchors = multibuffer
+ .refresh_anchors(&anchors)
+ .into_iter()
+ .map(|a| a.1)
+ .collect();
+
+ // Ensure the newly-refreshed anchors point to a valid excerpt and don't
+ // overshoot its boundaries.
+ assert_eq!(anchors.len(), prev_len);
+ for anchor in &anchors {
+ if anchor.excerpt_id == ExcerptId::min()
+ || anchor.excerpt_id == ExcerptId::max()
+ {
+ continue;
+ }
+
+ let excerpt = multibuffer.excerpt(anchor.excerpt_id).unwrap();
+ assert_eq!(excerpt.id, anchor.excerpt_id);
+ assert!(excerpt.contains(anchor));
+ }
+ }
+ _ => {
+ let buffer_handle = if buffers.is_empty() || rng.gen_bool(0.4) {
+ let base_text = util::RandomCharIter::new(&mut rng)
+ .take(10)
+ .collect::<String>();
+ buffers.push(
+ cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), base_text)),
+ );
+ buffers.last().unwrap()
+ } else {
+ buffers.choose(&mut rng).unwrap()
+ };
+
+ let buffer = buffer_handle.read(cx);
+ let end_ix = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Right);
+ let start_ix = buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+ let anchor_range = buffer.anchor_before(start_ix)..buffer.anchor_after(end_ix);
+ let prev_excerpt_ix = rng.gen_range(0..=expected_excerpts.len());
+ let prev_excerpt_id = excerpt_ids
+ .get(prev_excerpt_ix)
+ .cloned()
+ .unwrap_or_else(ExcerptId::max);
+ let excerpt_ix = (prev_excerpt_ix + 1).min(expected_excerpts.len());
+
+ log::info!(
+ "Inserting excerpt at {} of {} for buffer {}: {:?}[{:?}] = {:?}",
+ excerpt_ix,
+ expected_excerpts.len(),
+ buffer_handle.read(cx).remote_id(),
+ buffer.text(),
+ start_ix..end_ix,
+ &buffer.text()[start_ix..end_ix]
+ );
+
+ let excerpt_id = multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer
+ .insert_excerpts_after(
+ prev_excerpt_id,
+ buffer_handle.clone(),
+ [ExcerptRange {
+ context: start_ix..end_ix,
+ primary: None,
+ }],
+ cx,
+ )
+ .pop()
+ .unwrap()
+ });
+
+ excerpt_ids.insert(excerpt_ix, excerpt_id);
+ expected_excerpts.insert(excerpt_ix, (buffer_handle.clone(), anchor_range));
+ }
+ }
+
+ if rng.gen_bool(0.3) {
+ multibuffer.update(cx, |multibuffer, cx| {
+ old_versions.push((multibuffer.snapshot(cx), multibuffer.subscribe()));
+ })
+ }
+
+ let snapshot = multibuffer.read(cx).snapshot(cx);
+
+ let mut excerpt_starts = Vec::new();
+ let mut expected_text = String::new();
+ let mut expected_buffer_rows = Vec::new();
+ for (buffer, range) in &expected_excerpts {
+ let buffer = buffer.read(cx);
+ let buffer_range = range.to_offset(buffer);
+
+ excerpt_starts.push(TextSummary::from(expected_text.as_str()));
+ expected_text.extend(buffer.text_for_range(buffer_range.clone()));
+ expected_text.push('\n');
+
+ let buffer_row_range = buffer.offset_to_point(buffer_range.start).row
+ ..=buffer.offset_to_point(buffer_range.end).row;
+ for row in buffer_row_range {
+ expected_buffer_rows.push(Some(row));
+ }
+ }
+ // Remove final trailing newline.
+ if !expected_excerpts.is_empty() {
+ expected_text.pop();
+ }
+
+ // Always report one buffer row
+ if expected_buffer_rows.is_empty() {
+ expected_buffer_rows.push(Some(0));
+ }
+
+ assert_eq!(snapshot.text(), expected_text);
+ log::info!("MultiBuffer text: {:?}", expected_text);
+
+ assert_eq!(
+ snapshot.buffer_rows(0).collect::<Vec<_>>(),
+ expected_buffer_rows,
+ );
+
+ for _ in 0..5 {
+ let start_row = rng.gen_range(0..=expected_buffer_rows.len());
+ assert_eq!(
+ snapshot.buffer_rows(start_row as u32).collect::<Vec<_>>(),
+ &expected_buffer_rows[start_row..],
+ "buffer_rows({})",
+ start_row
+ );
+ }
+
+ assert_eq!(
+ snapshot.max_buffer_row(),
+ expected_buffer_rows.into_iter().flatten().max().unwrap()
+ );
+
+ let mut excerpt_starts = excerpt_starts.into_iter();
+ for (buffer, range) in &expected_excerpts {
+ let buffer = buffer.read(cx);
+ let buffer_id = buffer.remote_id();
+ let buffer_range = range.to_offset(buffer);
+ let buffer_start_point = buffer.offset_to_point(buffer_range.start);
+ let buffer_start_point_utf16 =
+ buffer.text_summary_for_range::<PointUtf16, _>(0..buffer_range.start);
+
+ let excerpt_start = excerpt_starts.next().unwrap();
+ let mut offset = excerpt_start.len;
+ let mut buffer_offset = buffer_range.start;
+ let mut point = excerpt_start.lines;
+ let mut buffer_point = buffer_start_point;
+ let mut point_utf16 = excerpt_start.lines_utf16();
+ let mut buffer_point_utf16 = buffer_start_point_utf16;
+ for ch in buffer
+ .snapshot()
+ .chunks(buffer_range.clone(), false)
+ .flat_map(|c| c.text.chars())
+ {
+ for _ in 0..ch.len_utf8() {
+ let left_offset = snapshot.clip_offset(offset, Bias::Left);
+ let right_offset = snapshot.clip_offset(offset, Bias::Right);
+ let buffer_left_offset = buffer.clip_offset(buffer_offset, Bias::Left);
+ let buffer_right_offset = buffer.clip_offset(buffer_offset, Bias::Right);
+ assert_eq!(
+ left_offset,
+ excerpt_start.len + (buffer_left_offset - buffer_range.start),
+ "clip_offset({:?}, Left). buffer: {:?}, buffer offset: {:?}",
+ offset,
+ buffer_id,
+ buffer_offset,
+ );
+ assert_eq!(
+ right_offset,
+ excerpt_start.len + (buffer_right_offset - buffer_range.start),
+ "clip_offset({:?}, Right). buffer: {:?}, buffer offset: {:?}",
+ offset,
+ buffer_id,
+ buffer_offset,
+ );
+
+ let left_point = snapshot.clip_point(point, Bias::Left);
+ let right_point = snapshot.clip_point(point, Bias::Right);
+ let buffer_left_point = buffer.clip_point(buffer_point, Bias::Left);
+ let buffer_right_point = buffer.clip_point(buffer_point, Bias::Right);
+ assert_eq!(
+ left_point,
+ excerpt_start.lines + (buffer_left_point - buffer_start_point),
+ "clip_point({:?}, Left). buffer: {:?}, buffer point: {:?}",
+ point,
+ buffer_id,
+ buffer_point,
+ );
+ assert_eq!(
+ right_point,
+ excerpt_start.lines + (buffer_right_point - buffer_start_point),
+ "clip_point({:?}, Right). buffer: {:?}, buffer point: {:?}",
+ point,
+ buffer_id,
+ buffer_point,
+ );
+
+ assert_eq!(
+ snapshot.point_to_offset(left_point),
+ left_offset,
+ "point_to_offset({:?})",
+ left_point,
+ );
+ assert_eq!(
+ snapshot.offset_to_point(left_offset),
+ left_point,
+ "offset_to_point({:?})",
+ left_offset,
+ );
+
+ offset += 1;
+ buffer_offset += 1;
+ if ch == '\n' {
+ point += Point::new(1, 0);
+ buffer_point += Point::new(1, 0);
+ } else {
+ point += Point::new(0, 1);
+ buffer_point += Point::new(0, 1);
+ }
+ }
+
+ for _ in 0..ch.len_utf16() {
+ let left_point_utf16 =
+ snapshot.clip_point_utf16(Unclipped(point_utf16), Bias::Left);
+ let right_point_utf16 =
+ snapshot.clip_point_utf16(Unclipped(point_utf16), Bias::Right);
+ let buffer_left_point_utf16 =
+ buffer.clip_point_utf16(Unclipped(buffer_point_utf16), Bias::Left);
+ let buffer_right_point_utf16 =
+ buffer.clip_point_utf16(Unclipped(buffer_point_utf16), Bias::Right);
+ assert_eq!(
+ left_point_utf16,
+ excerpt_start.lines_utf16()
+ + (buffer_left_point_utf16 - buffer_start_point_utf16),
+ "clip_point_utf16({:?}, Left). buffer: {:?}, buffer point_utf16: {:?}",
+ point_utf16,
+ buffer_id,
+ buffer_point_utf16,
+ );
+ assert_eq!(
+ right_point_utf16,
+ excerpt_start.lines_utf16()
+ + (buffer_right_point_utf16 - buffer_start_point_utf16),
+ "clip_point_utf16({:?}, Right). buffer: {:?}, buffer point_utf16: {:?}",
+ point_utf16,
+ buffer_id,
+ buffer_point_utf16,
+ );
+
+ if ch == '\n' {
+ point_utf16 += PointUtf16::new(1, 0);
+ buffer_point_utf16 += PointUtf16::new(1, 0);
+ } else {
+ point_utf16 += PointUtf16::new(0, 1);
+ buffer_point_utf16 += PointUtf16::new(0, 1);
+ }
+ }
+ }
+ }
+
+ for (row, line) in expected_text.split('\n').enumerate() {
+ assert_eq!(
+ snapshot.line_len(row as u32),
+ line.len() as u32,
+ "line_len({}).",
+ row
+ );
+ }
+
+ let text_rope = Rope::from(expected_text.as_str());
+ for _ in 0..10 {
+ let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right);
+ let start_ix = text_rope.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+
+ let text_for_range = snapshot
+ .text_for_range(start_ix..end_ix)
+ .collect::<String>();
+ assert_eq!(
+ text_for_range,
+ &expected_text[start_ix..end_ix],
+ "incorrect text for range {:?}",
+ start_ix..end_ix
+ );
+
+ let excerpted_buffer_ranges = multibuffer
+ .read(cx)
+ .range_to_buffer_ranges(start_ix..end_ix, cx);
+ let excerpted_buffers_text = excerpted_buffer_ranges
+ .iter()
+ .map(|(buffer, buffer_range, _)| {
+ buffer
+ .read(cx)
+ .text_for_range(buffer_range.clone())
+ .collect::<String>()
+ })
+ .collect::<Vec<_>>()
+ .join("\n");
+ assert_eq!(excerpted_buffers_text, text_for_range);
+ if !expected_excerpts.is_empty() {
+ assert!(!excerpted_buffer_ranges.is_empty());
+ }
+
+ let expected_summary = TextSummary::from(&expected_text[start_ix..end_ix]);
+ assert_eq!(
+ snapshot.text_summary_for_range::<TextSummary, _>(start_ix..end_ix),
+ expected_summary,
+ "incorrect summary for range {:?}",
+ start_ix..end_ix
+ );
+ }
+
+ // Anchor resolution
+ let summaries = snapshot.summaries_for_anchors::<usize, _>(&anchors);
+ assert_eq!(anchors.len(), summaries.len());
+ for (anchor, resolved_offset) in anchors.iter().zip(summaries) {
+ assert!(resolved_offset <= snapshot.len());
+ assert_eq!(
+ snapshot.summary_for_anchor::<usize>(anchor),
+ resolved_offset
+ );
+ }
+
+ for _ in 0..10 {
+ let end_ix = text_rope.clip_offset(rng.gen_range(0..=text_rope.len()), Bias::Right);
+ assert_eq!(
+ snapshot.reversed_chars_at(end_ix).collect::<String>(),
+ expected_text[..end_ix].chars().rev().collect::<String>(),
+ );
+ }
+
+ for _ in 0..10 {
+ let end_ix = rng.gen_range(0..=text_rope.len());
+ let start_ix = rng.gen_range(0..=end_ix);
+ assert_eq!(
+ snapshot
+ .bytes_in_range(start_ix..end_ix)
+ .flatten()
+ .copied()
+ .collect::<Vec<_>>(),
+ expected_text.as_bytes()[start_ix..end_ix].to_vec(),
+ "bytes_in_range({:?})",
+ start_ix..end_ix,
+ );
+ }
+ }
+
+ let snapshot = multibuffer.read(cx).snapshot(cx);
+ for (old_snapshot, subscription) in old_versions {
+ let edits = subscription.consume().into_inner();
+
+ log::info!(
+ "applying subscription edits to old text: {:?}: {:?}",
+ old_snapshot.text(),
+ edits,
+ );
+
+ let mut text = old_snapshot.text();
+ for edit in edits {
+ let new_text: String = snapshot.text_for_range(edit.new.clone()).collect();
+ text.replace_range(edit.new.start..edit.new.start + edit.old.len(), &new_text);
+ }
+ assert_eq!(text.to_string(), snapshot.text());
+ }
+ }
+
+ // Exercises multibuffer undo/redo history across two excerpted buffers:
+ // time-based transaction grouping, edits made directly on the underlying
+ // buffers, redo-stack clearing, and manual grouping via
+ // `group_until_transaction`.
+ #[gpui2::test]
+ fn test_history(cx: &mut AppContext) {
+ let test_settings = SettingsStore::test(cx);
+ cx.set_global(test_settings);
+
+ // Two single-line buffers, each fully covered by one excerpt, so the
+ // initial multibuffer text is "1234\n5678".
+ let buffer_1 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "1234"));
+ let buffer_2 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "5678"));
+ let multibuffer = cx.build_model(|_| MultiBuffer::new(0));
+ let group_interval = multibuffer.read(cx).history.group_interval;
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.push_excerpts(
+ buffer_1.clone(),
+ [ExcerptRange {
+ context: 0..buffer_1.read(cx).len(),
+ primary: None,
+ }],
+ cx,
+ );
+ multibuffer.push_excerpts(
+ buffer_2.clone(),
+ [ExcerptRange {
+ context: 0..buffer_2.read(cx).len(),
+ primary: None,
+ }],
+ cx,
+ );
+ });
+
+ // Drive transaction grouping with an explicit clock so grouping by
+ // `group_interval` is deterministic.
+ let mut now = Instant::now();
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ // Two edits at the same instant end up in a single transaction.
+ let transaction_1 = multibuffer.start_transaction_at(now, cx).unwrap();
+ multibuffer.edit(
+ [
+ (Point::new(0, 0)..Point::new(0, 0), "A"),
+ (Point::new(1, 0)..Point::new(1, 0), "A"),
+ ],
+ None,
+ cx,
+ );
+ multibuffer.edit(
+ [
+ (Point::new(0, 1)..Point::new(0, 1), "B"),
+ (Point::new(1, 1)..Point::new(1, 1), "B"),
+ ],
+ None,
+ cx,
+ );
+ multibuffer.end_transaction_at(now, cx);
+ assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
+
+ // Edit buffer 1 through the multibuffer
+ now += 2 * group_interval;
+ multibuffer.start_transaction_at(now, cx);
+ multibuffer.edit([(2..2, "C")], None, cx);
+ multibuffer.end_transaction_at(now, cx);
+ assert_eq!(multibuffer.read(cx).text(), "ABC1234\nAB5678");
+
+ // Edit buffer 1 independently
+ buffer_1.update(cx, |buffer_1, cx| {
+ buffer_1.start_transaction_at(now);
+ buffer_1.edit([(3..3, "D")], None, cx);
+ buffer_1.end_transaction_at(now, cx);
+
+ now += 2 * group_interval;
+ buffer_1.start_transaction_at(now);
+ buffer_1.edit([(4..4, "E")], None, cx);
+ buffer_1.end_transaction_at(now, cx);
+ });
+ assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\nAB5678");
+
+ // An undo in the multibuffer undoes the multibuffer transaction
+ // and also any individual buffer edits that have occurred since
+ // that transaction.
+ multibuffer.undo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
+
+ multibuffer.undo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "1234\n5678");
+
+ multibuffer.redo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
+
+ multibuffer.redo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\nAB5678");
+
+ // Undo buffer 2 independently.
+ buffer_2.update(cx, |buffer_2, cx| buffer_2.undo(cx));
+ assert_eq!(multibuffer.read(cx).text(), "ABCDE1234\n5678");
+
+ // An undo in the multibuffer undoes the components of the
+ // last multibuffer transaction that are not already undone.
+ multibuffer.undo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "AB1234\n5678");
+
+ multibuffer.undo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "1234\n5678");
+
+ multibuffer.redo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "AB1234\nAB5678");
+
+ buffer_1.update(cx, |buffer_1, cx| buffer_1.redo(cx));
+ assert_eq!(multibuffer.read(cx).text(), "ABCD1234\nAB5678");
+
+ // Redo stack gets cleared after an edit.
+ now += 2 * group_interval;
+ multibuffer.start_transaction_at(now, cx);
+ multibuffer.edit([(0..0, "X")], None, cx);
+ multibuffer.end_transaction_at(now, cx);
+ assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678");
+ multibuffer.redo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678");
+ multibuffer.undo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "ABCD1234\nAB5678");
+ multibuffer.undo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "1234\n5678");
+
+ // Transactions can be grouped manually.
+ multibuffer.redo(cx);
+ multibuffer.redo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678");
+ multibuffer.group_until_transaction(transaction_1, cx);
+ multibuffer.undo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "1234\n5678");
+ multibuffer.redo(cx);
+ assert_eq!(multibuffer.read(cx).text(), "XABCD1234\nAB5678");
+ });
+ }
+}
@@ -2604,64 +2604,64 @@ async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
}
-#[gpui::test]
-async fn test_save_as(cx: &mut gpui::TestAppContext) {
- init_test(cx);
-
- let fs = FakeFs::new(cx.background());
- fs.insert_tree("/dir", json!({})).await;
-
- let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
-
- let languages = project.read_with(cx, |project, _| project.languages().clone());
- languages.register(
- "/some/path",
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".into()],
- ..Default::default()
- },
- tree_sitter_rust::language(),
- vec![],
- |_| Default::default(),
- );
-
- let buffer = project.update(cx, |project, cx| {
- project.create_buffer("", None, cx).unwrap()
- });
- buffer.update(cx, |buffer, cx| {
- buffer.edit([(0..0, "abc")], None, cx);
- assert!(buffer.is_dirty());
- assert!(!buffer.has_conflict());
- assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
- });
- project
- .update(cx, |project, cx| {
- project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
- })
- .await
- .unwrap();
- assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
-
- cx.foreground().run_until_parked();
- buffer.read_with(cx, |buffer, cx| {
- assert_eq!(
- buffer.file().unwrap().full_path(cx),
- Path::new("dir/file1.rs")
- );
- assert!(!buffer.is_dirty());
- assert!(!buffer.has_conflict());
- assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
- });
-
- let opened_buffer = project
- .update(cx, |project, cx| {
- project.open_local_buffer("/dir/file1.rs", cx)
- })
- .await
- .unwrap();
- assert_eq!(opened_buffer, buffer);
-}
+// TODO(review): commented out rather than deleted during the gpui2 port; re-enable (or remove) once this test's dependencies are available in the ported crate.
+// #[gpui::test]
+// async fn test_save_as(cx: &mut gpui::TestAppContext) {
+// init_test(cx);
+
+// let fs = FakeFs::new(cx.background());
+// fs.insert_tree("/dir", json!({})).await;
+
+// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+// let languages = project.read_with(cx, |project, _| project.languages().clone());
+// languages.register(
+// "/some/path",
+// LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".into()],
+// ..Default::default()
+// },
+// tree_sitter_rust::language(),
+// vec![],
+// |_| Default::default(),
+// );
+
+// let buffer = project.update(cx, |project, cx| {
+// project.create_buffer("", None, cx).unwrap()
+// });
+// buffer.update(cx, |buffer, cx| {
+// buffer.edit([(0..0, "abc")], None, cx);
+// assert!(buffer.is_dirty());
+// assert!(!buffer.has_conflict());
+// assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
+// });
+// project
+// .update(cx, |project, cx| {
+// project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
+
+// cx.foreground().run_until_parked();
+// buffer.read_with(cx, |buffer, cx| {
+// assert_eq!(
+// buffer.file().unwrap().full_path(cx),
+// Path::new("dir/file1.rs")
+// );
+// assert!(!buffer.is_dirty());
+// assert!(!buffer.has_conflict());
+// assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
+// });
+
+// let opened_buffer = project
+// .update(cx, |project, cx| {
+// project.open_local_buffer("/dir/file1.rs", cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(opened_buffer, buffer);
+// }
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(
@@ -16,6 +16,7 @@ test-support = [
"settings2/test-support",
"text/test-support",
"prettier2/test-support",
+ "gpui2/test-support",
]
[dependencies]
@@ -855,39 +855,39 @@ impl Project {
}
}
- // #[cfg(any(test, feature = "test-support"))]
- // pub async fn test(
- // fs: Arc<dyn Fs>,
- // root_paths: impl IntoIterator<Item = &Path>,
- // cx: &mut gpui::TestAppContext,
- // ) -> Handle<Project> {
- // let mut languages = LanguageRegistry::test();
- // languages.set_executor(cx.background());
- // let http_client = util::http::FakeHttpClient::with_404_response();
- // let client = cx.update(|cx| client2::Client::new(http_client.clone(), cx));
- // let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
- // let project = cx.update(|cx| {
- // Project::local(
- // client,
- // node_runtime::FakeNodeRuntime::new(),
- // user_store,
- // Arc::new(languages),
- // fs,
- // cx,
- // )
- // });
- // for path in root_paths {
- // let (tree, _) = project
- // .update(cx, |project, cx| {
- // project.find_or_create_local_worktree(path, true, cx)
- // })
- // .await
- // .unwrap();
- // tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
- // .await;
- // }
- // project
- // }
+ /// Test-only constructor: builds a local `Project` backed by the given
+ /// (typically fake) `fs`, using a test `LanguageRegistry`, a 404-only fake
+ /// HTTP client, and a fake node runtime, then adds one local worktree per
+ /// entry in `root_paths` and waits for each initial scan to complete.
+ #[cfg(any(test, feature = "test-support"))]
+ pub async fn test(
+ fs: Arc<dyn Fs>,
+ root_paths: impl IntoIterator<Item = &Path>,
+ cx: &mut gpui2::TestAppContext,
+ ) -> Model<Project> {
+ let mut languages = LanguageRegistry::test();
+ languages.set_executor(cx.executor().clone());
+ // Every HTTP request fails with a 404 so tests never touch the network.
+ let http_client = util::http::FakeHttpClient::with_404_response();
+ let client = cx.update(|cx| client2::Client::new(http_client.clone(), cx));
+ let user_store = cx.build_model(|cx| UserStore::new(client.clone(), http_client, cx));
+ let project = cx.update(|cx| {
+ Project::local(
+ client,
+ node_runtime::FakeNodeRuntime::new(),
+ user_store,
+ Arc::new(languages),
+ fs,
+ cx,
+ )
+ });
+ for path in root_paths {
+ let (tree, _) = project
+ .update(cx, |project, cx| {
+ project.find_or_create_local_worktree(path, true, cx)
+ })
+ .await
+ .unwrap();
+ // Block until the worktree's initial filesystem scan finishes so
+ // callers observe a fully-populated project.
+ tree.update(cx, |tree, _| tree.as_local().unwrap().scan_complete())
+ .await;
+ }
+ project
+ }
fn on_settings_changed(&mut self, cx: &mut ModelContext<Self>) {
let mut language_servers_to_start = Vec::new();
@@ -1,32 +1,24 @@
-// use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
-// use fs::{FakeFs, RealFs};
-// use futures::{future, StreamExt};
-// use gpui::{executor::Deterministic, test::subscribe, AppContext};
-// use language2::{
-// language_settings::{AllLanguageSettings, LanguageSettingsContent},
-// tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
-// LineEnding, OffsetRangeExt, Point, ToPoint,
-// };
-// use lsp2::Url;
-// use parking_lot::Mutex;
-// use pretty_assertions::assert_eq;
-// use serde_json::json;
-// use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
-// use unindent::Unindent as _;
-// use util::{assert_set_eq, test::temp_tree};
-
-// #[cfg(test)]
-// #[ctor::ctor]
-// fn init_logger() {
-// if std::env::var("RUST_LOG").is_ok() {
-// env_logger::init();
-// }
-// }
+use crate::{search::PathMatcher, Event, *};
+use fs2::FakeFs;
+use futures::{future, StreamExt};
+use gpui2::AppContext;
+use language2::{
+ language_settings::{AllLanguageSettings, LanguageSettingsContent},
+ tree_sitter_rust, Diagnostic, FakeLspAdapter, LanguageConfig, LineEnding, OffsetRangeExt,
+ Point, ToPoint,
+};
+use lsp2::Url;
+use parking_lot::Mutex;
+use pretty_assertions::assert_eq;
+use serde_json::json;
+use std::task::Poll;
+use unindent::Unindent as _;
+use util::assert_set_eq;
-// #[gpui::test]
-// async fn test_symlinks(cx: &mut gpui::TestAppContext) {
+// #[gpui2::test]
+// async fn test_symlinks(cx: &mut gpui2::TestAppContext) {
// init_test(cx);
-// cx.foreground().allow_parking();
+// cx.executor().allow_parking();
// let dir = temp_tree(json!({
// "root": {
@@ -52,8 +44,8 @@
// .unwrap();
// let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
-// project.read_with(cx, |project, cx| {
-// let tree = project.worktrees(cx).next().unwrap().read(cx);
+// project.update(cx, |project, cx| {
+// let tree = project.worktrees().next().unwrap().read(cx);
// assert_eq!(tree.file_count(), 5);
// assert_eq!(
// tree.inode_for_path("fennel/grape"),
@@ -62,107 +54,2260 @@
// });
// }
-// #[gpui::test]
-// async fn test_managing_project_specific_settings(
-// deterministic: Arc<Deterministic>,
-// cx: &mut gpui::TestAppContext,
-// ) {
+ // Verifies per-directory `.zed/settings.json` handling: the root settings
+ // (tab_size 8) apply to `a/a.rs`, while the nested `b/.zed/settings.json`
+ // (tab_size 2) overrides them for `b/b.rs`.
+#[gpui2::test]
+async fn test_managing_project_specific_settings(cx: &mut gpui2::TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/the-root",
+ json!({
+ ".zed": {
+ "settings.json": r#"{ "tab_size": 8 }"#
+ },
+ "a": {
+ "a.rs": "fn a() {\n A\n}"
+ },
+ "b": {
+ ".zed": {
+ "settings.json": r#"{ "tab_size": 2 }"#
+ },
+ "b.rs": "fn b() {\n B\n}"
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+ let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
+
+ // Let the worktree scan and settings observation settle before querying.
+ cx.executor().run_until_parked();
+ cx.update(|cx| {
+ let tree = worktree.read(cx);
+
+ let settings_a = language_settings(
+ None,
+ Some(
+ &(File::for_entry(
+ tree.entry_for_path("a/a.rs").unwrap().clone(),
+ worktree.clone(),
+ ) as _),
+ ),
+ cx,
+ );
+ let settings_b = language_settings(
+ None,
+ Some(
+ &(File::for_entry(
+ tree.entry_for_path("b/b.rs").unwrap().clone(),
+ worktree.clone(),
+ ) as _),
+ ),
+ cx,
+ );
+
+ assert_eq!(settings_a.tab_size.get(), 8);
+ assert_eq!(settings_b.tab_size.get(), 2);
+ });
+}
+
+#[gpui2::test]
+async fn test_managing_language_servers(cx: &mut gpui2::TestAppContext) {
+ init_test(cx);
+
+ let mut rust_language = Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ );
+ let mut json_language = Language::new(
+ LanguageConfig {
+ name: "JSON".into(),
+ path_suffixes: vec!["json".to_string()],
+ ..Default::default()
+ },
+ None,
+ );
+ let mut fake_rust_servers = rust_language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ name: "the-rust-language-server",
+ capabilities: lsp2::ServerCapabilities {
+ completion_provider: Some(lsp2::CompletionOptions {
+ trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
+ ..Default::default()
+ }),
+ ..Default::default()
+ },
+ ..Default::default()
+ }))
+ .await;
+ let mut fake_json_servers = json_language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ name: "the-json-language-server",
+ capabilities: lsp2::ServerCapabilities {
+ completion_provider: Some(lsp2::CompletionOptions {
+ trigger_characters: Some(vec![":".to_string()]),
+ ..Default::default()
+ }),
+ ..Default::default()
+ },
+ ..Default::default()
+ }))
+ .await;
+
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/the-root",
+ json!({
+ "test.rs": "const A: i32 = 1;",
+ "test2.rs": "",
+ "Cargo.toml": "a = 1",
+ "package.json": "{\"a\": 1}",
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+
+ // Open a buffer without an associated language server.
+ let toml_buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer("/the-root/Cargo.toml", cx)
+ })
+ .await
+ .unwrap();
+
+ // Open a buffer with an associated language server before the language for it has been loaded.
+ let rust_buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer("/the-root/test.rs", cx)
+ })
+ .await
+ .unwrap();
+ rust_buffer.update(cx, |buffer, _| {
+ assert_eq!(buffer.language().map(|l| l.name()), None);
+ });
+
+ // Now we add the languages to the project, and ensure they get assigned to all
+ // the relevant open buffers.
+ project.update(cx, |project, _| {
+ project.languages.add(Arc::new(json_language));
+ project.languages.add(Arc::new(rust_language));
+ });
+ cx.executor().run_until_parked();
+ rust_buffer.update(cx, |buffer, _| {
+ assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
+ });
+
+ // A server is started up, and it is notified about Rust files.
+ let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp2::TextDocumentItem {
+ uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
+ version: 0,
+ text: "const A: i32 = 1;".to_string(),
+ language_id: Default::default()
+ }
+ );
+
+ // The buffer is configured based on the language server's capabilities.
+ rust_buffer.update(cx, |buffer, _| {
+ assert_eq!(
+ buffer.completion_triggers(),
+ &[".".to_string(), "::".to_string()]
+ );
+ });
+ toml_buffer.update(cx, |buffer, _| {
+ assert!(buffer.completion_triggers().is_empty());
+ });
+
+ // Edit a buffer. The changes are reported to the language server.
+ rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+ .await
+ .text_document,
+ lsp2::VersionedTextDocumentIdentifier::new(
+ lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
+ 1
+ )
+ );
+
+ // Open a third buffer with a different associated language server.
+ let json_buffer = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer("/the-root/package.json", cx)
+ })
+ .await
+ .unwrap();
+
+ // A json language server is started up and is only notified about the json buffer.
+ let mut fake_json_server = fake_json_servers.next().await.unwrap();
+ assert_eq!(
+ fake_json_server
+ .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp2::TextDocumentItem {
+ uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
+ version: 0,
+ text: "{\"a\": 1}".to_string(),
+ language_id: Default::default()
+ }
+ );
+
+ // This buffer is configured based on the second language server's
+ // capabilities.
+ json_buffer.update(cx, |buffer, _| {
+ assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
+ });
+
+ // When opening another buffer whose language server is already running,
+ // it is also configured based on the existing language server's capabilities.
+ let rust_buffer2 = project
+ .update(cx, |project, cx| {
+ project.open_local_buffer("/the-root/test2.rs", cx)
+ })
+ .await
+ .unwrap();
+ rust_buffer2.update(cx, |buffer, _| {
+ assert_eq!(
+ buffer.completion_triggers(),
+ &[".".to_string(), "::".to_string()]
+ );
+ });
+
+ // Changes are reported only to servers matching the buffer's language.
+ toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
+ rust_buffer2.update(cx, |buffer, cx| {
+ buffer.edit([(0..0, "let x = 1;")], None, cx)
+ });
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+ .await
+ .text_document,
+ lsp2::VersionedTextDocumentIdentifier::new(
+ lsp2::Url::from_file_path("/the-root/test2.rs").unwrap(),
+ 1
+ )
+ );
+
+ // Save notifications are reported to all servers.
+ project
+ .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
+ .await
+ .unwrap();
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp2::notification::DidSaveTextDocument>()
+ .await
+ .text_document,
+ lsp2::TextDocumentIdentifier::new(
+ lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
+ )
+ );
+ assert_eq!(
+ fake_json_server
+ .receive_notification::<lsp2::notification::DidSaveTextDocument>()
+ .await
+ .text_document,
+ lsp2::TextDocumentIdentifier::new(
+ lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
+ )
+ );
+
+ // Renames are reported only to servers matching the buffer's language.
+ fs.rename(
+ Path::new("/the-root/test2.rs"),
+ Path::new("/the-root/test3.rs"),
+ Default::default(),
+ )
+ .await
+ .unwrap();
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp2::notification::DidCloseTextDocument>()
+ .await
+ .text_document,
+ lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test2.rs").unwrap()),
+ );
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp2::TextDocumentItem {
+ uri: lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),
+ version: 0,
+ text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
+ language_id: Default::default()
+ },
+ );
+
+ rust_buffer2.update(cx, |buffer, cx| {
+ buffer.update_diagnostics(
+ LanguageServerId(0),
+ DiagnosticSet::from_sorted_entries(
+ vec![DiagnosticEntry {
+ diagnostic: Default::default(),
+ range: Anchor::MIN..Anchor::MAX,
+ }],
+ &buffer.snapshot(),
+ ),
+ cx,
+ );
+ assert_eq!(
+ buffer
+ .snapshot()
+ .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
+ .count(),
+ 1
+ );
+ });
+
+ // When the rename changes the extension of the file, the buffer gets closed on the old
+ // language server and gets opened on the new one.
+ fs.rename(
+ Path::new("/the-root/test3.rs"),
+ Path::new("/the-root/test3.json"),
+ Default::default(),
+ )
+ .await
+ .unwrap();
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp2::notification::DidCloseTextDocument>()
+ .await
+ .text_document,
+ lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),),
+ );
+ assert_eq!(
+ fake_json_server
+ .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp2::TextDocumentItem {
+ uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
+ version: 0,
+ text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
+ language_id: Default::default()
+ },
+ );
+
+ // We clear the diagnostics, since the language has changed.
+ rust_buffer2.update(cx, |buffer, _| {
+ assert_eq!(
+ buffer
+ .snapshot()
+ .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
+ .count(),
+ 0
+ );
+ });
+
+ // The renamed file's version resets after changing language server.
+ rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
+ assert_eq!(
+ fake_json_server
+ .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+ .await
+ .text_document,
+ lsp2::VersionedTextDocumentIdentifier::new(
+ lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
+ 1
+ )
+ );
+
+ // Restart language servers
+ project.update(cx, |project, cx| {
+ project.restart_language_servers_for_buffers(
+ vec![rust_buffer.clone(), json_buffer.clone()],
+ cx,
+ );
+ });
+
+ let mut rust_shutdown_requests = fake_rust_server
+ .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
+ let mut json_shutdown_requests = fake_json_server
+ .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
+ futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
+
+ let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
+ let mut fake_json_server = fake_json_servers.next().await.unwrap();
+
+ // Ensure rust document is reopened in new rust language server
+ assert_eq!(
+ fake_rust_server
+ .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ lsp2::TextDocumentItem {
+ uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
+ version: 0,
+ text: rust_buffer.update(cx, |buffer, _| buffer.text()),
+ language_id: Default::default()
+ }
+ );
+
+ // Ensure json documents are reopened in new json language server
+ assert_set_eq!(
+ [
+ fake_json_server
+ .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ fake_json_server
+ .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+ .await
+ .text_document,
+ ],
+ [
+ lsp2::TextDocumentItem {
+ uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
+ version: 0,
+ text: json_buffer.update(cx, |buffer, _| buffer.text()),
+ language_id: Default::default()
+ },
+ lsp2::TextDocumentItem {
+ uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
+ version: 0,
+ text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
+ language_id: Default::default()
+ }
+ ]
+ );
+
+ // Close notifications are reported only to servers matching the buffer's language.
+ cx.update(|_| drop(json_buffer));
+ let close_message = lsp2::DidCloseTextDocumentParams {
+ text_document: lsp2::TextDocumentIdentifier::new(
+ lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
+ ),
+ };
+ assert_eq!(
+ fake_json_server
+ .receive_notification::<lsp2::notification::DidCloseTextDocument>()
+ .await,
+ close_message,
+ );
+}
+
+// Verifies that file-system events are forwarded to a language server only when they
+// match the glob patterns the server registered via `workspace/didChangeWatchedFiles`,
+// and that registering a watcher inside a gitignored directory forces that directory
+// to be recursively loaded into the worktree.
+#[gpui2::test]
+async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            name: "the-language-server",
+            ..Default::default()
+        }))
+        .await;
+
+    // `target` is gitignored; its three subtrees exist so the test can show that
+    // only the watched one (`target/y`) gets loaded later.
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/the-root",
+        json!({
+            ".gitignore": "target\n",
+            "src": {
+                "a.rs": "",
+                "b.rs": "",
+            },
+            "target": {
+                "x": {
+                    "out": {
+                        "x.rs": ""
+                    }
+                },
+                "y": {
+                    "out": {
+                        "y.rs": "",
+                    }
+                },
+                "z": {
+                    "out": {
+                        "z.rs": ""
+                    }
+                }
+            }
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+    project.update(cx, |project, _| {
+        project.languages.add(Arc::new(language));
+    });
+    cx.executor().run_until_parked();
+
+    // Start the language server by opening a buffer with a compatible file extension.
+    let _buffer = project
+        .update(cx, |project, cx| {
+            project.open_local_buffer("/the-root/src/a.rs", cx)
+        })
+        .await
+        .unwrap();
+
+    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
+    project.update(cx, |project, cx| {
+        let worktree = project.worktrees().next().unwrap();
+        assert_eq!(
+            worktree
+                .read(cx)
+                .snapshot()
+                .entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+                (Path::new("target"), true),
+            ]
+        );
+    });
+
+    let prev_read_dir_count = fs.read_dir_call_count();
+
+    // Keep track of the FS events reported to the language server.
+    let fake_server = fake_servers.next().await.unwrap();
+    let file_changes = Arc::new(Mutex::new(Vec::new()));
+    // Simulate the server registering three watchers: an exact path, a glob under
+    // `src`, and a glob inside the gitignored `target/y` subtree.
+    fake_server
+        .request::<lsp2::request::RegisterCapability>(lsp2::RegistrationParams {
+            registrations: vec![lsp2::Registration {
+                id: Default::default(),
+                method: "workspace/didChangeWatchedFiles".to_string(),
+                register_options: serde_json::to_value(
+                    lsp2::DidChangeWatchedFilesRegistrationOptions {
+                        watchers: vec![
+                            lsp2::FileSystemWatcher {
+                                glob_pattern: lsp2::GlobPattern::String(
+                                    "/the-root/Cargo.toml".to_string(),
+                                ),
+                                kind: None,
+                            },
+                            lsp2::FileSystemWatcher {
+                                glob_pattern: lsp2::GlobPattern::String(
+                                    "/the-root/src/*.{rs,c}".to_string(),
+                                ),
+                                kind: None,
+                            },
+                            lsp2::FileSystemWatcher {
+                                glob_pattern: lsp2::GlobPattern::String(
+                                    "/the-root/target/y/**/*.rs".to_string(),
+                                ),
+                                kind: None,
+                            },
+                        ],
+                    },
+                )
+                .ok(),
+            }],
+        })
+        .await
+        .unwrap();
+    fake_server.handle_notification::<lsp2::notification::DidChangeWatchedFiles, _>({
+        let file_changes = file_changes.clone();
+        move |params, _| {
+            let mut file_changes = file_changes.lock();
+            file_changes.extend(params.changes);
+            // Sort by URI so assertions below are order-independent.
+            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
+        }
+    });
+
+    cx.executor().run_until_parked();
+    // Registering the watchers alone produces no change notifications.
+    // NOTE(review): the 4 extra read_dir calls presumably come from scanning the
+    // newly watched `target/y` subtree — confirm against worktree implementation.
+    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
+    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
+
+    // Now the language server has asked us to watch an ignored directory path,
+    // so we recursively load it.
+    project.update(cx, |project, cx| {
+        let worktree = project.worktrees().next().unwrap();
+        assert_eq!(
+            worktree
+                .read(cx)
+                .snapshot()
+                .entries(true)
+                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
+                .collect::<Vec<_>>(),
+            &[
+                (Path::new(""), false),
+                (Path::new(".gitignore"), false),
+                (Path::new("src"), false),
+                (Path::new("src/a.rs"), false),
+                (Path::new("src/b.rs"), false),
+                (Path::new("target"), true),
+                (Path::new("target/x"), true),
+                (Path::new("target/y"), true),
+                (Path::new("target/y/out"), true),
+                (Path::new("target/y/out/y.rs"), true),
+                (Path::new("target/z"), true),
+            ]
+        );
+    });
+
+    // Perform some file system mutations, two of which match the watched patterns,
+    // and one of which does not.
+    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
+        .await
+        .unwrap();
+    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
+        .await
+        .unwrap();
+    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
+        .await
+        .unwrap();
+    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
+        .await
+        .unwrap();
+    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
+        .await
+        .unwrap();
+
+    // The language server receives events for the FS mutations that match its watch patterns.
+    cx.executor().run_until_parked();
+    assert_eq!(
+        &*file_changes.lock(),
+        &[
+            lsp2::FileEvent {
+                uri: lsp2::Url::from_file_path("/the-root/src/b.rs").unwrap(),
+                typ: lsp2::FileChangeType::DELETED,
+            },
+            lsp2::FileEvent {
+                uri: lsp2::Url::from_file_path("/the-root/src/c.rs").unwrap(),
+                typ: lsp2::FileChangeType::CREATED,
+            },
+            lsp2::FileEvent {
+                uri: lsp2::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
+                typ: lsp2::FileChangeType::CREATED,
+            },
+        ]
+    );
+}
+
+// Verifies that when two single-file worktrees are open, diagnostics published for
+// each file are routed to the correct buffer and rendered with the right severity.
+#[gpui2::test]
+async fn test_single_file_worktrees_diagnostics(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.rs": "let a = 1;",
+            "b.rs": "let b = 2;"
+        }),
+    )
+    .await;
+
+    // Each root is a single file, producing two single-file worktrees.
+    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
+
+    let buffer_a = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+    let buffer_b = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
+        .await
+        .unwrap();
+
+    // Publish one diagnostic per file: an ERROR for a.rs and a WARNING for b.rs.
+    project.update(cx, |project, cx| {
+        project
+            .update_diagnostics(
+                LanguageServerId(0),
+                lsp2::PublishDiagnosticsParams {
+                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
+                    version: None,
+                    diagnostics: vec![lsp2::Diagnostic {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 4),
+                            lsp2::Position::new(0, 5),
+                        ),
+                        severity: Some(lsp2::DiagnosticSeverity::ERROR),
+                        message: "error 1".to_string(),
+                        ..Default::default()
+                    }],
+                },
+                &[],
+                cx,
+            )
+            .unwrap();
+        project
+            .update_diagnostics(
+                LanguageServerId(0),
+                lsp2::PublishDiagnosticsParams {
+                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
+                    version: None,
+                    diagnostics: vec![lsp2::Diagnostic {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 4),
+                            lsp2::Position::new(0, 5),
+                        ),
+                        severity: Some(lsp2::DiagnosticSeverity::WARNING),
+                        message: "error 2".to_string(),
+                        ..Default::default()
+                    }],
+                },
+                &[],
+                cx,
+            )
+            .unwrap();
+    });
+
+    // Each buffer shows only its own diagnostic, highlighting just the variable name.
+    buffer_a.update(cx, |buffer, _| {
+        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+        assert_eq!(
+            chunks
+                .iter()
+                .map(|(s, d)| (s.as_str(), *d))
+                .collect::<Vec<_>>(),
+            &[
+                ("let ", None),
+                ("a", Some(DiagnosticSeverity::ERROR)),
+                (" = 1;", None),
+            ]
+        );
+    });
+    buffer_b.update(cx, |buffer, _| {
+        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+        assert_eq!(
+            chunks
+                .iter()
+                .map(|(s, d)| (s.as_str(), *d))
+                .collect::<Vec<_>>(),
+            &[
+                ("let ", None),
+                ("b", Some(DiagnosticSeverity::WARNING)),
+                (" = 2;", None),
+            ]
+        );
+    });
+}
+
+// Verifies that diagnostics published for a file in a hidden (non-visible) worktree
+// still render inside the buffer, but are excluded from the project's diagnostic
+// summaries.
+#[gpui2::test]
+async fn test_hidden_worktrees_diagnostics(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            "dir": {
+                "a.rs": "let a = 1;",
+            },
+            "other.rs": "let b = c;"
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
+
+    // Create a second worktree for a file outside the visible root; `false` marks it
+    // as not visible.
+    let (worktree, _) = project
+        .update(cx, |project, cx| {
+            project.find_or_create_local_worktree("/root/other.rs", false, cx)
+        })
+        .await
+        .unwrap();
+    let worktree_id = worktree.update(cx, |tree, _| tree.id());
+
+    project.update(cx, |project, cx| {
+        project
+            .update_diagnostics(
+                LanguageServerId(0),
+                lsp2::PublishDiagnosticsParams {
+                    uri: Url::from_file_path("/root/other.rs").unwrap(),
+                    version: None,
+                    diagnostics: vec![lsp2::Diagnostic {
+                        range: lsp2::Range::new(
+                            lsp2::Position::new(0, 8),
+                            lsp2::Position::new(0, 9),
+                        ),
+                        severity: Some(lsp2::DiagnosticSeverity::ERROR),
+                        message: "unknown variable 'c'".to_string(),
+                        ..Default::default()
+                    }],
+                },
+                &[],
+                cx,
+            )
+            .unwrap();
+    });
+
+    // The single-file worktree's root entry has an empty relative path.
+    let buffer = project
+        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
+        .await
+        .unwrap();
+    // The diagnostic is visible when reading the buffer itself.
+    buffer.update(cx, |buffer, _| {
+        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+        assert_eq!(
+            chunks
+                .iter()
+                .map(|(s, d)| (s.as_str(), *d))
+                .collect::<Vec<_>>(),
+            &[
+                ("let b = ", None),
+                ("c", Some(DiagnosticSeverity::ERROR)),
+                (";", None),
+            ]
+        );
+    });
+
+    // ...but the hidden worktree contributes nothing to project-level summaries.
+    project.update(cx, |project, cx| {
+        assert_eq!(project.diagnostic_summaries(cx).next(), None);
+        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
+    });
+}
+
+// Verifies the project event sequence driven by a server's disk-based-diagnostics
+// progress token: Started/Finished events bracket DiagnosticsUpdated, diagnostics are
+// attached to the buffer, and publishing empty diagnostics twice yields only one
+// update event.
+#[gpui2::test]
+async fn test_disk_based_diagnostics_progress(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let progress_token = "the-progress-token";
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            disk_based_diagnostics_progress_token: Some(progress_token.into()),
+            disk_based_diagnostics_sources: vec!["disk".into()],
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree(
+        "/dir",
+        json!({
+            "a.rs": "fn a() { A }",
+            "b.rs": "const y: i32 = 1",
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
+
+    // Cause worktree to start the fake language server
+    let _buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
+        .await
+        .unwrap();
+
+    let mut events = cx.subscribe(&project);
+
+    let fake_server = fake_servers.next().await.unwrap();
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::LanguageServerAdded(LanguageServerId(0)),
+    );
+
+    // Beginning progress on the disk-based token emits DiskBasedDiagnosticsStarted.
+    fake_server
+        .start_progress(format!("{}/0", progress_token))
+        .await;
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiskBasedDiagnosticsStarted {
+            language_server_id: LanguageServerId(0),
+        }
+    );
+
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: Url::from_file_path("/dir/a.rs").unwrap(),
+        version: None,
+        diagnostics: vec![lsp2::Diagnostic {
+            range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+            severity: Some(lsp2::DiagnosticSeverity::ERROR),
+            message: "undefined variable 'A'".to_string(),
+            ..Default::default()
+        }],
+    });
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiagnosticsUpdated {
+            language_server_id: LanguageServerId(0),
+            path: (worktree_id, Path::new("a.rs")).into()
+        }
+    );
+
+    // Ending progress on the token emits DiskBasedDiagnosticsFinished.
+    fake_server.end_progress(format!("{}/0", progress_token));
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiskBasedDiagnosticsFinished {
+            language_server_id: LanguageServerId(0)
+        }
+    );
+
+    let buffer = project
+        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    // The published diagnostic is attached to the buffer at the expected range.
+    buffer.update(cx, |buffer, _| {
+        let snapshot = buffer.snapshot();
+        let diagnostics = snapshot
+            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
+            .collect::<Vec<_>>();
+        assert_eq!(
+            diagnostics,
+            &[DiagnosticEntry {
+                range: Point::new(0, 9)..Point::new(0, 10),
+                diagnostic: Diagnostic {
+                    severity: lsp2::DiagnosticSeverity::ERROR,
+                    message: "undefined variable 'A'".to_string(),
+                    group_id: 0,
+                    is_primary: true,
+                    ..Default::default()
+                }
+            }]
+        )
+    });
+
+    // Ensure publishing empty diagnostics twice only results in one update event.
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: Url::from_file_path("/dir/a.rs").unwrap(),
+        version: None,
+        diagnostics: Default::default(),
+    });
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiagnosticsUpdated {
+            language_server_id: LanguageServerId(0),
+            path: (worktree_id, Path::new("a.rs")).into()
+        }
+    );
+
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: Url::from_file_path("/dir/a.rs").unwrap(),
+        version: None,
+        diagnostics: Default::default(),
+    });
+    cx.executor().run_until_parked();
+    // The second identical (empty) publish produced no additional event.
+    assert_eq!(futures::poll!(events.next()), Poll::Pending);
+}
+
+// Verifies that restarting a language server while its disk-based diagnostics are
+// still in progress does not leave the project stuck: the replacement server's
+// progress lifecycle completes normally even though the old server never finished.
+#[gpui2::test]
+async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let progress_token = "the-progress-token";
+    let mut language = Language::new(
+        LanguageConfig {
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            disk_based_diagnostics_sources: vec!["disk".into()],
+            disk_based_diagnostics_progress_token: Some(progress_token.into()),
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    // Simulate diagnostics starting to update.
+    let fake_server = fake_servers.next().await.unwrap();
+    fake_server.start_progress(progress_token).await;
+
+    // Restart the server before the diagnostics finish updating.
+    project.update(cx, |project, cx| {
+        project.restart_language_servers_for_buffers([buffer], cx);
+    });
+    let mut events = cx.subscribe(&project);
+
+    // Simulate the newly started server sending more diagnostics.
+    let fake_server = fake_servers.next().await.unwrap();
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::LanguageServerAdded(LanguageServerId(1))
+    );
+    fake_server.start_progress(progress_token).await;
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiskBasedDiagnosticsStarted {
+            language_server_id: LanguageServerId(1)
+        }
+    );
+    // Only the new server (id 1) is reported as running disk-based diagnostics.
+    project.update(cx, |project, _| {
+        assert_eq!(
+            project
+                .language_servers_running_disk_based_diagnostics()
+                .collect::<Vec<_>>(),
+            [LanguageServerId(1)]
+        );
+    });
+
+    // All diagnostics are considered done, despite the old server's diagnostic
+    // task never completing.
+    fake_server.end_progress(progress_token);
+    assert_eq!(
+        events.next().await.unwrap(),
+        Event::DiskBasedDiagnosticsFinished {
+            language_server_id: LanguageServerId(1)
+        }
+    );
+    project.update(cx, |project, _| {
+        assert_eq!(
+            project
+                .language_servers_running_disk_based_diagnostics()
+                .collect::<Vec<_>>(),
+            // `[T; 0]` spells an empty, typed expected list.
+            [LanguageServerId(0); 0]
+        );
+    });
+}
+
+// Verifies that restarting a language server clears the diagnostics it had
+// previously published, both from the buffer and from the project summary.
+#[gpui2::test]
+async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    // Publish diagnostics
+    let fake_server = fake_servers.next().await.unwrap();
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: Url::from_file_path("/dir/a.rs").unwrap(),
+        version: None,
+        diagnostics: vec![lsp2::Diagnostic {
+            range: lsp2::Range::new(lsp2::Position::new(0, 0), lsp2::Position::new(0, 0)),
+            severity: Some(lsp2::DiagnosticSeverity::ERROR),
+            message: "the message".to_string(),
+            ..Default::default()
+        }],
+    });
+
+    // The diagnostic appears in both the buffer and the project summary.
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, usize>(0..1, false)
+                .map(|entry| entry.diagnostic.message.clone())
+                .collect::<Vec<_>>(),
+            ["the message".to_string()]
+        );
+    });
+    project.update(cx, |project, cx| {
+        assert_eq!(
+            project.diagnostic_summary(cx),
+            DiagnosticSummary {
+                error_count: 1,
+                warning_count: 0,
+            }
+        );
+    });
+
+    project.update(cx, |project, cx| {
+        project.restart_language_servers_for_buffers([buffer.clone()], cx);
+    });
+
+    // The diagnostics are cleared.
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, usize>(0..1, false)
+                .map(|entry| entry.diagnostic.message.clone())
+                .collect::<Vec<_>>(),
+            Vec::<String>::new(),
+        );
+    });
+    project.update(cx, |project, cx| {
+        assert_eq!(
+            project.diagnostic_summary(cx),
+            DiagnosticSummary {
+                error_count: 0,
+                warning_count: 0,
+            }
+        );
+    });
+}
+
+// Verifies that a diagnostics publish carrying an unknown (stale) document version
+// does not poison state across a server restart: the restarted server re-opens the
+// document at version 0.
+#[gpui2::test]
+async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            name: "the-lsp",
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    // Before restarting the server, report diagnostics with an unknown buffer version.
+    let fake_server = fake_servers.next().await.unwrap();
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+        version: Some(10000),
+        diagnostics: Vec::new(),
+    });
+    cx.executor().run_until_parked();
+
+    project.update(cx, |project, cx| {
+        project.restart_language_servers_for_buffers([buffer.clone()], cx);
+    });
+    // The replacement server should see the buffer freshly opened at version 0,
+    // not at the bogus version the old server reported.
+    let mut fake_server = fake_servers.next().await.unwrap();
+    let notification = fake_server
+        .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+        .await
+        .text_document;
+    assert_eq!(notification.version, 0);
+}
+
+// Verifies that toggling the per-language `enable_language_server` setting stops and
+// restarts only the matching server, leaving servers for other languages untouched.
+#[gpui2::test]
+async fn test_toggling_enable_language_server(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut rust = Language::new(
+        LanguageConfig {
+            name: Arc::from("Rust"),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_rust_servers = rust
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            name: "rust-lsp",
+            ..Default::default()
+        }))
+        .await;
+    let mut js = Language::new(
+        LanguageConfig {
+            name: Arc::from("JavaScript"),
+            path_suffixes: vec!["js".to_string()],
+            ..Default::default()
+        },
+        None,
+    );
+    let mut fake_js_servers = js
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            name: "js-lsp",
+            ..Default::default()
+        }))
+        .await;
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
+        .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| {
+        project.languages.add(Arc::new(rust));
+        project.languages.add(Arc::new(js));
+    });
+
+    // Opening each buffer starts the corresponding language server.
+    let _rs_buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+    let _js_buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
+        .await
+        .unwrap();
+
+    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
+    assert_eq!(
+        fake_rust_server_1
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document
+            .uri
+            .as_str(),
+        "file:///dir/a.rs"
+    );
+
+    let mut fake_js_server = fake_js_servers.next().await.unwrap();
+    assert_eq!(
+        fake_js_server
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document
+            .uri
+            .as_str(),
+        "file:///dir/b.js"
+    );
+
+    // Disable Rust language server, ensuring only that server gets stopped.
+    cx.update(|cx| {
+        cx.update_global(|settings: &mut SettingsStore, cx| {
+            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
+                settings.languages.insert(
+                    Arc::from("Rust"),
+                    LanguageSettingsContent {
+                        enable_language_server: Some(false),
+                        ..Default::default()
+                    },
+                );
+            });
+        })
+    });
+    // The Exit notification signals the Rust server was shut down.
+    fake_rust_server_1
+        .receive_notification::<lsp2::notification::Exit>()
+        .await;
+
+    // Enable Rust and disable JavaScript language servers, ensuring that the
+    // former gets started again and that the latter stops.
+    cx.update(|cx| {
+        cx.update_global(|settings: &mut SettingsStore, cx| {
+            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
+                settings.languages.insert(
+                    Arc::from("Rust"),
+                    LanguageSettingsContent {
+                        enable_language_server: Some(true),
+                        ..Default::default()
+                    },
+                );
+                settings.languages.insert(
+                    Arc::from("JavaScript"),
+                    LanguageSettingsContent {
+                        enable_language_server: Some(false),
+                        ..Default::default()
+                    },
+                );
+            });
+        })
+    });
+    // A second Rust server instance starts and re-opens the buffer.
+    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
+    assert_eq!(
+        fake_rust_server_2
+            .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+            .await
+            .text_document
+            .uri
+            .as_str(),
+        "file:///dir/a.rs"
+    );
+    fake_js_server
+        .receive_notification::<lsp2::notification::Exit>()
+        .await;
+}
+
+// Verifies that diagnostics published against an older document version are
+// translated through subsequent buffer edits: ranges move with the text, overlapping
+// diagnostics highlight correctly, and out-of-order publishes resolve against the
+// version they reference.
+#[gpui2::test(iterations = 3)]
+async fn test_transforming_diagnostics(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let mut language = Language::new(
+        LanguageConfig {
+            name: "Rust".into(),
+            path_suffixes: vec!["rs".to_string()],
+            ..Default::default()
+        },
+        Some(tree_sitter_rust::language()),
+    );
+    let mut fake_servers = language
+        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+            disk_based_diagnostics_sources: vec!["disk".into()],
+            ..Default::default()
+        }))
+        .await;
+
+    let text = "
+        fn a() { A }
+        fn b() { BB }
+        fn c() { CCC }
+    "
+    .unindent();
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": text })).await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    let mut fake_server = fake_servers.next().await.unwrap();
+    let open_notification = fake_server
+        .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+        .await;
+
+    // Edit the buffer, moving the content down
+    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
+    let change_notification_1 = fake_server
+        .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+        .await;
+    assert!(change_notification_1.text_document.version > open_notification.text_document.version);
+
+    // Report some diagnostics for the initial version of the buffer
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+        version: Some(open_notification.text_document.version),
+        diagnostics: vec![
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                message: "undefined variable 'A'".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                message: "undefined variable 'BB'".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(2, 9), lsp2::Position::new(2, 12)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                source: Some("disk".to_string()),
+                message: "undefined variable 'CCC'".to_string(),
+                ..Default::default()
+            },
+        ],
+    });
+
+    // The diagnostics have moved down since they were created.
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
+                .collect::<Vec<_>>(),
+            &[
+                DiagnosticEntry {
+                    range: Point::new(3, 9)..Point::new(3, 11),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'BB'".to_string(),
+                        is_disk_based: true,
+                        group_id: 1,
+                        is_primary: true,
+                        ..Default::default()
+                    },
+                },
+                DiagnosticEntry {
+                    range: Point::new(4, 9)..Point::new(4, 12),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'CCC'".to_string(),
+                        is_disk_based: true,
+                        group_id: 2,
+                        is_primary: true,
+                        ..Default::default()
+                    }
+                }
+            ]
+        );
+        assert_eq!(
+            chunks_with_diagnostics(buffer, 0..buffer.len()),
+            [
+                ("\n\nfn a() { ".to_string(), None),
+                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\nfn b() { ".to_string(), None),
+                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\nfn c() { ".to_string(), None),
+                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\n".to_string(), None),
+            ]
+        );
+        // A range that starts/ends mid-diagnostic clips the highlighted chunks.
+        assert_eq!(
+            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
+            [
+                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }\nfn c() { ".to_string(), None),
+                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
+            ]
+        );
+    });
+
+    // Ensure overlapping diagnostics are highlighted correctly.
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+        version: Some(open_notification.text_document.version),
+        diagnostics: vec![
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                message: "undefined variable 'A'".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 12)),
+                severity: Some(DiagnosticSeverity::WARNING),
+                message: "unreachable statement".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+        ],
+    });
+
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
+                .collect::<Vec<_>>(),
+            &[
+                DiagnosticEntry {
+                    range: Point::new(2, 9)..Point::new(2, 12),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::WARNING,
+                        message: "unreachable statement".to_string(),
+                        is_disk_based: true,
+                        group_id: 4,
+                        is_primary: true,
+                        ..Default::default()
+                    }
+                },
+                DiagnosticEntry {
+                    range: Point::new(2, 9)..Point::new(2, 10),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'A'".to_string(),
+                        is_disk_based: true,
+                        group_id: 3,
+                        is_primary: true,
+                        ..Default::default()
+                    },
+                }
+            ]
+        );
+        // Where the ERROR and WARNING overlap, the more severe ERROR wins the chunk.
+        assert_eq!(
+            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
+            [
+                ("fn a() { ".to_string(), None),
+                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
+                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
+                ("\n".to_string(), None),
+            ]
+        );
+        assert_eq!(
+            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
+            [
+                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
+                ("\n".to_string(), None),
+            ]
+        );
+    });
+
+    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
+    // changes since the last save.
+    buffer.update(cx, |buffer, cx| {
+        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
+        buffer.edit(
+            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
+            None,
+            cx,
+        );
+        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
+    });
+    let change_notification_2 = fake_server
+        .receive_notification::<lsp2::notification::DidChangeTextDocument>()
+        .await;
+    assert!(
+        change_notification_2.text_document.version > change_notification_1.text_document.version
+    );
+
+    // Handle out-of-order diagnostics
+    fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
+        uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+        version: Some(change_notification_2.text_document.version),
+        diagnostics: vec![
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
+                severity: Some(DiagnosticSeverity::ERROR),
+                message: "undefined variable 'BB'".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+            lsp2::Diagnostic {
+                range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+                severity: Some(DiagnosticSeverity::WARNING),
+                message: "undefined variable 'A'".to_string(),
+                source: Some("disk".to_string()),
+                ..Default::default()
+            },
+        ],
+    });
+
+    cx.executor().run_until_parked();
+    buffer.update(cx, |buffer, _| {
+        assert_eq!(
+            buffer
+                .snapshot()
+                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
+                .collect::<Vec<_>>(),
+            &[
+                DiagnosticEntry {
+                    range: Point::new(2, 21)..Point::new(2, 22),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::WARNING,
+                        message: "undefined variable 'A'".to_string(),
+                        is_disk_based: true,
+                        group_id: 6,
+                        is_primary: true,
+                        ..Default::default()
+                    }
+                },
+                DiagnosticEntry {
+                    range: Point::new(3, 9)..Point::new(3, 14),
+                    diagnostic: Diagnostic {
+                        source: Some("disk".into()),
+                        severity: DiagnosticSeverity::ERROR,
+                        message: "undefined variable 'BB'".to_string(),
+                        is_disk_based: true,
+                        group_id: 5,
+                        is_primary: true,
+                        ..Default::default()
+                    },
+                }
+            ]
+        );
+    });
+}
+
+// Verifies how zero-width diagnostic ranges are rendered: extended forward to cover
+// the next character, or backward when the range sits at end-of-line.
+#[gpui2::test]
+async fn test_empty_diagnostic_ranges(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let text = concat!(
+        "let one = ;\n", //
+        "let two =  \n",
+        "let three = 3;\n",
+    );
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": text })).await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+    let buffer = project
+        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+        .await
+        .unwrap();
+
+    // Publish two diagnostics with empty (zero-width) ranges: one mid-line, one at
+    // the end of a line.
+    project.update(cx, |project, cx| {
+        project
+            .update_buffer_diagnostics(
+                &buffer,
+                LanguageServerId(0),
+                None,
+                vec![
+                    DiagnosticEntry {
+                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "syntax error 1".to_string(),
+                            ..Default::default()
+                        },
+                    },
+                    DiagnosticEntry {
+                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
+                        diagnostic: Diagnostic {
+                            severity: DiagnosticSeverity::ERROR,
+                            message: "syntax error 2".to_string(),
+                            ..Default::default()
+                        },
+                    },
+                ],
+                cx,
+            )
+            .unwrap();
+    });
+
+    // An empty range is extended forward to include the following character.
+    // At the end of a line, an empty range is extended backward to include
+    // the preceding character.
+    buffer.update(cx, |buffer, _| {
+        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
+        assert_eq!(
+            chunks
+                .iter()
+                .map(|(s, d)| (s.as_str(), *d))
+                .collect::<Vec<_>>(),
+            &[
+                ("let one = ", None),
+                (";", Some(DiagnosticSeverity::ERROR)),
+                ("\nlet two =", None),
+                ("  ", Some(DiagnosticSeverity::ERROR)),
+                ("\nlet three = 3;\n", None)
+            ]
+        );
+    });
+}
+
+// Verifies that diagnostics published by two different language servers for the same
+// file are counted independently in the project-wide diagnostic summary.
+#[gpui2::test]
+async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui2::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor().clone());
+    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
+        .await;
+
+    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+
+    project.update(cx, |project, cx| {
+        // Same file, same range — but reported by two distinct server ids (0 and 1).
+        project
+            .update_diagnostic_entries(
+                LanguageServerId(0),
+                Path::new("/dir/a.rs").to_owned(),
+                None,
+                vec![DiagnosticEntry {
+                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
+                    diagnostic: Diagnostic {
+                        severity: DiagnosticSeverity::ERROR,
+                        is_primary: true,
+                        message: "syntax error a1".to_string(),
+                        ..Default::default()
+                    },
+                }],
+                cx,
+            )
+            .unwrap();
+        project
+            .update_diagnostic_entries(
+                LanguageServerId(1),
+                Path::new("/dir/a.rs").to_owned(),
+                None,
+                vec![DiagnosticEntry {
+                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
+                    diagnostic: Diagnostic {
+                        severity: DiagnosticSeverity::ERROR,
+                        is_primary: true,
+                        message: "syntax error b1".to_string(),
+                        ..Default::default()
+                    },
+                }],
+                cx,
+            )
+            .unwrap();
+
+        // Both servers' errors appear in the summary rather than deduplicating.
+        assert_eq!(
+            project.diagnostic_summary(cx),
+            DiagnosticSummary {
+                error_count: 2,
+                warning_count: 0,
+            }
+        );
+    });
+}
+
+#[gpui2::test]
+async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui2::TestAppContext) {
+ init_test(cx);
+
+ let mut language = Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ );
+ let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
+
+ let text = "
+ fn a() {
+ f1();
+ }
+ fn b() {
+ f2();
+ }
+ fn c() {
+ f3();
+ }
+ "
+ .unindent();
+
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/dir",
+ json!({
+ "a.rs": text.clone(),
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+ let buffer = project
+ .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+ .await
+ .unwrap();
+
+ let mut fake_server = fake_servers.next().await.unwrap();
+ let lsp_document_version = fake_server
+ .receive_notification::<lsp2::notification::DidOpenTextDocument>()
+ .await
+ .text_document
+ .version;
+
+ // Simulate editing the buffer after the language server computes some edits.
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit(
+ [(
+ Point::new(0, 0)..Point::new(0, 0),
+ "// above first function\n",
+ )],
+ None,
+ cx,
+ );
+ buffer.edit(
+ [(
+ Point::new(2, 0)..Point::new(2, 0),
+ " // inside first function\n",
+ )],
+ None,
+ cx,
+ );
+ buffer.edit(
+ [(
+ Point::new(6, 4)..Point::new(6, 4),
+ "// inside second function ",
+ )],
+ None,
+ cx,
+ );
+
+ assert_eq!(
+ buffer.text(),
+ "
+ // above first function
+ fn a() {
+ // inside first function
+ f1();
+ }
+ fn b() {
+ // inside second function f2();
+ }
+ fn c() {
+ f3();
+ }
+ "
+ .unindent()
+ );
+ });
+
+ let edits = project
+ .update(cx, |project, cx| {
+ project.edits_from_lsp(
+ &buffer,
+ vec![
+ // replace body of first function
+ lsp2::TextEdit {
+ range: lsp2::Range::new(
+ lsp2::Position::new(0, 0),
+ lsp2::Position::new(3, 0),
+ ),
+ new_text: "
+ fn a() {
+ f10();
+ }
+ "
+ .unindent(),
+ },
+ // edit inside second function
+ lsp2::TextEdit {
+ range: lsp2::Range::new(
+ lsp2::Position::new(4, 6),
+ lsp2::Position::new(4, 6),
+ ),
+ new_text: "00".into(),
+ },
+ // edit inside third function via two distinct edits
+ lsp2::TextEdit {
+ range: lsp2::Range::new(
+ lsp2::Position::new(7, 5),
+ lsp2::Position::new(7, 5),
+ ),
+ new_text: "4000".into(),
+ },
+ lsp2::TextEdit {
+ range: lsp2::Range::new(
+ lsp2::Position::new(7, 5),
+ lsp2::Position::new(7, 6),
+ ),
+ new_text: "".into(),
+ },
+ ],
+ LanguageServerId(0),
+ Some(lsp_document_version),
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+
+ buffer.update(cx, |buffer, cx| {
+ for (range, new_text) in edits {
+ buffer.edit([(range, new_text)], None, cx);
+ }
+ assert_eq!(
+ buffer.text(),
+ "
+ // above first function
+ fn a() {
+ // inside first function
+ f10();
+ }
+ fn b() {
+ // inside second function f200();
+ }
+ fn c() {
+ f4000();
+ }
+ "
+ .unindent()
+ );
+ });
+}
+
+#[gpui2::test]
+async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui2::TestAppContext) {
+ init_test(cx);
+
+ let text = "
+ use a::b;
+ use a::c;
+
+ fn f() {
+ b();
+ c();
+ }
+ "
+ .unindent();
+
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/dir",
+ json!({
+ "a.rs": text.clone(),
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+ let buffer = project
+ .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+ .await
+ .unwrap();
+
+ // Simulate the language server sending us a small edit in the form of a very large diff.
+ // Rust-analyzer does this when performing a merge-imports code action.
+ let edits = project
+ .update(cx, |project, cx| {
+ project.edits_from_lsp(
+ &buffer,
+ [
+ // Replace the first use statement without editing the semicolon.
+ lsp2::TextEdit {
+ range: lsp2::Range::new(
+ lsp2::Position::new(0, 4),
+ lsp2::Position::new(0, 8),
+ ),
+ new_text: "a::{b, c}".into(),
+ },
+ // Reinsert the remainder of the file between the semicolon and the final
+ // newline of the file.
+ lsp2::TextEdit {
+ range: lsp2::Range::new(
+ lsp2::Position::new(0, 9),
+ lsp2::Position::new(0, 9),
+ ),
+ new_text: "\n\n".into(),
+ },
+ lsp2::TextEdit {
+ range: lsp2::Range::new(
+ lsp2::Position::new(0, 9),
+ lsp2::Position::new(0, 9),
+ ),
+ new_text: "
+ fn f() {
+ b();
+ c();
+ }"
+ .unindent(),
+ },
+ // Delete everything after the first newline of the file.
+ lsp2::TextEdit {
+ range: lsp2::Range::new(
+ lsp2::Position::new(1, 0),
+ lsp2::Position::new(7, 0),
+ ),
+ new_text: "".into(),
+ },
+ ],
+ LanguageServerId(0),
+ None,
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+
+ buffer.update(cx, |buffer, cx| {
+ let edits = edits
+ .into_iter()
+ .map(|(range, text)| {
+ (
+ range.start.to_point(buffer)..range.end.to_point(buffer),
+ text,
+ )
+ })
+ .collect::<Vec<_>>();
+
+ assert_eq!(
+ edits,
+ [
+ (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
+ (Point::new(1, 0)..Point::new(2, 0), "".into())
+ ]
+ );
+
+ for (range, new_text) in edits {
+ buffer.edit([(range, new_text)], None, cx);
+ }
+ assert_eq!(
+ buffer.text(),
+ "
+ use a::{b, c};
+
+ fn f() {
+ b();
+ c();
+ }
+ "
+ .unindent()
+ );
+ });
+}
+
+// #[gpui2::test]
+// async fn test_invalid_edits_from_lsp2(cx: &mut gpui2::TestAppContext) {
// init_test(cx);
-// let fs = FakeFs::new(cx.background());
+// let text = "
+// use a::b;
+// use a::c;
+
+// fn f() {
+// b();
+// c();
+// }
+// "
+// .unindent();
+
+// let fs = FakeFs::new(cx.executor().clone());
// fs.insert_tree(
-// "/the-root",
+// "/dir",
// json!({
-// ".zed": {
-// "settings.json": r#"{ "tab_size": 8 }"#
-// },
-// "a": {
-// "a.rs": "fn a() {\n A\n}"
-// },
-// "b": {
-// ".zed": {
-// "settings.json": r#"{ "tab_size": 2 }"#
-// },
-// "b.rs": "fn b() {\n B\n}"
-// }
+// "a.rs": text.clone(),
// }),
// )
// .await;
-// let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
-// let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
-
-// deterministic.run_until_parked();
-// cx.read(|cx| {
-// let tree = worktree.read(cx);
-
-// let settings_a = language_settings(
-// None,
-// Some(
-// &(File::for_entry(
-// tree.entry_for_path("a/a.rs").unwrap().clone(),
-// worktree.clone(),
-// ) as _),
-// ),
-// cx,
-// );
-// let settings_b = language_settings(
-// None,
-// Some(
-// &(File::for_entry(
-// tree.entry_for_path("b/b.rs").unwrap().clone(),
-// worktree.clone(),
-// ) as _),
-// ),
-// cx,
+// let project = Project::test(fs, ["/dir".as_ref()], cx).await;
+// let buffer = project
+// .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
+// .await
+// .unwrap();
+
+// // Simulate the language server sending us edits in a non-ordered fashion,
+// // with ranges sometimes being inverted or pointing to invalid locations.
+// let edits = project
+// .update(cx, |project, cx| {
+// project.edits_from_lsp(
+// &buffer,
+// [
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 9),
+// lsp2::Position::new(0, 9),
+// ),
+// new_text: "\n\n".into(),
+// },
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 8),
+// lsp2::Position::new(0, 4),
+// ),
+// new_text: "a::{b, c}".into(),
+// },
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(1, 0),
+// lsp2::Position::new(99, 0),
+// ),
+// new_text: "".into(),
+// },
+// lsp2::TextEdit {
+// range: lsp2::Range::new(
+// lsp2::Position::new(0, 9),
+// lsp2::Position::new(0, 9),
+// ),
+// new_text: "
+// fn f() {
+// b();
+// c();
+// }"
+// .unindent(),
+// },
+// ],
+// LanguageServerId(0),
+// None,
+// cx,
+// )
+// })
+// .await
+// .unwrap();
+
+// buffer.update(cx, |buffer, cx| {
+// let edits = edits
+// .into_iter()
+// .map(|(range, text)| {
+// (
+// range.start.to_point(buffer)..range.end.to_point(buffer),
+// text,
+// )
+// })
+// .collect::<Vec<_>>();
+
+// assert_eq!(
+// edits,
+// [
+// (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
+// (Point::new(1, 0)..Point::new(2, 0), "".into())
+// ]
// );
-// assert_eq!(settings_a.tab_size.get(), 8);
-// assert_eq!(settings_b.tab_size.get(), 2);
+// for (range, new_text) in edits {
+// buffer.edit([(range, new_text)], None, cx);
+// }
+// assert_eq!(
+// buffer.text(),
+// "
+// use a::{b, c};
+
+// fn f() {
+// b();
+// c();
+// }
+// "
+// .unindent()
+// );
// });
// }
-// #[gpui::test]
-// async fn test_managing_language_servers(
-// deterministic: Arc<Deterministic>,
-// cx: &mut gpui::TestAppContext,
-// ) {
+fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
+ buffer: &Buffer,
+ range: Range<T>,
+) -> Vec<(String, Option<DiagnosticSeverity>)> {
+ let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
+ for chunk in buffer.snapshot().chunks(range, true) {
+ if chunks.last().map_or(false, |prev_chunk| {
+ prev_chunk.1 == chunk.diagnostic_severity
+ }) {
+ chunks.last_mut().unwrap().0.push_str(chunk.text);
+ } else {
+ chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
+ }
+ }
+ chunks
+}
+
+#[gpui2::test(iterations = 10)]
+async fn test_definition(cx: &mut gpui2::TestAppContext) {
+ init_test(cx);
+
+ let mut language = Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ );
+ let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
+
+ let fs = FakeFs::new(cx.executor().clone());
+ fs.insert_tree(
+ "/dir",
+ json!({
+ "a.rs": "const fn a() { A }",
+ "b.rs": "const y: i32 = crate::a()",
+ }),
+ )
+ .await;
+
+ let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
+ project.update(cx, |project, _| project.languages.add(Arc::new(language)));
+
+ let buffer = project
+ .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
+ .await
+ .unwrap();
+
+ let fake_server = fake_servers.next().await.unwrap();
+ fake_server.handle_request::<lsp2::request::GotoDefinition, _, _>(|params, _| async move {
+ let params = params.text_document_position_params;
+ assert_eq!(
+ params.text_document.uri.to_file_path().unwrap(),
+ Path::new("/dir/b.rs"),
+ );
+ assert_eq!(params.position, lsp2::Position::new(0, 22));
+
+ Ok(Some(lsp2::GotoDefinitionResponse::Scalar(
+ lsp2::Location::new(
+ lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
+ lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
+ ),
+ )))
+ });
+
+ let mut definitions = project
+ .update(cx, |project, cx| project.definition(&buffer, 22, cx))
+ .await
+ .unwrap();
+
+ // Assert no new language server started
+ cx.executor().run_until_parked();
+ assert!(fake_servers.try_next().is_err());
+
+ assert_eq!(definitions.len(), 1);
+ let definition = definitions.pop().unwrap();
+ cx.update(|cx| {
+ let target_buffer = definition.target.buffer.read(cx);
+ assert_eq!(
+ target_buffer
+ .file()
+ .unwrap()
+ .as_local()
+ .unwrap()
+ .abs_path(cx),
+ Path::new("/dir/a.rs"),
+ );
+ assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
+ assert_eq!(
+ list_worktrees(&project, cx),
+ [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
+ );
+
+ drop(definition);
+ });
+ cx.update(|cx| {
+ assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
+ });
+
+ fn list_worktrees<'a>(
+ project: &'a Model<Project>,
+ cx: &'a AppContext,
+ ) -> Vec<(&'a Path, bool)> {
+ project
+ .read(cx)
+ .worktrees()
+ .map(|worktree| {
+ let worktree = worktree.read(cx);
+ (
+ worktree.as_local().unwrap().abs_path().as_ref(),
+ worktree.is_visible(),
+ )
+ })
+ .collect::<Vec<_>>()
+ }
+}
+
+// #[gpui2::test]
+// async fn test_completions_without_edit_ranges(cx: &mut gpui2::TestAppContext) {
// init_test(cx);
-// let mut rust_language = Language::new(
-// LanguageConfig {
-// name: "Rust".into(),
-// path_suffixes: vec!["rs".to_string()],
-// ..Default::default()
-// },
-// Some(tree_sitter_rust::language()),
-// );
-// let mut json_language = Language::new(
+// let mut language = Language::new(
// LanguageConfig {
-// name: "JSON".into(),
-// path_suffixes: vec!["json".to_string()],
+// name: "TypeScript".into(),
+// path_suffixes: vec!["ts".to_string()],
// ..Default::default()
// },
-// None,
+// Some(tree_sitter_typescript::language_typescript()),
// );
-// let mut fake_rust_servers = rust_language
-// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-// name: "the-rust-language-server",
-// capabilities: lsp::ServerCapabilities {
-// completion_provider: Some(lsp::CompletionOptions {
-// trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
-// ..Default::default()
-// }),
-// ..Default::default()
-// },
-// ..Default::default()
-// }))
-// .await;
-// let mut fake_json_servers = json_language
+// let mut fake_language_servers = language
// .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
-// name: "the-json-language-server",
-// capabilities: lsp::ServerCapabilities {
-// completion_provider: Some(lsp::CompletionOptions {
+// capabilities: lsp2::ServerCapabilities {
+// completion_provider: Some(lsp2::CompletionOptions {
// trigger_characters: Some(vec![":".to_string()]),
// ..Default::default()
// }),
@@ -4030,53 +4030,52 @@ struct UpdateIgnoreStatusJob {
scan_queue: Sender<ScanJob>,
}
-// todo!("re-enable when we have tests")
-// pub trait WorktreeModelHandle {
-// #[cfg(any(test, feature = "test-support"))]
-// fn flush_fs_events<'a>(
-// &self,
-// cx: &'a gpui::TestAppContext,
-// ) -> futures::future::LocalBoxFuture<'a, ()>;
-// }
-
-// impl WorktreeModelHandle for Handle<Worktree> {
-// // When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
-// // occurred before the worktree was constructed. These events can cause the worktree to perform
-// // extra directory scans, and emit extra scan-state notifications.
-// //
-// // This function mutates the worktree's directory and waits for those mutations to be picked up,
-// // to ensure that all redundant FS events have already been processed.
-// #[cfg(any(test, feature = "test-support"))]
-// fn flush_fs_events<'a>(
-// &self,
-// cx: &'a gpui::TestAppContext,
-// ) -> futures::future::LocalBoxFuture<'a, ()> {
-// let filename = "fs-event-sentinel";
-// let tree = self.clone();
-// let (fs, root_path) = self.read_with(cx, |tree, _| {
-// let tree = tree.as_local().unwrap();
-// (tree.fs.clone(), tree.abs_path().clone())
-// });
-
-// async move {
-// fs.create_file(&root_path.join(filename), Default::default())
-// .await
-// .unwrap();
-// tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_some())
-// .await;
-
-// fs.remove_file(&root_path.join(filename), Default::default())
-// .await
-// .unwrap();
-// tree.condition(cx, |tree, _| tree.entry_for_path(filename).is_none())
-// .await;
-
-// cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
-// .await;
-// }
-// .boxed_local()
-// }
-// }
+pub trait WorktreeModelHandle {
+ #[cfg(any(test, feature = "test-support"))]
+ fn flush_fs_events<'a>(
+ &self,
+ cx: &'a mut gpui2::TestAppContext,
+ ) -> futures::future::LocalBoxFuture<'a, ()>;
+}
+
+impl WorktreeModelHandle for Model<Worktree> {
+ // When the worktree's FS event stream sometimes delivers "redundant" events for FS changes that
+ // occurred before the worktree was constructed. These events can cause the worktree to perform
+ // extra directory scans, and emit extra scan-state notifications.
+ //
+ // This function mutates the worktree's directory and waits for those mutations to be picked up,
+ // to ensure that all redundant FS events have already been processed.
+ #[cfg(any(test, feature = "test-support"))]
+ fn flush_fs_events<'a>(
+ &self,
+ cx: &'a mut gpui2::TestAppContext,
+ ) -> futures::future::LocalBoxFuture<'a, ()> {
+ let filename = "fs-event-sentinel";
+ let tree = self.clone();
+ let (fs, root_path) = self.update(cx, |tree, _| {
+ let tree = tree.as_local().unwrap();
+ (tree.fs.clone(), tree.abs_path().clone())
+ });
+
+ async move {
+ fs.create_file(&root_path.join(filename), Default::default())
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+ assert!(tree.update(cx, |tree, _| tree.entry_for_path(filename).is_some()));
+
+ fs.remove_file(&root_path.join(filename), Default::default())
+ .await
+ .unwrap();
+ cx.executor().run_until_parked();
+ assert!(tree.update(cx, |tree, _| tree.entry_for_path(filename).is_none()));
+
+ cx.update(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ }
+ .boxed_local()
+ }
+}
#[derive(Clone, Debug)]
struct TraversalProgress<'a> {
@@ -81,7 +81,12 @@ fn main() {
}),
..Default::default()
},
- move |cx| cx.build_view(|cx| StoryWrapper::new(selector.story(cx))),
+ move |cx| {
+ let theme_settings = ThemeSettings::get_global(cx);
+ cx.set_rem_size(theme_settings.ui_font_size);
+
+ cx.build_view(|cx| StoryWrapper::new(selector.story(cx)))
+ },
);
cx.activate(true);
@@ -0,0 +1,37 @@
+[package]
+name = "text2"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/text2.rs"
+doctest = false
+
+[features]
+test-support = ["rand"]
+
+[dependencies]
+clock = { path = "../clock" }
+collections = { path = "../collections" }
+rope = { path = "../rope" }
+sum_tree = { path = "../sum_tree" }
+util = { path = "../util" }
+
+anyhow.workspace = true
+digest = { version = "0.9", features = ["std"] }
+lazy_static.workspace = true
+log.workspace = true
+parking_lot.workspace = true
+postage.workspace = true
+rand = { workspace = true, optional = true }
+smallvec.workspace = true
+regex.workspace = true
+
+[dev-dependencies]
+collections = { path = "../collections", features = ["test-support"] }
+gpui2 = { path = "../gpui2", features = ["test-support"] }
+util = { path = "../util", features = ["test-support"] }
+ctor.workspace = true
+env_logger.workspace = true
+rand.workspace = true
@@ -0,0 +1,144 @@
+use crate::{
+ locator::Locator, BufferSnapshot, Point, PointUtf16, TextDimension, ToOffset, ToPoint,
+ ToPointUtf16,
+};
+use anyhow::Result;
+use std::{cmp::Ordering, fmt::Debug, ops::Range};
+use sum_tree::Bias;
+
+#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash, Default)]
+pub struct Anchor {
+ pub timestamp: clock::Lamport,
+ pub offset: usize,
+ pub bias: Bias,
+ pub buffer_id: Option<u64>,
+}
+
+impl Anchor {
+ pub const MIN: Self = Self {
+ timestamp: clock::Lamport::MIN,
+ offset: usize::MIN,
+ bias: Bias::Left,
+ buffer_id: None,
+ };
+
+ pub const MAX: Self = Self {
+ timestamp: clock::Lamport::MAX,
+ offset: usize::MAX,
+ bias: Bias::Right,
+ buffer_id: None,
+ };
+
+ pub fn cmp(&self, other: &Anchor, buffer: &BufferSnapshot) -> Ordering {
+ let fragment_id_comparison = if self.timestamp == other.timestamp {
+ Ordering::Equal
+ } else {
+ buffer
+ .fragment_id_for_anchor(self)
+ .cmp(buffer.fragment_id_for_anchor(other))
+ };
+
+ fragment_id_comparison
+ .then_with(|| self.offset.cmp(&other.offset))
+ .then_with(|| self.bias.cmp(&other.bias))
+ }
+
+ pub fn min(&self, other: &Self, buffer: &BufferSnapshot) -> Self {
+ if self.cmp(other, buffer).is_le() {
+ *self
+ } else {
+ *other
+ }
+ }
+
+ pub fn max(&self, other: &Self, buffer: &BufferSnapshot) -> Self {
+ if self.cmp(other, buffer).is_ge() {
+ *self
+ } else {
+ *other
+ }
+ }
+
+ pub fn bias(&self, bias: Bias, buffer: &BufferSnapshot) -> Anchor {
+ if bias == Bias::Left {
+ self.bias_left(buffer)
+ } else {
+ self.bias_right(buffer)
+ }
+ }
+
+ pub fn bias_left(&self, buffer: &BufferSnapshot) -> Anchor {
+ if self.bias == Bias::Left {
+ *self
+ } else {
+ buffer.anchor_before(self)
+ }
+ }
+
+ pub fn bias_right(&self, buffer: &BufferSnapshot) -> Anchor {
+ if self.bias == Bias::Right {
+ *self
+ } else {
+ buffer.anchor_after(self)
+ }
+ }
+
+ pub fn summary<D>(&self, content: &BufferSnapshot) -> D
+ where
+ D: TextDimension,
+ {
+ content.summary_for_anchor(self)
+ }
+
+ /// Returns true when the [Anchor] is located inside a visible fragment.
+ pub fn is_valid(&self, buffer: &BufferSnapshot) -> bool {
+ if *self == Anchor::MIN || *self == Anchor::MAX {
+ true
+ } else {
+ let fragment_id = buffer.fragment_id_for_anchor(self);
+ let mut fragment_cursor = buffer.fragments.cursor::<(Option<&Locator>, usize)>();
+ fragment_cursor.seek(&Some(fragment_id), Bias::Left, &None);
+ fragment_cursor
+ .item()
+ .map_or(false, |fragment| fragment.visible)
+ }
+ }
+}
+
+pub trait OffsetRangeExt {
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize>;
+ fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point>;
+ fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16>;
+}
+
+impl<T> OffsetRangeExt for Range<T>
+where
+ T: ToOffset,
+{
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> Range<usize> {
+ self.start.to_offset(snapshot)..self.end.to_offset(snapshot)
+ }
+
+ fn to_point(&self, snapshot: &BufferSnapshot) -> Range<Point> {
+ self.start.to_offset(snapshot).to_point(snapshot)
+ ..self.end.to_offset(snapshot).to_point(snapshot)
+ }
+
+ fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> Range<PointUtf16> {
+ self.start.to_offset(snapshot).to_point_utf16(snapshot)
+ ..self.end.to_offset(snapshot).to_point_utf16(snapshot)
+ }
+}
+
+pub trait AnchorRangeExt {
+ fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering>;
+}
+
+impl AnchorRangeExt for Range<Anchor> {
+ fn cmp(&self, other: &Range<Anchor>, buffer: &BufferSnapshot) -> Result<Ordering> {
+ Ok(match self.start.cmp(&other.start, buffer) {
+ Ordering::Equal => other.end.cmp(&self.end, buffer),
+ ord => ord,
+ })
+ }
+}
@@ -0,0 +1,125 @@
+use lazy_static::lazy_static;
+use smallvec::{smallvec, SmallVec};
+use std::iter;
+
+lazy_static! {
+ static ref MIN: Locator = Locator::min();
+ static ref MAX: Locator = Locator::max();
+}
+
+#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Locator(SmallVec<[u64; 4]>);
+
+impl Locator {
+ pub fn min() -> Self {
+ Self(smallvec![u64::MIN])
+ }
+
+ pub fn max() -> Self {
+ Self(smallvec![u64::MAX])
+ }
+
+ pub fn min_ref() -> &'static Self {
+ &*MIN
+ }
+
+ pub fn max_ref() -> &'static Self {
+ &*MAX
+ }
+
+ pub fn assign(&mut self, other: &Self) {
+ self.0.resize(other.0.len(), 0);
+ self.0.copy_from_slice(&other.0);
+ }
+
+ pub fn between(lhs: &Self, rhs: &Self) -> Self {
+ let lhs = lhs.0.iter().copied().chain(iter::repeat(u64::MIN));
+ let rhs = rhs.0.iter().copied().chain(iter::repeat(u64::MAX));
+ let mut location = SmallVec::new();
+ for (lhs, rhs) in lhs.zip(rhs) {
+ let mid = lhs + ((rhs.saturating_sub(lhs)) >> 48);
+ location.push(mid);
+ if mid > lhs {
+ break;
+ }
+ }
+ Self(location)
+ }
+
+ pub fn len(&self) -> usize {
+ self.0.len()
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+
+impl Default for Locator {
+ fn default() -> Self {
+ Self::min()
+ }
+}
+
+impl sum_tree::Item for Locator {
+ type Summary = Locator;
+
+ fn summary(&self) -> Self::Summary {
+ self.clone()
+ }
+}
+
+impl sum_tree::KeyedItem for Locator {
+ type Key = Locator;
+
+ fn key(&self) -> Self::Key {
+ self.clone()
+ }
+}
+
+impl sum_tree::Summary for Locator {
+ type Context = ();
+
+ fn add_summary(&mut self, summary: &Self, _: &()) {
+ self.assign(summary);
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use rand::prelude::*;
+ use std::mem;
+
+ #[gpui2::test(iterations = 100)]
+ fn test_locators(mut rng: StdRng) {
+ let mut lhs = Default::default();
+ let mut rhs = Default::default();
+ while lhs == rhs {
+ lhs = Locator(
+ (0..rng.gen_range(1..=5))
+ .map(|_| rng.gen_range(0..=100))
+ .collect(),
+ );
+ rhs = Locator(
+ (0..rng.gen_range(1..=5))
+ .map(|_| rng.gen_range(0..=100))
+ .collect(),
+ );
+ }
+
+ if lhs > rhs {
+ mem::swap(&mut lhs, &mut rhs);
+ }
+
+ let middle = Locator::between(&lhs, &rhs);
+ assert!(middle > lhs);
+ assert!(middle < rhs);
+ for ix in 0..middle.0.len() - 1 {
+ assert!(
+ middle.0[ix] == *lhs.0.get(ix).unwrap_or(&0)
+ || middle.0[ix] == *rhs.0.get(ix).unwrap_or(&0)
+ );
+ }
+ }
+}
@@ -0,0 +1,69 @@
+use clock::ReplicaId;
+
+pub struct Network<T: Clone, R: rand::Rng> {
+ inboxes: std::collections::BTreeMap<ReplicaId, Vec<Envelope<T>>>,
+ all_messages: Vec<T>,
+ rng: R,
+}
+
+#[derive(Clone)]
+struct Envelope<T: Clone> {
+ message: T,
+}
+
+impl<T: Clone, R: rand::Rng> Network<T, R> {
+ pub fn new(rng: R) -> Self {
+ Network {
+ inboxes: Default::default(),
+ all_messages: Vec::new(),
+ rng,
+ }
+ }
+
+ pub fn add_peer(&mut self, id: ReplicaId) {
+ self.inboxes.insert(id, Vec::new());
+ }
+
+ pub fn replicate(&mut self, old_replica_id: ReplicaId, new_replica_id: ReplicaId) {
+ self.inboxes
+ .insert(new_replica_id, self.inboxes[&old_replica_id].clone());
+ }
+
+ pub fn is_idle(&self) -> bool {
+ self.inboxes.values().all(|i| i.is_empty())
+ }
+
+ pub fn broadcast(&mut self, sender: ReplicaId, messages: Vec<T>) {
+ for (replica, inbox) in self.inboxes.iter_mut() {
+ if *replica != sender {
+ for message in &messages {
+ // Insert one or more duplicates of this message, potentially *before* the previous
+ // message sent by this peer to simulate out-of-order delivery.
+ for _ in 0..self.rng.gen_range(1..4) {
+ let insertion_index = self.rng.gen_range(0..inbox.len() + 1);
+ inbox.insert(
+ insertion_index,
+ Envelope {
+ message: message.clone(),
+ },
+ );
+ }
+ }
+ }
+ }
+ self.all_messages.extend(messages);
+ }
+
+ pub fn has_unreceived(&self, receiver: ReplicaId) -> bool {
+ !self.inboxes[&receiver].is_empty()
+ }
+
+ pub fn receive(&mut self, receiver: ReplicaId) -> Vec<T> {
+ let inbox = self.inboxes.get_mut(&receiver).unwrap();
+ let count = self.rng.gen_range(0..inbox.len() + 1);
+ inbox
+ .drain(0..count)
+ .map(|envelope| envelope.message)
+ .collect()
+ }
+}
@@ -0,0 +1,153 @@
+use std::{fmt::Debug, ops::Add};
+use sum_tree::{Dimension, Edit, Item, KeyedItem, SumTree, Summary};
+
+pub trait Operation: Clone + Debug {
+ fn lamport_timestamp(&self) -> clock::Lamport;
+}
+
+#[derive(Clone, Debug)]
+struct OperationItem<T>(T);
+
+#[derive(Clone, Debug)]
+pub struct OperationQueue<T: Operation>(SumTree<OperationItem<T>>);
+
+#[derive(Clone, Copy, Debug, Default, Eq, Ord, PartialEq, PartialOrd)]
+pub struct OperationKey(clock::Lamport);
+
+#[derive(Clone, Copy, Debug, Default, Eq, PartialEq)]
+pub struct OperationSummary {
+ pub key: OperationKey,
+ pub len: usize,
+}
+
+impl OperationKey {
+ pub fn new(timestamp: clock::Lamport) -> Self {
+ Self(timestamp)
+ }
+}
+
+impl<T: Operation> Default for OperationQueue<T> {
+ fn default() -> Self {
+ OperationQueue::new()
+ }
+}
+
+impl<T: Operation> OperationQueue<T> {
+ pub fn new() -> Self {
+ OperationQueue(SumTree::new())
+ }
+
+ pub fn len(&self) -> usize {
+ self.0.summary().len
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ pub fn insert(&mut self, mut ops: Vec<T>) {
+ ops.sort_by_key(|op| op.lamport_timestamp());
+ ops.dedup_by_key(|op| op.lamport_timestamp());
+ self.0.edit(
+ ops.into_iter()
+ .map(|op| Edit::Insert(OperationItem(op)))
+ .collect(),
+ &(),
+ );
+ }
+
+ pub fn drain(&mut self) -> Self {
+ let clone = self.clone();
+ self.0 = SumTree::new();
+ clone
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = &T> {
+ self.0.iter().map(|i| &i.0)
+ }
+}
+
+impl Summary for OperationSummary {
+ type Context = ();
+
+ fn add_summary(&mut self, other: &Self, _: &()) {
+ assert!(self.key < other.key);
+ self.key = other.key;
+ self.len += other.len;
+ }
+}
+
+impl<'a> Add<&'a Self> for OperationSummary {
+ type Output = Self;
+
+ fn add(self, other: &Self) -> Self {
+ assert!(self.key < other.key);
+ OperationSummary {
+ key: other.key,
+ len: self.len + other.len,
+ }
+ }
+}
+
+impl<'a> Dimension<'a, OperationSummary> for OperationKey {
+ fn add_summary(&mut self, summary: &OperationSummary, _: &()) {
+ assert!(*self <= summary.key);
+ *self = summary.key;
+ }
+}
+
+impl<T: Operation> Item for OperationItem<T> {
+ type Summary = OperationSummary;
+
+ fn summary(&self) -> Self::Summary {
+ OperationSummary {
+ key: OperationKey::new(self.0.lamport_timestamp()),
+ len: 1,
+ }
+ }
+}
+
+impl<T: Operation> KeyedItem for OperationItem<T> {
+ type Key = OperationKey;
+
+ fn key(&self) -> Self::Key {
+ OperationKey::new(self.0.lamport_timestamp())
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_len() {
+ let mut clock = clock::Lamport::new(0);
+
+ let mut queue = OperationQueue::new();
+ assert_eq!(queue.len(), 0);
+
+ queue.insert(vec![
+ TestOperation(clock.tick()),
+ TestOperation(clock.tick()),
+ ]);
+ assert_eq!(queue.len(), 2);
+
+ queue.insert(vec![TestOperation(clock.tick())]);
+ assert_eq!(queue.len(), 3);
+
+ drop(queue.drain());
+ assert_eq!(queue.len(), 0);
+
+ queue.insert(vec![TestOperation(clock.tick())]);
+ assert_eq!(queue.len(), 1);
+ }
+
+ #[derive(Clone, Debug, Eq, PartialEq)]
+ struct TestOperation(clock::Lamport);
+
+ impl Operation for TestOperation {
+ fn lamport_timestamp(&self) -> clock::Lamport {
+ self.0
+ }
+ }
+}
@@ -0,0 +1,594 @@
+use crate::Edit;
+use std::{
+ cmp, mem,
+ ops::{Add, AddAssign, Sub},
+};
+
+/// An ordered, disjoint set of edits describing a transformation from one
+/// version of a text (coordinates in `old`) to another (coordinates in `new`).
+#[derive(Clone, Default, Debug, PartialEq, Eq)]
+pub struct Patch<T>(Vec<Edit<T>>);
+
+impl<T> Patch<T>
+where
+ T: 'static
+ + Clone
+ + Copy
+ + Ord
+ + Sub<T, Output = T>
+ + Add<T, Output = T>
+ + AddAssign
+ + Default
+ + PartialEq,
+{
+ /// Wraps `edits` as a patch. In debug builds, asserts the invariant the
+ /// rest of this impl relies on: edits are sorted and strictly disjoint
+ /// (not even touching) in both old and new coordinates.
+ pub fn new(edits: Vec<Edit<T>>) -> Self {
+ #[cfg(debug_assertions)]
+ {
+ let mut last_edit: Option<&Edit<T>> = None;
+ for edit in &edits {
+ if let Some(last_edit) = last_edit {
+ assert!(edit.old.start > last_edit.old.end);
+ assert!(edit.new.start > last_edit.new.end);
+ }
+ last_edit = Some(edit);
+ }
+ }
+ Self(edits)
+ }
+
+ /// Borrows the underlying edits, in order.
+ pub fn edits(&self) -> &[Edit<T>] {
+ &self.0
+ }
+
+ /// Consumes the patch, returning its edits.
+ pub fn into_inner(self) -> Vec<Edit<T>> {
+ self.0
+ }
+
+ /// Composes `self` (old -> intermediate) with `new_edits_iter`
+ /// (intermediate -> new) into a single patch mapping old -> new.
+ ///
+ /// The loop walks both edit streams in parallel. `old_start`/`new_start`
+ /// track how far into the old and new coordinate spaces the composed
+ /// output has advanced; "catchup" adds the untouched gap before the next
+ /// edit to both cursors. Edits that don't intersect are copied through
+ /// (with coordinates translated); intersecting edits are split and merged
+ /// piecewise, which is why the peeked edits are mutated in place.
+ pub fn compose(&self, new_edits_iter: impl IntoIterator<Item = Edit<T>>) -> Self {
+ let mut old_edits_iter = self.0.iter().cloned().peekable();
+ let mut new_edits_iter = new_edits_iter.into_iter().peekable();
+ let mut composed = Patch(Vec::new());
+
+ let mut old_start = T::default();
+ let mut new_start = T::default();
+ loop {
+ let old_edit = old_edits_iter.peek_mut();
+ let new_edit = new_edits_iter.peek_mut();
+
+ // Push the old edit if its new end is before the new edit's old start.
+ if let Some(old_edit) = old_edit.as_ref() {
+ let new_edit = new_edit.as_ref();
+ if new_edit.map_or(true, |new_edit| old_edit.new.end < new_edit.old.start) {
+ let catchup = old_edit.old.start - old_start;
+ old_start += catchup;
+ new_start += catchup;
+
+ let old_end = old_start + old_edit.old_len();
+ let new_end = new_start + old_edit.new_len();
+ composed.push(Edit {
+ old: old_start..old_end,
+ new: new_start..new_end,
+ });
+ old_start = old_end;
+ new_start = new_end;
+ old_edits_iter.next();
+ continue;
+ }
+ }
+
+ // Push the new edit if its old end is before the old edit's new start.
+ if let Some(new_edit) = new_edit.as_ref() {
+ let old_edit = old_edit.as_ref();
+ if old_edit.map_or(true, |old_edit| new_edit.old.end < old_edit.new.start) {
+ let catchup = new_edit.new.start - new_start;
+ old_start += catchup;
+ new_start += catchup;
+
+ let old_end = old_start + new_edit.old_len();
+ let new_end = new_start + new_edit.new_len();
+ composed.push(Edit {
+ old: old_start..old_end,
+ new: new_start..new_end,
+ });
+ old_start = old_end;
+ new_start = new_end;
+ new_edits_iter.next();
+ continue;
+ }
+ }
+
+ // If we still have edits by this point then they must intersect, so we compose them.
+ if let Some((old_edit, new_edit)) = old_edit.zip(new_edit) {
+ if old_edit.new.start < new_edit.old.start {
+ // Emit the non-overlapping prefix of the old edit, then
+ // shrink it in place so only the overlapping part remains.
+ let catchup = old_edit.old.start - old_start;
+ old_start += catchup;
+ new_start += catchup;
+
+ let overshoot = new_edit.old.start - old_edit.new.start;
+ let old_end = cmp::min(old_start + overshoot, old_edit.old.end);
+ let new_end = new_start + overshoot;
+ composed.push(Edit {
+ old: old_start..old_end,
+ new: new_start..new_end,
+ });
+
+ old_edit.old.start = old_end;
+ old_edit.new.start += overshoot;
+ old_start = old_end;
+ new_start = new_end;
+ } else {
+ // Symmetric case: emit the prefix of the new edit instead.
+ let catchup = new_edit.new.start - new_start;
+ old_start += catchup;
+ new_start += catchup;
+
+ let overshoot = old_edit.new.start - new_edit.old.start;
+ let old_end = old_start + overshoot;
+ let new_end = cmp::min(new_start + overshoot, new_edit.new.end);
+ composed.push(Edit {
+ old: old_start..old_end,
+ new: new_start..new_end,
+ });
+
+ new_edit.old.start += overshoot;
+ new_edit.new.start = new_end;
+ old_start = old_end;
+ new_start = new_end;
+ }
+
+ // Emit the overlapping region, consuming whichever edit ends
+ // first and trimming the survivor for the next iteration.
+ if old_edit.new.end > new_edit.old.end {
+ let old_end = old_start + cmp::min(old_edit.old_len(), new_edit.old_len());
+ let new_end = new_start + new_edit.new_len();
+ composed.push(Edit {
+ old: old_start..old_end,
+ new: new_start..new_end,
+ });
+
+ old_edit.old.start = old_end;
+ old_edit.new.start = new_edit.old.end;
+ old_start = old_end;
+ new_start = new_end;
+ new_edits_iter.next();
+ } else {
+ let old_end = old_start + old_edit.old_len();
+ let new_end = new_start + cmp::min(old_edit.new_len(), new_edit.new_len());
+ composed.push(Edit {
+ old: old_start..old_end,
+ new: new_start..new_end,
+ });
+
+ new_edit.old.start = old_edit.new.end;
+ new_edit.new.start = new_end;
+ old_start = old_end;
+ new_start = new_end;
+ old_edits_iter.next();
+ }
+ } else {
+ break;
+ }
+ }
+
+ composed
+ }
+
+ /// Swaps the old and new coordinates of every edit in place, turning a
+ /// patch from A to B into a patch from B to A.
+ pub fn invert(&mut self) -> &mut Self {
+ for edit in &mut self.0 {
+ mem::swap(&mut edit.old, &mut edit.new);
+ }
+ self
+ }
+
+ /// Removes all edits.
+ pub fn clear(&mut self) {
+ self.0.clear();
+ }
+
+ /// True if the patch contains no edits.
+ pub fn is_empty(&self) -> bool {
+ self.0.is_empty()
+ }
+
+ /// Appends an edit, merging it into the last edit when they touch or
+ /// overlap in old coordinates. Empty edits are dropped.
+ ///
+ /// NOTE(review): the merge assumes `edit` starts and ends at or after the
+ /// last edit (edits are pushed in order, as `compose` does) — presumably
+ /// callers uphold this; out-of-order pushes would corrupt the ranges.
+ pub fn push(&mut self, edit: Edit<T>) {
+ if edit.is_empty() {
+ return;
+ }
+
+ if let Some(last) = self.0.last_mut() {
+ if last.old.end >= edit.old.start {
+ last.old.end = edit.old.end;
+ last.new.end = edit.new.end;
+ } else {
+ self.0.push(edit);
+ }
+ } else {
+ self.0.push(edit);
+ }
+ }
+
+ /// Translates a position in old coordinates to new coordinates.
+ /// Positions inside a replaced region clip to the start of the
+ /// replacement; positions after an edit are shifted by its net delta.
+ pub fn old_to_new(&self, old: T) -> T {
+ // Find the last edit whose old start is <= `old`.
+ let ix = match self.0.binary_search_by(|probe| probe.old.start.cmp(&old)) {
+ Ok(ix) => ix,
+ Err(ix) => {
+ if ix == 0 {
+ return old;
+ } else {
+ ix - 1
+ }
+ }
+ };
+ if let Some(edit) = self.0.get(ix) {
+ if old >= edit.old.end {
+ edit.new.end + (old - edit.old.end)
+ } else {
+ edit.new.start
+ }
+ } else {
+ old
+ }
+ }
+}
+
+/// Consuming iteration yields the edits by value, in order.
+impl<T: Clone> IntoIterator for Patch<T> {
+ type Item = Edit<T>;
+ type IntoIter = std::vec::IntoIter<Edit<T>>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.0.into_iter()
+ }
+}
+
+/// Borrowed iteration yields cloned edits so callers get owned `Edit<T>`s.
+impl<'a, T: Clone> IntoIterator for &'a Patch<T> {
+ type Item = Edit<T>;
+ type IntoIter = std::iter::Cloned<std::slice::Iter<'a, Edit<T>>>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.0.iter().cloned()
+ }
+}
+
+/// NOTE(review): despite the `&mut` receiver this yields *cloned* edits, not
+/// mutable references — identical to the `&Patch<T>` impl above. Presumably
+/// a convenience so `for edit in &mut patch` compiles; confirm if mutable
+/// iteration was intended.
+impl<'a, T: Clone> IntoIterator for &'a mut Patch<T> {
+ type Item = Edit<T>;
+ type IntoIter = std::iter::Cloned<std::slice::Iter<'a, Edit<T>>>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.0.iter().cloned()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use rand::prelude::*;
+ use std::env;
+
+ /// Composing with an edit strictly before/after the old edit keeps both,
+ /// with coordinates translated.
+ #[gpui2::test]
+ fn test_one_disjoint_edit() {
+ assert_patch_composition(
+ Patch(vec![Edit {
+ old: 1..3,
+ new: 1..4,
+ }]),
+ Patch(vec![Edit {
+ old: 0..0,
+ new: 0..4,
+ }]),
+ Patch(vec![
+ Edit {
+ old: 0..0,
+ new: 0..4,
+ },
+ Edit {
+ old: 1..3,
+ new: 5..8,
+ },
+ ]),
+ );
+
+ assert_patch_composition(
+ Patch(vec![Edit {
+ old: 1..3,
+ new: 1..4,
+ }]),
+ Patch(vec![Edit {
+ old: 5..9,
+ new: 5..7,
+ }]),
+ Patch(vec![
+ Edit {
+ old: 1..3,
+ new: 1..4,
+ },
+ Edit {
+ old: 4..8,
+ new: 5..7,
+ },
+ ]),
+ );
+ }
+
+ /// Overlapping edits merge into a single composed edit.
+ #[gpui2::test]
+ fn test_one_overlapping_edit() {
+ assert_patch_composition(
+ Patch(vec![Edit {
+ old: 1..3,
+ new: 1..4,
+ }]),
+ Patch(vec![Edit {
+ old: 3..5,
+ new: 3..6,
+ }]),
+ Patch(vec![Edit {
+ old: 1..4,
+ new: 1..6,
+ }]),
+ );
+ }
+
+ /// Mix of a disjoint edit and one that overlaps two old edits.
+ #[gpui2::test]
+ fn test_two_disjoint_and_overlapping() {
+ assert_patch_composition(
+ Patch(vec![
+ Edit {
+ old: 1..3,
+ new: 1..4,
+ },
+ Edit {
+ old: 8..12,
+ new: 9..11,
+ },
+ ]),
+ Patch(vec![
+ Edit {
+ old: 0..0,
+ new: 0..4,
+ },
+ Edit {
+ old: 3..10,
+ new: 7..9,
+ },
+ ]),
+ Patch(vec![
+ Edit {
+ old: 0..0,
+ new: 0..4,
+ },
+ Edit {
+ old: 1..12,
+ new: 5..10,
+ },
+ ]),
+ );
+ }
+
+ /// Two new edits landing inside a single old edit collapse correctly.
+ #[gpui2::test]
+ fn test_two_new_edits_overlapping_one_old_edit() {
+ assert_patch_composition(
+ Patch(vec![Edit {
+ old: 0..0,
+ new: 0..3,
+ }]),
+ Patch(vec![
+ Edit {
+ old: 0..0,
+ new: 0..1,
+ },
+ Edit {
+ old: 1..2,
+ new: 2..2,
+ },
+ ]),
+ Patch(vec![Edit {
+ old: 0..0,
+ new: 0..3,
+ }]),
+ );
+
+ assert_patch_composition(
+ Patch(vec![Edit {
+ old: 2..3,
+ new: 2..4,
+ }]),
+ Patch(vec![
+ Edit {
+ old: 0..2,
+ new: 0..1,
+ },
+ Edit {
+ old: 3..3,
+ new: 2..5,
+ },
+ ]),
+ Patch(vec![Edit {
+ old: 0..3,
+ new: 0..6,
+ }]),
+ );
+
+ assert_patch_composition(
+ Patch(vec![Edit {
+ old: 0..0,
+ new: 0..2,
+ }]),
+ Patch(vec![
+ Edit {
+ old: 0..0,
+ new: 0..2,
+ },
+ Edit {
+ old: 2..5,
+ new: 4..4,
+ },
+ ]),
+ Patch(vec![Edit {
+ old: 0..3,
+ new: 0..4,
+ }]),
+ );
+ }
+
+ /// New edits that merely touch (no overlap) an old edit leave the old
+ /// patch's shape intact.
+ #[gpui2::test]
+ fn test_two_new_edits_touching_one_old_edit() {
+ assert_patch_composition(
+ Patch(vec![
+ Edit {
+ old: 2..3,
+ new: 2..4,
+ },
+ Edit {
+ old: 7..7,
+ new: 8..11,
+ },
+ ]),
+ Patch(vec![
+ Edit {
+ old: 2..3,
+ new: 2..2,
+ },
+ Edit {
+ old: 4..4,
+ new: 3..4,
+ },
+ ]),
+ Patch(vec![
+ Edit {
+ old: 2..3,
+ new: 2..4,
+ },
+ Edit {
+ old: 7..7,
+ new: 8..11,
+ },
+ ]),
+ );
+ }
+
+ /// Spot-checks `old_to_new` on, before, inside, and after edit ranges.
+ #[gpui2::test]
+ fn test_old_to_new() {
+ let patch = Patch(vec![
+ Edit {
+ old: 2..4,
+ new: 2..4,
+ },
+ Edit {
+ old: 7..8,
+ new: 7..11,
+ },
+ ]);
+ assert_eq!(patch.old_to_new(0), 0);
+ assert_eq!(patch.old_to_new(1), 1);
+ assert_eq!(patch.old_to_new(2), 2);
+ assert_eq!(patch.old_to_new(3), 2);
+ assert_eq!(patch.old_to_new(4), 4);
+ assert_eq!(patch.old_to_new(5), 5);
+ assert_eq!(patch.old_to_new(6), 6);
+ assert_eq!(patch.old_to_new(7), 7);
+ assert_eq!(patch.old_to_new(8), 11);
+ assert_eq!(patch.old_to_new(9), 12);
+ }
+
+ /// Randomized check: applying two generated patches sequentially must
+ /// equal applying their composition once.
+ #[gpui2::test(iterations = 100)]
+ fn test_random_patch_compositions(mut rng: StdRng) {
+ let operations = env::var("OPERATIONS")
+ .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+ .unwrap_or(20);
+
+ let initial_chars = (0..rng.gen_range(0..=100))
+ .map(|_| rng.gen_range(b'a'..=b'z') as char)
+ .collect::<Vec<_>>();
+ log::info!("initial chars: {:?}", initial_chars);
+
+ // Generate two sequential patches
+ let mut patches = Vec::new();
+ let mut expected_chars = initial_chars.clone();
+ for i in 0..2 {
+ log::info!("patch {}:", i);
+
+ // `delta` tracks the running old->new offset shift so edit
+ // ranges can be expressed in the patch's own old coordinates.
+ let mut delta = 0i32;
+ let mut last_edit_end = 0;
+ let mut edits = Vec::new();
+
+ for _ in 0..operations {
+ if last_edit_end >= expected_chars.len() {
+ break;
+ }
+
+ let end = rng.gen_range(last_edit_end..=expected_chars.len());
+ let start = rng.gen_range(last_edit_end..=end);
+ let old_len = end - start;
+
+ let mut new_len = rng.gen_range(0..=3);
+ if start == end && new_len == 0 {
+ new_len += 1;
+ }
+
+ // +1 keeps consecutive edits strictly disjoint.
+ last_edit_end = start + new_len + 1;
+
+ let new_chars = (0..new_len)
+ .map(|_| rng.gen_range(b'A'..=b'Z') as char)
+ .collect::<Vec<_>>();
+ log::info!(
+ " editing {:?}: {:?}",
+ start..end,
+ new_chars.iter().collect::<String>()
+ );
+ edits.push(Edit {
+ old: (start as i32 - delta) as u32..(end as i32 - delta) as u32,
+ new: start as u32..(start + new_len) as u32,
+ });
+ expected_chars.splice(start..end, new_chars);
+
+ delta += new_len as i32 - old_len as i32;
+ }
+
+ patches.push(Patch(edits));
+ }
+
+ log::info!("old patch: {:?}", &patches[0]);
+ log::info!("new patch: {:?}", &patches[1]);
+ log::info!("initial chars: {:?}", initial_chars);
+ log::info!("final chars: {:?}", expected_chars);
+
+ // Compose the patches, and verify that it has the same effect as applying the
+ // two patches separately.
+ let composed = patches[0].compose(&patches[1]);
+ log::info!("composed patch: {:?}", &composed);
+
+ let mut actual_chars = initial_chars;
+ for edit in composed.0 {
+ actual_chars.splice(
+ edit.new.start as usize..edit.new.start as usize + edit.old.len(),
+ expected_chars[edit.new.start as usize..edit.new.end as usize]
+ .iter()
+ .copied(),
+ );
+ }
+
+ assert_eq!(actual_chars, expected_chars);
+ }
+
+ /// Asserts both that `composed` has the same effect as applying `old`
+ /// then `new`, and that `old.compose(&new)` produces exactly `composed`.
+ #[track_caller]
+ fn assert_patch_composition(old: Patch<u32>, new: Patch<u32>, composed: Patch<u32>) {
+ let original = ('a'..'z').collect::<Vec<_>>();
+ let inserted = ('A'..'Z').collect::<Vec<_>>();
+
+ let mut expected = original.clone();
+ apply_patch(&mut expected, &old, &inserted);
+ apply_patch(&mut expected, &new, &inserted);
+
+ let mut actual = original;
+ apply_patch(&mut actual, &composed, &expected);
+ assert_eq!(
+ actual.into_iter().collect::<String>(),
+ expected.into_iter().collect::<String>(),
+ "expected patch is incorrect"
+ );
+
+ assert_eq!(old.compose(&new), composed);
+ }
+
+ // Applies edits back-to-front so earlier ranges stay valid.
+ fn apply_patch(text: &mut Vec<char>, patch: &Patch<u32>, new_text: &[char]) {
+ for edit in patch.0.iter().rev() {
+ text.splice(
+ edit.old.start as usize..edit.old.end as usize,
+ new_text[edit.new.start as usize..edit.new.end as usize]
+ .iter()
+ .copied(),
+ );
+ }
+ }
+}
@@ -0,0 +1,123 @@
+use crate::{Anchor, BufferSnapshot, TextDimension};
+use std::cmp::Ordering;
+use std::ops::Range;
+
+/// The horizontal target a selection tries to maintain while moving
+/// vertically (e.g. keeping the caret column across lines of differing
+/// length).
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub enum SelectionGoal {
+ None,
+ HorizontalPosition(f32),
+ HorizontalRange { start: f32, end: f32 },
+ WrappedHorizontalPosition((u32, f32)),
+}
+
+/// A selection in coordinate type `T` (offset, point, anchor, …).
+/// `start <= end` always; `reversed` records whether the moving end
+/// (the "head") is at `start` rather than `end`.
+#[derive(Clone, Debug, PartialEq)]
+pub struct Selection<T> {
+ pub id: usize,
+ pub start: T,
+ pub end: T,
+ pub reversed: bool,
+ pub goal: SelectionGoal,
+}
+
+/// No goal by default: plain movements don't try to preserve a column.
+impl Default for SelectionGoal {
+ fn default() -> Self {
+ Self::None
+ }
+}
+
+impl<T: Clone> Selection<T> {
+ /// The moving end of the selection — where the cursor is drawn.
+ pub fn head(&self) -> T {
+ match self.reversed {
+ true => self.start.clone(),
+ false => self.end.clone(),
+ }
+ }
+
+ /// The anchored end of the selection — the opposite of `head`.
+ pub fn tail(&self) -> T {
+ match self.reversed {
+ true => self.end.clone(),
+ false => self.start.clone(),
+ }
+ }
+
+ /// Converts the selection's endpoints with `f`, preserving identity,
+ /// direction, and goal.
+ pub fn map<F, S>(&self, f: F) -> Selection<S>
+ where
+ F: Fn(T) -> S,
+ {
+ let start = f(self.start.clone());
+ let end = f(self.end.clone());
+ Selection::<S> {
+ id: self.id,
+ start,
+ end,
+ reversed: self.reversed,
+ goal: self.goal,
+ }
+ }
+
+ /// Collapses the selection to a caret at `point` with the given goal.
+ pub fn collapse_to(&mut self, point: T, new_goal: SelectionGoal) {
+ self.reversed = false;
+ self.goal = new_goal;
+ self.start = point.clone();
+ self.end = point;
+ }
+}
+
+impl<T: Copy + Ord> Selection<T> {
+ /// True when the selection is a caret (start == end).
+ pub fn is_empty(&self) -> bool {
+ self.start == self.end
+ }
+
+ /// Moves the head (moving end) to `head`, flipping `reversed` when the
+ /// head crosses the anchored tail, and records the new goal.
+ pub fn set_head(&mut self, head: T, new_goal: SelectionGoal) {
+ // Clearer than the original `cmp(..) < Ordering::Equal`: the head is
+ // strictly before the tail iff the comparison is `Less`.
+ if head.cmp(&self.tail()) == Ordering::Less {
+ if !self.reversed {
+ self.end = self.start;
+ self.reversed = true;
+ }
+ self.start = head;
+ } else {
+ if self.reversed {
+ self.start = self.end;
+ self.reversed = false;
+ }
+ self.end = head;
+ }
+ self.goal = new_goal;
+ }
+
+ /// The selection's span as a half-open range (`start..end`).
+ pub fn range(&self) -> Range<T> {
+ self.start..self.end
+ }
+}
+
+impl Selection<usize> {
+ /// Test helper: a forward caret selection at `offset` with id 0.
+ #[cfg(feature = "test-support")]
+ pub fn from_offset(offset: usize) -> Self {
+ Selection {
+ id: 0,
+ start: offset,
+ end: offset,
+ goal: SelectionGoal::None,
+ reversed: false,
+ }
+ }
+
+ /// True if the selection covers exactly `offset_range` (direction and
+ /// goal are ignored).
+ pub fn equals(&self, offset_range: &Range<usize>) -> bool {
+ self.start == offset_range.start && self.end == offset_range.end
+ }
+}
+
+impl Selection<Anchor> {
+ /// Resolves the anchored endpoints against `snapshot`, producing a
+ /// selection in a concrete coordinate type `D` (offset, point, …).
+ pub fn resolve<'a, D: 'a + TextDimension>(
+ &'a self,
+ snapshot: &'a BufferSnapshot,
+ ) -> Selection<D> {
+ Selection {
+ id: self.id,
+ start: snapshot.summary_for_anchor(&self.start),
+ end: snapshot.summary_for_anchor(&self.end),
+ reversed: self.reversed,
+ goal: self.goal,
+ }
+ }
+}
@@ -0,0 +1,48 @@
+use crate::{Edit, Patch};
+use parking_lot::Mutex;
+use std::{
+ mem,
+ sync::{Arc, Weak},
+};
+
+/// A broadcast point for edit notifications: holds weak handles to every
+/// live subscriber's pending patch. Dead subscribers are pruned on publish.
+#[derive(Default)]
+pub struct Topic(Mutex<Vec<Weak<Mutex<Patch<usize>>>>>);
+
+/// A subscriber's handle; accumulates published edits (composed into one
+/// patch) until `consume` is called.
+pub struct Subscription(Arc<Mutex<Patch<usize>>>);
+
+impl Topic {
+ /// Registers a new subscriber. The topic only keeps a `Weak` reference,
+ /// so dropping the returned `Subscription` unsubscribes automatically.
+ pub fn subscribe(&mut self) -> Subscription {
+ let subscription = Subscription(Default::default());
+ self.0.get_mut().push(Arc::downgrade(&subscription.0));
+ subscription
+ }
+
+ /// Broadcasts `edits` to all live subscribers (locking the list).
+ pub fn publish(&self, edits: impl Clone + IntoIterator<Item = Edit<usize>>) {
+ publish(&mut *self.0.lock(), edits);
+ }
+
+ /// Same as `publish`, but lock-free given exclusive access to the topic.
+ pub fn publish_mut(&mut self, edits: impl Clone + IntoIterator<Item = Edit<usize>>) {
+ publish(self.0.get_mut(), edits);
+ }
+}
+
+impl Subscription {
+ /// Takes the accumulated patch, leaving an empty one in its place.
+ pub fn consume(&self) -> Patch<usize> {
+ mem::take(&mut *self.0.lock())
+ }
+}
+
+/// Composes `edits` into every live subscriber's pending patch, dropping
+/// subscribers whose `Subscription` has been freed (weak upgrade fails).
+fn publish(
+ subscriptions: &mut Vec<Weak<Mutex<Patch<usize>>>>,
+ edits: impl Clone + IntoIterator<Item = Edit<usize>>,
+) {
+ // `retain` both delivers and garbage-collects in one pass.
+ subscriptions.retain(|weak| match weak.upgrade() {
+ Some(strong) => {
+ let mut pending = strong.lock();
+ *pending = pending.compose(edits.clone());
+ true
+ }
+ None => false,
+ });
+}
@@ -0,0 +1,764 @@
+use super::{network::Network, *};
+use clock::ReplicaId;
+use rand::prelude::*;
+use std::{
+ cmp::Ordering,
+ env,
+ iter::Iterator,
+ time::{Duration, Instant},
+};
+
+// Runs once before tests: enable env_logger only when RUST_LOG is set, so
+// the `log::info!` calls in these tests can be surfaced on demand.
+#[cfg(test)]
+#[ctor::ctor]
+fn init_logger() {
+ if std::env::var("RUST_LOG").is_ok() {
+ env_logger::init();
+ }
+}
+
+/// Basic insert/delete/replace edits at various positions produce the
+/// expected text.
+#[test]
+fn test_edit() {
+ let mut buffer = Buffer::new(0, 0, "abc".into());
+ assert_eq!(buffer.text(), "abc");
+ buffer.edit([(3..3, "def")]);
+ assert_eq!(buffer.text(), "abcdef");
+ buffer.edit([(0..0, "ghi")]);
+ assert_eq!(buffer.text(), "ghiabcdef");
+ buffer.edit([(5..5, "jkl")]);
+ assert_eq!(buffer.text(), "ghiabjklcdef");
+ buffer.edit([(6..7, "")]);
+ assert_eq!(buffer.text(), "ghiabjlcdef");
+ buffer.edit([(4..9, "mno")]);
+ assert_eq!(buffer.text(), "ghiamnoef");
+}
+
+/// Randomized buffer fuzzer: mirrors every random edit/undo/redo in a plain
+/// `String`, then checks text, summaries, invariants, and that
+/// `edits_since` / `edits_since_in_range` / subscriptions can replay each
+/// saved old version forward to the current text.
+#[gpui2::test(iterations = 100)]
+fn test_random_edits(mut rng: StdRng) {
+ let operations = env::var("OPERATIONS")
+ .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+ .unwrap_or(10);
+
+ let reference_string_len = rng.gen_range(0..3);
+ let mut reference_string = RandomCharIter::new(&mut rng)
+ .take(reference_string_len)
+ .collect::<String>();
+ let mut buffer = Buffer::new(0, 0, reference_string.clone());
+ LineEnding::normalize(&mut reference_string);
+
+ buffer.set_group_interval(Duration::from_millis(rng.gen_range(0..=200)));
+ let mut buffer_versions = Vec::new();
+ log::info!(
+ "buffer text {:?}, version: {:?}",
+ buffer.text(),
+ buffer.version()
+ );
+
+ for _i in 0..operations {
+ let (edits, _) = buffer.randomly_edit(&mut rng, 5);
+ // Apply in reverse so earlier ranges stay valid in the reference.
+ for (old_range, new_text) in edits.iter().rev() {
+ reference_string.replace_range(old_range.clone(), new_text);
+ }
+
+ assert_eq!(buffer.text(), reference_string);
+ log::info!(
+ "buffer text {:?}, version: {:?}",
+ buffer.text(),
+ buffer.version()
+ );
+
+ if rng.gen_bool(0.25) {
+ buffer.randomly_undo_redo(&mut rng);
+ // Undo/redo changes text unpredictably; resync the reference.
+ reference_string = buffer.text();
+ log::info!(
+ "buffer text {:?}, version: {:?}",
+ buffer.text(),
+ buffer.version()
+ );
+ }
+
+ let range = buffer.random_byte_range(0, &mut rng);
+ assert_eq!(
+ buffer.text_summary_for_range::<TextSummary, _>(range.clone()),
+ TextSummary::from(&reference_string[range])
+ );
+
+ buffer.check_invariants();
+
+ // Occasionally snapshot the buffer to test historical replays below.
+ if rng.gen_bool(0.3) {
+ buffer_versions.push((buffer.clone(), buffer.subscribe()));
+ }
+ }
+
+ for (old_buffer, subscription) in buffer_versions {
+ // Replaying edits_since over the old text must reproduce the new text.
+ let edits = buffer
+ .edits_since::<usize>(&old_buffer.version)
+ .collect::<Vec<_>>();
+
+ log::info!(
+ "applying edits since version {:?} to old text: {:?}: {:?}",
+ old_buffer.version(),
+ old_buffer.text(),
+ edits,
+ );
+
+ let mut text = old_buffer.visible_text.clone();
+ for edit in edits {
+ let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
+ text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
+ }
+ assert_eq!(text.to_string(), buffer.text());
+
+ // Same property restricted to random anchored sub-ranges.
+ for _ in 0..5 {
+ let end_ix = old_buffer.clip_offset(rng.gen_range(0..=old_buffer.len()), Bias::Right);
+ let start_ix = old_buffer.clip_offset(rng.gen_range(0..=end_ix), Bias::Left);
+ let range = old_buffer.anchor_before(start_ix)..old_buffer.anchor_after(end_ix);
+ let mut old_text = old_buffer.text_for_range(range.clone()).collect::<String>();
+ let edits = buffer
+ .edits_since_in_range::<usize>(&old_buffer.version, range.clone())
+ .collect::<Vec<_>>();
+ log::info!(
+ "applying edits since version {:?} to old text in range {:?}: {:?}: {:?}",
+ old_buffer.version(),
+ start_ix..end_ix,
+ old_text,
+ edits,
+ );
+
+ let new_text = buffer.text_for_range(range).collect::<String>();
+ for edit in edits {
+ old_text.replace_range(
+ edit.new.start..edit.new.start + edit.old_len(),
+ &new_text[edit.new],
+ );
+ }
+ assert_eq!(old_text, new_text);
+ }
+
+ // The subscription's accumulated patch must replay the same way.
+ let subscription_edits = subscription.consume();
+ log::info!(
+ "applying subscription edits since version {:?} to old text: {:?}: {:?}",
+ old_buffer.version(),
+ old_buffer.text(),
+ subscription_edits,
+ );
+
+ let mut text = old_buffer.visible_text.clone();
+ for edit in subscription_edits.into_inner() {
+ let new_text: String = buffer.text_for_range(edit.new.clone()).collect();
+ text.replace(edit.new.start..edit.new.start + edit.old.len(), &new_text);
+ }
+ assert_eq!(text.to_string(), buffer.text());
+ }
+}
+
+/// Line-ending detection plus normalization: mixed CRLF/CR input is stored
+/// as LF internally while the detected ending is remembered.
+#[test]
+fn test_line_endings() {
+ assert_eq!(LineEnding::detect(&"🍐✅\n".repeat(1000)), LineEnding::Unix);
+ assert_eq!(LineEnding::detect(&"abcd\n".repeat(1000)), LineEnding::Unix);
+ assert_eq!(
+ LineEnding::detect(&"🍐✅\r\n".repeat(1000)),
+ LineEnding::Windows
+ );
+ assert_eq!(
+ LineEnding::detect(&"abcd\r\n".repeat(1000)),
+ LineEnding::Windows
+ );
+
+ let mut buffer = Buffer::new(0, 0, "one\r\ntwo\rthree".into());
+ assert_eq!(buffer.text(), "one\ntwo\nthree");
+ assert_eq!(buffer.line_ending(), LineEnding::Windows);
+ buffer.check_invariants();
+
+ // Edits containing CRLF are normalized on insert too.
+ buffer.edit([(buffer.len()..buffer.len(), "\r\nfour")]);
+ buffer.edit([(0..0, "zero\r\n")]);
+ assert_eq!(buffer.text(), "zero\none\ntwo\nthree\nfour");
+ assert_eq!(buffer.line_ending(), LineEnding::Windows);
+ buffer.check_invariants();
+}
+
+/// `line_len` reports byte lengths per row after a series of edits that
+/// split and join lines.
+#[test]
+fn test_line_len() {
+ let mut buffer = Buffer::new(0, 0, "".into());
+ buffer.edit([(0..0, "abcd\nefg\nhij")]);
+ buffer.edit([(12..12, "kl\nmno")]);
+ buffer.edit([(18..18, "\npqrs\n")]);
+ buffer.edit([(18..21, "\nPQ")]);
+
+ assert_eq!(buffer.line_len(0), 4);
+ assert_eq!(buffer.line_len(1), 3);
+ assert_eq!(buffer.line_len(2), 5);
+ assert_eq!(buffer.line_len(3), 3);
+ assert_eq!(buffer.line_len(4), 4);
+ assert_eq!(buffer.line_len(5), 0);
+}
+
+/// `common_prefix_at` finds how much of the word preceding a position
+/// overlaps a completion suggestion — including suffix matches, non-prefix
+/// substrings (empty result), case-insensitivity, and non-ASCII text.
+#[test]
+fn test_common_prefix_at_position() {
+ let text = "a = str; b = δα";
+ let buffer = Buffer::new(0, 0, text.into());
+
+ let offset1 = offset_after(text, "str");
+ let offset2 = offset_after(text, "δα");
+
+ // the preceding word is a prefix of the suggestion
+ assert_eq!(
+ buffer.common_prefix_at(offset1, "string"),
+ range_of(text, "str"),
+ );
+ // a suffix of the preceding word is a prefix of the suggestion
+ assert_eq!(
+ buffer.common_prefix_at(offset1, "tree"),
+ range_of(text, "tr"),
+ );
+ // the preceding word is a substring of the suggestion, but not a prefix
+ assert_eq!(
+ buffer.common_prefix_at(offset1, "astro"),
+ empty_range_after(text, "str"),
+ );
+
+ // prefix matching is case insensitive.
+ assert_eq!(
+ buffer.common_prefix_at(offset1, "Strαngε"),
+ range_of(text, "str"),
+ );
+ assert_eq!(
+ buffer.common_prefix_at(offset2, "ΔΑΜΝ"),
+ range_of(text, "δα"),
+ );
+
+ // Byte offset just past `part` within `text`.
+ fn offset_after(text: &str, part: &str) -> usize {
+ text.find(part).unwrap() + part.len()
+ }
+
+ // Empty range positioned just past `part`.
+ fn empty_range_after(text: &str, part: &str) -> Range<usize> {
+ let offset = offset_after(text, part);
+ offset..offset
+ }
+
+ // Byte range of `part` within `text`.
+ fn range_of(text: &str, part: &str) -> Range<usize> {
+ let start = text.find(part).unwrap();
+ start..start + part.len()
+ }
+}
+
+/// `text_summary_for_range` computes length, line count, first/last line
+/// widths, and the longest row for ranges that start/end mid-line.
+#[test]
+fn test_text_summary_for_range() {
+ let buffer = Buffer::new(0, 0, "ab\nefg\nhklm\nnopqrs\ntuvwxyz".into());
+ assert_eq!(
+ buffer.text_summary_for_range::<TextSummary, _>(1..3),
+ TextSummary {
+ len: 2,
+ len_utf16: OffsetUtf16(2),
+ lines: Point::new(1, 0),
+ first_line_chars: 1,
+ last_line_chars: 0,
+ last_line_len_utf16: 0,
+ longest_row: 0,
+ longest_row_chars: 1,
+ }
+ );
+ assert_eq!(
+ buffer.text_summary_for_range::<TextSummary, _>(1..12),
+ TextSummary {
+ len: 11,
+ len_utf16: OffsetUtf16(11),
+ lines: Point::new(3, 0),
+ first_line_chars: 1,
+ last_line_chars: 0,
+ last_line_len_utf16: 0,
+ longest_row: 2,
+ longest_row_chars: 4,
+ }
+ );
+ assert_eq!(
+ buffer.text_summary_for_range::<TextSummary, _>(0..20),
+ TextSummary {
+ len: 20,
+ len_utf16: OffsetUtf16(20),
+ lines: Point::new(4, 1),
+ first_line_chars: 2,
+ last_line_chars: 1,
+ last_line_len_utf16: 1,
+ longest_row: 3,
+ longest_row_chars: 6,
+ }
+ );
+ assert_eq!(
+ buffer.text_summary_for_range::<TextSummary, _>(0..22),
+ TextSummary {
+ len: 22,
+ len_utf16: OffsetUtf16(22),
+ lines: Point::new(4, 3),
+ first_line_chars: 2,
+ last_line_chars: 3,
+ last_line_len_utf16: 3,
+ longest_row: 3,
+ longest_row_chars: 6,
+ }
+ );
+ assert_eq!(
+ buffer.text_summary_for_range::<TextSummary, _>(7..22),
+ TextSummary {
+ len: 15,
+ len_utf16: OffsetUtf16(15),
+ lines: Point::new(2, 3),
+ first_line_chars: 4,
+ last_line_chars: 3,
+ last_line_len_utf16: 3,
+ longest_row: 1,
+ longest_row_chars: 6,
+ }
+ );
+}
+
+/// `chars_at` streams the rest of the buffer starting from arbitrary
+/// row/column points, after edits that moved line boundaries around.
+#[test]
+fn test_chars_at() {
+ let mut buffer = Buffer::new(0, 0, "".into());
+ buffer.edit([(0..0, "abcd\nefgh\nij")]);
+ buffer.edit([(12..12, "kl\nmno")]);
+ buffer.edit([(18..18, "\npqrs")]);
+ buffer.edit([(18..21, "\nPQ")]);
+
+ let chars = buffer.chars_at(Point::new(0, 0));
+ assert_eq!(chars.collect::<String>(), "abcd\nefgh\nijkl\nmno\nPQrs");
+
+ let chars = buffer.chars_at(Point::new(1, 0));
+ assert_eq!(chars.collect::<String>(), "efgh\nijkl\nmno\nPQrs");
+
+ let chars = buffer.chars_at(Point::new(2, 0));
+ assert_eq!(chars.collect::<String>(), "ijkl\nmno\nPQrs");
+
+ let chars = buffer.chars_at(Point::new(3, 0));
+ assert_eq!(chars.collect::<String>(), "mno\nPQrs");
+
+ let chars = buffer.chars_at(Point::new(4, 0));
+ assert_eq!(chars.collect::<String>(), "PQrs");
+
+ // Regression test:
+ let mut buffer = Buffer::new(0, 0, "".into());
+ buffer.edit([(0..0, "[workspace]\nmembers = [\n \"xray_core\",\n \"xray_server\",\n \"xray_cli\",\n \"xray_wasm\",\n]\n")]);
+ buffer.edit([(60..60, "\n")]);
+
+ let chars = buffer.chars_at(Point::new(6, 0));
+ assert_eq!(chars.collect::<String>(), " \"xray_wasm\",\n]\n");
+}
+
+/// Anchor semantics: before/after bias under inserts and deletes at the
+/// anchored position, offset/point equivalence when creating anchors, and
+/// total ordering of anchors via `cmp`.
+#[test]
+fn test_anchors() {
+ let mut buffer = Buffer::new(0, 0, "".into());
+ buffer.edit([(0..0, "abc")]);
+ let left_anchor = buffer.anchor_before(2);
+ let right_anchor = buffer.anchor_after(2);
+
+ buffer.edit([(1..1, "def\n")]);
+ assert_eq!(buffer.text(), "adef\nbc");
+ assert_eq!(left_anchor.to_offset(&buffer), 6);
+ assert_eq!(right_anchor.to_offset(&buffer), 6);
+ assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+ assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+
+ buffer.edit([(2..3, "")]);
+ assert_eq!(buffer.text(), "adf\nbc");
+ assert_eq!(left_anchor.to_offset(&buffer), 5);
+ assert_eq!(right_anchor.to_offset(&buffer), 5);
+ assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+ assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+
+ // An insert exactly at the anchor position: the left-biased anchor
+ // stays put, while the right-biased anchor moves past the insertion.
+ buffer.edit([(5..5, "ghi\n")]);
+ assert_eq!(buffer.text(), "adf\nbghi\nc");
+ assert_eq!(left_anchor.to_offset(&buffer), 5);
+ assert_eq!(right_anchor.to_offset(&buffer), 9);
+ assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 });
+ assert_eq!(right_anchor.to_point(&buffer), Point { row: 2, column: 0 });
+
+ buffer.edit([(7..9, "")]);
+ assert_eq!(buffer.text(), "adf\nbghc");
+ assert_eq!(left_anchor.to_offset(&buffer), 5);
+ assert_eq!(right_anchor.to_offset(&buffer), 7);
+ assert_eq!(left_anchor.to_point(&buffer), Point { row: 1, column: 1 },);
+ assert_eq!(right_anchor.to_point(&buffer), Point { row: 1, column: 3 });
+
+ // Ensure anchoring to a point is equivalent to anchoring to an offset.
+ assert_eq!(
+ buffer.anchor_before(Point { row: 0, column: 0 }),
+ buffer.anchor_before(0)
+ );
+ assert_eq!(
+ buffer.anchor_before(Point { row: 0, column: 1 }),
+ buffer.anchor_before(1)
+ );
+ assert_eq!(
+ buffer.anchor_before(Point { row: 0, column: 2 }),
+ buffer.anchor_before(2)
+ );
+ assert_eq!(
+ buffer.anchor_before(Point { row: 0, column: 3 }),
+ buffer.anchor_before(3)
+ );
+ assert_eq!(
+ buffer.anchor_before(Point { row: 1, column: 0 }),
+ buffer.anchor_before(4)
+ );
+ assert_eq!(
+ buffer.anchor_before(Point { row: 1, column: 1 }),
+ buffer.anchor_before(5)
+ );
+ assert_eq!(
+ buffer.anchor_before(Point { row: 1, column: 2 }),
+ buffer.anchor_before(6)
+ );
+ assert_eq!(
+ buffer.anchor_before(Point { row: 1, column: 3 }),
+ buffer.anchor_before(7)
+ );
+ assert_eq!(
+ buffer.anchor_before(Point { row: 1, column: 4 }),
+ buffer.anchor_before(8)
+ );
+
+ // Comparison between anchors.
+ let anchor_at_offset_0 = buffer.anchor_before(0);
+ let anchor_at_offset_1 = buffer.anchor_before(1);
+ let anchor_at_offset_2 = buffer.anchor_before(2);
+
+ assert_eq!(
+ anchor_at_offset_0.cmp(&anchor_at_offset_0, &buffer),
+ Ordering::Equal
+ );
+ assert_eq!(
+ anchor_at_offset_1.cmp(&anchor_at_offset_1, &buffer),
+ Ordering::Equal
+ );
+ assert_eq!(
+ anchor_at_offset_2.cmp(&anchor_at_offset_2, &buffer),
+ Ordering::Equal
+ );
+
+ assert_eq!(
+ anchor_at_offset_0.cmp(&anchor_at_offset_1, &buffer),
+ Ordering::Less
+ );
+ assert_eq!(
+ anchor_at_offset_1.cmp(&anchor_at_offset_2, &buffer),
+ Ordering::Less
+ );
+ assert_eq!(
+ anchor_at_offset_0.cmp(&anchor_at_offset_2, &buffer),
+ Ordering::Less
+ );
+
+ assert_eq!(
+ anchor_at_offset_1.cmp(&anchor_at_offset_0, &buffer),
+ Ordering::Greater
+ );
+ assert_eq!(
+ anchor_at_offset_2.cmp(&anchor_at_offset_1, &buffer),
+ Ordering::Greater
+ );
+ assert_eq!(
+ anchor_at_offset_2.cmp(&anchor_at_offset_0, &buffer),
+ Ordering::Greater
+ );
+}
+
+/// Bias at the buffer's extremes: a before-anchor at 0 sticks to the start,
+/// an after-anchor at the end tracks the growing end.
+#[test]
+fn test_anchors_at_start_and_end() {
+ let mut buffer = Buffer::new(0, 0, "".into());
+ let before_start_anchor = buffer.anchor_before(0);
+ let after_end_anchor = buffer.anchor_after(0);
+
+ buffer.edit([(0..0, "abc")]);
+ assert_eq!(buffer.text(), "abc");
+ assert_eq!(before_start_anchor.to_offset(&buffer), 0);
+ assert_eq!(after_end_anchor.to_offset(&buffer), 3);
+
+ let after_start_anchor = buffer.anchor_after(0);
+ let before_end_anchor = buffer.anchor_before(3);
+
+ buffer.edit([(3..3, "def")]);
+ buffer.edit([(0..0, "ghi")]);
+ assert_eq!(buffer.text(), "ghiabcdef");
+ assert_eq!(before_start_anchor.to_offset(&buffer), 0);
+ assert_eq!(after_start_anchor.to_offset(&buffer), 3);
+ assert_eq!(before_end_anchor.to_offset(&buffer), 6);
+ assert_eq!(after_end_anchor.to_offset(&buffer), 9);
+}
+
+/// Out-of-order undo/redo of individual transactions: each transaction can
+/// be toggled independently, and the buffer converges correctly regardless
+/// of the order in which they are undone/redone.
+#[test]
+fn test_undo_redo() {
+ let mut buffer = Buffer::new(0, 0, "1234".into());
+ // Set group interval to zero so as to not group edits in the undo stack.
+ buffer.set_group_interval(Duration::from_secs(0));
+
+ buffer.edit([(1..1, "abx")]);
+ buffer.edit([(3..4, "yzef")]);
+ buffer.edit([(3..5, "cd")]);
+ assert_eq!(buffer.text(), "1abcdef234");
+
+ let entries = buffer.history.undo_stack.clone();
+ assert_eq!(entries.len(), 3);
+
+ // `undo_or_redo` toggles a transaction: first call undoes, second redoes.
+ buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
+ assert_eq!(buffer.text(), "1cdef234");
+ buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
+ assert_eq!(buffer.text(), "1abcdef234");
+
+ buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
+ assert_eq!(buffer.text(), "1abcdx234");
+ buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
+ assert_eq!(buffer.text(), "1abx234");
+ buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
+ assert_eq!(buffer.text(), "1abyzef234");
+ buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
+ assert_eq!(buffer.text(), "1abcdef234");
+
+ buffer.undo_or_redo(entries[2].transaction.clone()).unwrap();
+ assert_eq!(buffer.text(), "1abyzef234");
+ buffer.undo_or_redo(entries[0].transaction.clone()).unwrap();
+ assert_eq!(buffer.text(), "1yzef234");
+ buffer.undo_or_redo(entries[1].transaction.clone()).unwrap();
+ assert_eq!(buffer.text(), "1234");
+}
+
+/// Transaction grouping by time: transactions within the group interval
+/// undo/redo together, later ones separately; the redo stack clears on new
+/// edits; `group_until_transaction` merges manually.
+#[test]
+fn test_history() {
+ let mut now = Instant::now();
+ let mut buffer = Buffer::new(0, 0, "123456".into());
+ buffer.set_group_interval(Duration::from_millis(300));
+
+ let transaction_1 = buffer.start_transaction_at(now).unwrap();
+ buffer.edit([(2..4, "cd")]);
+ buffer.end_transaction_at(now);
+ assert_eq!(buffer.text(), "12cd56");
+
+ buffer.start_transaction_at(now);
+ buffer.edit([(4..5, "e")]);
+ buffer.end_transaction_at(now).unwrap();
+ assert_eq!(buffer.text(), "12cde6");
+
+ now += buffer.transaction_group_interval() + Duration::from_millis(1);
+ buffer.start_transaction_at(now);
+ buffer.edit([(0..1, "a")]);
+ buffer.edit([(1..1, "b")]);
+ buffer.end_transaction_at(now).unwrap();
+ assert_eq!(buffer.text(), "ab2cde6");
+
+ // Last transaction happened past the group interval, undo it on its own.
+ buffer.undo();
+ assert_eq!(buffer.text(), "12cde6");
+
+ // First two transactions happened within the group interval, undo them together.
+ buffer.undo();
+ assert_eq!(buffer.text(), "123456");
+
+ // Redo the first two transactions together.
+ buffer.redo();
+ assert_eq!(buffer.text(), "12cde6");
+
+ // Redo the last transaction on its own.
+ buffer.redo();
+ assert_eq!(buffer.text(), "ab2cde6");
+
+ // An empty transaction yields no undo entry.
+ buffer.start_transaction_at(now);
+ assert!(buffer.end_transaction_at(now).is_none());
+ buffer.undo();
+ assert_eq!(buffer.text(), "12cde6");
+
+ // Redo stack gets cleared after performing an edit.
+ buffer.start_transaction_at(now);
+ buffer.edit([(0..0, "X")]);
+ buffer.end_transaction_at(now);
+ assert_eq!(buffer.text(), "X12cde6");
+ buffer.redo();
+ assert_eq!(buffer.text(), "X12cde6");
+ buffer.undo();
+ assert_eq!(buffer.text(), "12cde6");
+ buffer.undo();
+ assert_eq!(buffer.text(), "123456");
+
+ // Transactions can be grouped manually.
+ buffer.redo();
+ buffer.redo();
+ assert_eq!(buffer.text(), "X12cde6");
+ buffer.group_until_transaction(transaction_1);
+ buffer.undo();
+ assert_eq!(buffer.text(), "123456");
+ buffer.redo();
+ assert_eq!(buffer.text(), "X12cde6");
+}
+
+// Verifies that `finalize_last_transaction` prevents grouping: all three
+// transactions share the same `now`, but the finalized first transaction is
+// undone/redone separately, while the remaining two are merged together.
+#[test]
+fn test_finalize_last_transaction() {
+    let now = Instant::now();
+    let mut buffer = Buffer::new(0, 0, "123456".into());
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(2..4, "cd")]);
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "12cd56");
+
+    // Mark the first transaction as a grouping boundary.
+    buffer.finalize_last_transaction();
+    buffer.start_transaction_at(now);
+    buffer.edit([(4..5, "e")]);
+    buffer.end_transaction_at(now).unwrap();
+    assert_eq!(buffer.text(), "12cde6");
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(0..1, "a")]);
+    buffer.edit([(1..1, "b")]);
+    buffer.end_transaction_at(now).unwrap();
+    assert_eq!(buffer.text(), "ab2cde6");
+
+    // The last two transactions undo together; the finalized one stays.
+    buffer.undo();
+    assert_eq!(buffer.text(), "12cd56");
+
+    buffer.undo();
+    assert_eq!(buffer.text(), "123456");
+
+    buffer.redo();
+    assert_eq!(buffer.text(), "12cd56");
+
+    buffer.redo();
+    assert_eq!(buffer.text(), "ab2cde6");
+}
+
+// Verifies `edited_ranges_for_transaction`: the ranges of a finalized
+// transaction are reported in current coordinates, shifting as later edits
+// insert text before or between them, and collapsing at their anchors when
+// text is inserted at a range boundary.
+#[test]
+fn test_edited_ranges_for_transaction() {
+    let now = Instant::now();
+    let mut buffer = Buffer::new(0, 0, "1234567".into());
+
+    buffer.start_transaction_at(now);
+    buffer.edit([(2..4, "cd")]);
+    buffer.edit([(6..6, "efg")]);
+    buffer.end_transaction_at(now);
+    assert_eq!(buffer.text(), "12cd56efg7");
+
+    let tx = buffer.finalize_last_transaction().unwrap().clone();
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 6..9]
+    );
+
+    // An insertion between the two ranges shifts only the second one.
+    buffer.edit([(5..5, "hijk")]);
+    assert_eq!(buffer.text(), "12cd5hijk6efg7");
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 10..13]
+    );
+
+    // An insertion at the end of the first range does not extend it.
+    buffer.edit([(4..4, "l")]);
+    assert_eq!(buffer.text(), "12cdl5hijk6efg7");
+    assert_eq!(
+        buffer
+            .edited_ranges_for_transaction::<usize>(&tx)
+            .collect::<Vec<_>>(),
+        [2..4, 11..14]
+    );
+}
+
+// Convergence check: three replicas make concurrent edits at disjoint
+// positions, then exchange operations in different orders; all replicas must
+// end up with identical text.
+#[test]
+fn test_concurrent_edits() {
+    let text = "abcdef";
+
+    let mut buffer1 = Buffer::new(1, 0, text.into());
+    let mut buffer2 = Buffer::new(2, 0, text.into());
+    let mut buffer3 = Buffer::new(3, 0, text.into());
+
+    let buf1_op = buffer1.edit([(1..2, "12")]);
+    assert_eq!(buffer1.text(), "a12cdef");
+    let buf2_op = buffer2.edit([(3..4, "34")]);
+    assert_eq!(buffer2.text(), "abc34ef");
+    let buf3_op = buffer3.edit([(5..6, "56")]);
+    assert_eq!(buffer3.text(), "abcde56");
+
+    // Apply each remote op to the other two replicas, in varying orders.
+    buffer1.apply_op(buf2_op.clone()).unwrap();
+    buffer1.apply_op(buf3_op.clone()).unwrap();
+    buffer2.apply_op(buf1_op.clone()).unwrap();
+    buffer2.apply_op(buf3_op).unwrap();
+    buffer3.apply_op(buf1_op).unwrap();
+    buffer3.apply_op(buf2_op).unwrap();
+
+    assert_eq!(buffer1.text(), "a12c34e56");
+    assert_eq!(buffer2.text(), "a12c34e56");
+    assert_eq!(buffer3.text(), "a12c34e56");
+}
+
+// Randomized convergence test: a configurable number of peers (PEERS env var)
+// perform random edits and undo/redo operations (OPERATIONS env var),
+// broadcasting ops over a simulated `Network` that can delay delivery. After
+// the network drains, every replica must have identical text and satisfy its
+// internal invariants. Runs 100 seeded iterations via the gpui2 test macro.
+#[gpui2::test(iterations = 100)]
+fn test_random_concurrent_edits(mut rng: StdRng) {
+    let peers = env::var("PEERS")
+        .map(|i| i.parse().expect("invalid `PEERS` variable"))
+        .unwrap_or(5);
+    let operations = env::var("OPERATIONS")
+        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+        .unwrap_or(10);
+
+    let base_text_len = rng.gen_range(0..10);
+    let base_text = RandomCharIter::new(&mut rng)
+        .take(base_text_len)
+        .collect::<String>();
+    let mut replica_ids = Vec::new();
+    let mut buffers = Vec::new();
+    let mut network = Network::new(rng.clone());
+
+    // Each peer gets its own buffer and a randomized group interval.
+    for i in 0..peers {
+        let mut buffer = Buffer::new(i as ReplicaId, 0, base_text.clone());
+        buffer.history.group_interval = Duration::from_millis(rng.gen_range(0..=200));
+        buffers.push(buffer);
+        replica_ids.push(i as u16);
+        network.add_peer(i as u16);
+    }
+
+    log::info!("initial text: {:?}", base_text);
+
+    let mut mutation_count = operations;
+    loop {
+        let replica_index = rng.gen_range(0..peers);
+        let replica_id = replica_ids[replica_index];
+        let buffer = &mut buffers[replica_index];
+        match rng.gen_range(0..=100) {
+            // ~50%: random edit, broadcast the resulting op.
+            0..=50 if mutation_count != 0 => {
+                let op = buffer.randomly_edit(&mut rng, 5).1;
+                network.broadcast(buffer.replica_id, vec![op]);
+                log::info!("buffer {} text: {:?}", buffer.replica_id, buffer.text());
+                mutation_count -= 1;
+            }
+            // ~20%: random undo/redo, broadcast the resulting ops.
+            51..=70 if mutation_count != 0 => {
+                let ops = buffer.randomly_undo_redo(&mut rng);
+                network.broadcast(buffer.replica_id, ops);
+                mutation_count -= 1;
+            }
+            // ~30%: deliver pending remote ops to this replica.
+            71..=100 if network.has_unreceived(replica_id) => {
+                let ops = network.receive(replica_id);
+                if !ops.is_empty() {
+                    log::info!(
+                        "peer {} applying {} ops from the network.",
+                        replica_id,
+                        ops.len()
+                    );
+                    buffer.apply_ops(ops).unwrap();
+                }
+            }
+            _ => {}
+        }
+        buffer.check_invariants();
+
+        // Stop once all mutations happened and every op was delivered.
+        if mutation_count == 0 && network.is_idle() {
+            break;
+        }
+    }
+
+    // All replicas must have converged to the same text.
+    let first_buffer = &buffers[0];
+    for buffer in &buffers[1..] {
+        assert_eq!(
+            buffer.text(),
+            first_buffer.text(),
+            "Replica {} text != Replica 0 text",
+            buffer.replica_id
+        );
+        buffer.check_invariants();
+    }
+}
@@ -0,0 +1,2682 @@
+mod anchor;
+pub mod locator;
+#[cfg(any(test, feature = "test-support"))]
+pub mod network;
+pub mod operation_queue;
+mod patch;
+mod selection;
+pub mod subscription;
+#[cfg(test)]
+mod tests;
+mod undo_map;
+
+pub use anchor::*;
+use anyhow::{anyhow, Result};
+pub use clock::ReplicaId;
+use collections::{HashMap, HashSet};
+use locator::Locator;
+use operation_queue::OperationQueue;
+pub use patch::Patch;
+use postage::{oneshot, prelude::*};
+
+use lazy_static::lazy_static;
+use regex::Regex;
+pub use rope::*;
+pub use selection::*;
+use std::{
+ borrow::Cow,
+ cmp::{self, Ordering, Reverse},
+ future::Future,
+ iter::Iterator,
+ ops::{self, Deref, Range, Sub},
+ str,
+ sync::Arc,
+ time::{Duration, Instant},
+};
+pub use subscription::*;
+pub use sum_tree::Bias;
+use sum_tree::{FilterCursor, SumTree, TreeMap};
+use undo_map::UndoMap;
+use util::ResultExt;
+
+#[cfg(any(test, feature = "test-support"))]
+use util::RandomCharIter;
+
+lazy_static! {
+    // Matches line separators other than plain "\n": CRLF, lone CR, and the
+    // Unicode LINE SEPARATOR (U+2028) / PARAGRAPH SEPARATOR (U+2029).
+    static ref LINE_SEPARATORS_REGEX: Regex = Regex::new("\r\n|\r|\u{2028}|\u{2029}").unwrap();
+}
+
+// Transactions are identified by the Lamport timestamp minted when they start.
+pub type TransactionId = clock::Lamport;
+
+/// A collaboratively-editable text buffer: an immutable snapshot of the
+/// current state plus per-replica undo/redo history and the machinery for
+/// deferring and applying remote operations.
+pub struct Buffer {
+    // Current text/fragment state; handed out by `snapshot()`.
+    snapshot: BufferSnapshot,
+    // Undo/redo stacks plus the log of every operation seen.
+    history: History,
+    // Remote operations that could not be applied yet (see `apply_ops`).
+    deferred_ops: OperationQueue<Operation>,
+    // Replicas that currently have at least one deferred op.
+    deferred_replicas: HashSet<ReplicaId>,
+    pub lamport_clock: clock::Lamport,
+    // Subscribers notified with an edit `Patch` on every local edit.
+    subscriptions: Topic,
+    // Senders resolved once the edit with the given timestamp is applied.
+    edit_id_resolvers: HashMap<clock::Lamport, Vec<oneshot::Sender<()>>>,
+    // Senders resolved once this buffer's version observes the given version.
+    wait_for_version_txs: Vec<(clock::Global, oneshot::Sender<()>)>,
+}
+
+/// Immutable, cloneable view of a buffer's state at a point in time.
+#[derive(Clone)]
+pub struct BufferSnapshot {
+    replica_id: ReplicaId,
+    remote_id: u64,
+    // Text of all currently visible fragments.
+    visible_text: Rope,
+    // Text of all deleted fragments, kept so edits can be undone.
+    deleted_text: Rope,
+    line_ending: LineEnding,
+    // Tracks how many times each edit has been undone.
+    undo_map: UndoMap,
+    // Ordered tree of text fragments (visible and deleted).
+    fragments: SumTree<Fragment>,
+    // Index from (insertion timestamp, split offset) to fragment id.
+    insertions: SumTree<InsertionFragment>,
+    pub version: clock::Global,
+}
+
+/// One entry on the undo or redo stack: a transaction plus the timestamps
+/// used by `History::group` to decide whether adjacent entries merge.
+#[derive(Clone, Debug)]
+pub struct HistoryEntry {
+    transaction: Transaction,
+    first_edit_at: Instant,
+    last_edit_at: Instant,
+    // Set by `finalize_last_transaction`; blocks grouping across this entry.
+    suppress_grouping: bool,
+}
+
+/// A group of edits that undo/redo together.
+#[derive(Clone, Debug)]
+pub struct Transaction {
+    pub id: TransactionId,
+    // Lamport timestamps of the edit operations in this transaction.
+    pub edit_ids: Vec<clock::Lamport>,
+    // Buffer version when the transaction began.
+    pub start: clock::Global,
+}
+
+impl HistoryEntry {
+    /// Id of the transaction stored in this entry.
+    pub fn transaction_id(&self) -> TransactionId {
+        self.transaction.id
+    }
+}
+
+// Per-replica history: operation log, undo/redo stacks, and grouping state.
+struct History {
+    base_text: Rope,
+    // Every operation ever pushed, keyed by its Lamport timestamp.
+    operations: TreeMap<clock::Lamport, Operation>,
+    // For each edit, the slices of insertions it produced or deleted.
+    insertion_slices: HashMap<clock::Lamport, Vec<InsertionSlice>>,
+    undo_stack: Vec<HistoryEntry>,
+    redo_stack: Vec<HistoryEntry>,
+    // Nesting depth of `start_transaction`/`end_transaction` calls.
+    transaction_depth: usize,
+    // Max gap between edits for them to be grouped into one undo step.
+    group_interval: Duration,
+}
+
+/// A byte range within the text of a single insertion, identified by the
+/// insertion's Lamport timestamp.
+#[derive(Clone, Debug)]
+struct InsertionSlice {
+    insertion_id: clock::Lamport,
+    range: Range<usize>,
+}
+
+// Undo/redo bookkeeping. Invariant: mutating methods assert on
+// `transaction_depth` so stack operations only happen outside (or at the
+// boundary of) an open transaction.
+impl History {
+    pub fn new(base_text: Rope) -> Self {
+        Self {
+            base_text,
+            operations: Default::default(),
+            insertion_slices: Default::default(),
+            undo_stack: Vec::new(),
+            redo_stack: Vec::new(),
+            transaction_depth: 0,
+            // Don't group transactions in tests unless we opt in, because it's a footgun.
+            #[cfg(any(test, feature = "test-support"))]
+            group_interval: Duration::ZERO,
+            #[cfg(not(any(test, feature = "test-support")))]
+            group_interval: Duration::from_millis(300),
+        }
+    }
+
+    // Record an operation in the log, keyed by its timestamp.
+    fn push(&mut self, op: Operation) {
+        self.operations.insert(op.timestamp(), op);
+    }
+
+    // Enter a (possibly nested) transaction. Only the outermost call pushes a
+    // new undo entry and returns its id; nested calls return None.
+    fn start_transaction(
+        &mut self,
+        start: clock::Global,
+        now: Instant,
+        clock: &mut clock::Lamport,
+    ) -> Option<TransactionId> {
+        self.transaction_depth += 1;
+        if self.transaction_depth == 1 {
+            let id = clock.tick();
+            self.undo_stack.push(HistoryEntry {
+                transaction: Transaction {
+                    id,
+                    start,
+                    edit_ids: Default::default(),
+                },
+                first_edit_at: now,
+                last_edit_at: now,
+                suppress_grouping: false,
+            });
+            Some(id)
+        } else {
+            None
+        }
+    }
+
+    // Leave a transaction. When the outermost one closes: drop it if it made
+    // no edits, otherwise stamp its end time and clear the redo stack.
+    fn end_transaction(&mut self, now: Instant) -> Option<&HistoryEntry> {
+        assert_ne!(self.transaction_depth, 0);
+        self.transaction_depth -= 1;
+        if self.transaction_depth == 0 {
+            if self
+                .undo_stack
+                .last()
+                .unwrap()
+                .transaction
+                .edit_ids
+                .is_empty()
+            {
+                self.undo_stack.pop();
+                None
+            } else {
+                self.redo_stack.clear();
+                let entry = self.undo_stack.last_mut().unwrap();
+                entry.last_edit_at = now;
+                Some(entry)
+            }
+        } else {
+            None
+        }
+    }
+
+    // Merge the trailing run of undo entries whose inter-edit gaps are within
+    // `group_interval` (stopping at any entry marked `suppress_grouping`).
+    fn group(&mut self) -> Option<TransactionId> {
+        let mut count = 0;
+        let mut entries = self.undo_stack.iter();
+        if let Some(mut entry) = entries.next_back() {
+            while let Some(prev_entry) = entries.next_back() {
+                if !prev_entry.suppress_grouping
+                    && entry.first_edit_at - prev_entry.last_edit_at <= self.group_interval
+                {
+                    entry = prev_entry;
+                    count += 1;
+                } else {
+                    break;
+                }
+            }
+        }
+        self.group_trailing(count)
+    }
+
+    // Merge all undo entries newer than `transaction_id` into it, unless a
+    // `suppress_grouping` boundary intervenes first.
+    fn group_until(&mut self, transaction_id: TransactionId) {
+        let mut count = 0;
+        for entry in self.undo_stack.iter().rev() {
+            if entry.transaction_id() == transaction_id {
+                self.group_trailing(count);
+                break;
+            } else if entry.suppress_grouping {
+                break;
+            } else {
+                count += 1;
+            }
+        }
+    }
+
+    // Fold the last `n` undo entries into their predecessor, transferring
+    // their edit ids and end timestamp; returns the merged entry's id.
+    fn group_trailing(&mut self, n: usize) -> Option<TransactionId> {
+        let new_len = self.undo_stack.len() - n;
+        let (entries_to_keep, entries_to_merge) = self.undo_stack.split_at_mut(new_len);
+        if let Some(last_entry) = entries_to_keep.last_mut() {
+            for entry in &*entries_to_merge {
+                for edit_id in &entry.transaction.edit_ids {
+                    last_entry.transaction.edit_ids.push(*edit_id);
+                }
+            }
+
+            if let Some(entry) = entries_to_merge.last_mut() {
+                last_entry.last_edit_at = entry.last_edit_at;
+            }
+        }
+
+        self.undo_stack.truncate(new_len);
+        self.undo_stack.last().map(|e| e.transaction.id)
+    }
+
+    // Mark the newest undo entry so later transactions never group into it.
+    fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
+        self.undo_stack.last_mut().map(|entry| {
+            entry.suppress_grouping = true;
+            &entry.transaction
+        })
+    }
+
+    // Push an externally-constructed transaction; like a new edit, this
+    // invalidates the redo stack.
+    fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
+        assert_eq!(self.transaction_depth, 0);
+        self.undo_stack.push(HistoryEntry {
+            transaction,
+            first_edit_at: now,
+            last_edit_at: now,
+            suppress_grouping: false,
+        });
+        self.redo_stack.clear();
+    }
+
+    // Record an edit operation id in the currently open transaction.
+    // Non-edit operations are ignored.
+    fn push_undo(&mut self, op_id: clock::Lamport) {
+        assert_ne!(self.transaction_depth, 0);
+        if let Some(Operation::Edit(_)) = self.operations.get(&op_id) {
+            let last_transaction = self.undo_stack.last_mut().unwrap();
+            last_transaction.transaction.edit_ids.push(op_id);
+        }
+    }
+
+    // Move the newest undo entry onto the redo stack and return it.
+    fn pop_undo(&mut self) -> Option<&HistoryEntry> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(entry) = self.undo_stack.pop() {
+            self.redo_stack.push(entry);
+            self.redo_stack.last()
+        } else {
+            None
+        }
+    }
+
+    // Move a specific undo entry (searched newest-first) onto the redo stack.
+    fn remove_from_undo(&mut self, transaction_id: TransactionId) -> Option<&HistoryEntry> {
+        assert_eq!(self.transaction_depth, 0);
+
+        let entry_ix = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)?;
+        let entry = self.undo_stack.remove(entry_ix);
+        self.redo_stack.push(entry);
+        self.redo_stack.last()
+    }
+
+    // Move every undo entry from `transaction_id` to the top onto the redo
+    // stack (newest pushed first); returns the entries that were moved.
+    fn remove_from_undo_until(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
+        assert_eq!(self.transaction_depth, 0);
+
+        let redo_stack_start_len = self.redo_stack.len();
+        if let Some(entry_ix) = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            self.redo_stack
+                .extend(self.undo_stack.drain(entry_ix..).rev());
+        }
+        &self.redo_stack[redo_stack_start_len..]
+    }
+
+    // Remove a transaction from whichever stack holds it, returning it.
+    fn forget(&mut self, transaction_id: TransactionId) -> Option<Transaction> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(entry_ix) = self
+            .undo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            Some(self.undo_stack.remove(entry_ix).transaction)
+        } else if let Some(entry_ix) = self
+            .redo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            Some(self.redo_stack.remove(entry_ix).transaction)
+        } else {
+            None
+        }
+    }
+
+    // Mutable access to a transaction on either stack, searched newest-first.
+    fn transaction_mut(&mut self, transaction_id: TransactionId) -> Option<&mut Transaction> {
+        let entry = self
+            .undo_stack
+            .iter_mut()
+            .rfind(|entry| entry.transaction.id == transaction_id)
+            .or_else(|| {
+                self.redo_stack
+                    .iter_mut()
+                    .rfind(|entry| entry.transaction.id == transaction_id)
+            })?;
+        Some(&mut entry.transaction)
+    }
+
+    // Fold `transaction`'s edit ids into `destination`, removing the source.
+    fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
+        if let Some(transaction) = self.forget(transaction) {
+            if let Some(destination) = self.transaction_mut(destination) {
+                destination.edit_ids.extend(transaction.edit_ids);
+            }
+        }
+    }
+
+    // Mirror of `pop_undo`: move the newest redo entry back onto undo.
+    fn pop_redo(&mut self) -> Option<&HistoryEntry> {
+        assert_eq!(self.transaction_depth, 0);
+        if let Some(entry) = self.redo_stack.pop() {
+            self.undo_stack.push(entry);
+            self.undo_stack.last()
+        } else {
+            None
+        }
+    }
+
+    // Mirror of `remove_from_undo_until` for the redo stack: move entries
+    // from `transaction_id` upward back onto undo, returning those moved.
+    fn remove_from_redo(&mut self, transaction_id: TransactionId) -> &[HistoryEntry] {
+        assert_eq!(self.transaction_depth, 0);
+
+        let undo_stack_start_len = self.undo_stack.len();
+        if let Some(entry_ix) = self
+            .redo_stack
+            .iter()
+            .rposition(|entry| entry.transaction.id == transaction_id)
+        {
+            self.undo_stack
+                .extend(self.redo_stack.drain(entry_ix..).rev());
+        }
+        &self.undo_stack[undo_stack_start_len..]
+    }
+}
+
+// Iterator state for walking the edits made since the version `since`,
+// yielding positions in dimension `D`. Holds cursors over the visible and
+// deleted ropes plus a filtered cursor over the fragment tree.
+// NOTE(review): the Iterator impl is outside this chunk — semantics inferred
+// from the field set; confirm against the impl.
+struct Edits<'a, D: TextDimension, F: FnMut(&FragmentSummary) -> bool> {
+    visible_cursor: rope::Cursor<'a>,
+    deleted_cursor: rope::Cursor<'a>,
+    fragments_cursor: Option<FilterCursor<'a, F, Fragment, FragmentTextSummary>>,
+    undos: &'a UndoMap,
+    since: &'a clock::Global,
+    old_end: D,
+    new_end: D,
+    range: Range<(&'a Locator, usize)>,
+    buffer_id: u64,
+}
+
+/// A single edit expressed as the replaced range in the old text (`old`) and
+/// the corresponding range in the new text (`new`), in dimension `D`.
+#[derive(Clone, Debug, Default, Eq, PartialEq)]
+pub struct Edit<D> {
+    pub old: Range<D>,
+    pub new: Range<D>,
+}
+
+impl<D> Edit<D>
+where
+    D: Sub<D, Output = D> + PartialEq + Copy,
+{
+    /// Length of the replaced range in the old text.
+    pub fn old_len(&self) -> D {
+        self.old.end - self.old.start
+    }
+
+    /// Length of the replacement range in the new text.
+    pub fn new_len(&self) -> D {
+        self.new.end - self.new.start
+    }
+
+    /// True when both ranges are empty, i.e. the edit changes nothing.
+    pub fn is_empty(&self) -> bool {
+        self.old.start == self.old.end && self.new.start == self.new.end
+    }
+}
+
+impl<D1, D2> Edit<(D1, D2)> {
+    /// Splits an edit measured in a pair of dimensions into one edit per
+    /// dimension.
+    pub fn flatten(self) -> (Edit<D1>, Edit<D2>) {
+        (
+            Edit {
+                old: self.old.start.0..self.old.end.0,
+                new: self.new.start.0..self.new.end.0,
+            },
+            Edit {
+                old: self.old.start.1..self.old.end.1,
+                new: self.new.start.1..self.new.end.1,
+            },
+        )
+    }
+}
+
+/// A contiguous slice of one insertion, stored in the fragment tree.
+#[derive(Eq, PartialEq, Clone, Debug)]
+pub struct Fragment {
+    // Position of this fragment in the total order of fragments.
+    pub id: Locator,
+    // Lamport timestamp of the insertion this fragment came from.
+    pub timestamp: clock::Lamport,
+    // Offset of this fragment within the original insertion's text.
+    pub insertion_offset: usize,
+    pub len: usize,
+    // False once the fragment has been deleted.
+    pub visible: bool,
+    // Timestamps of all delete operations that removed this fragment.
+    pub deletions: HashSet<clock::Lamport>,
+    pub max_undos: clock::Global,
+}
+
+/// SumTree summary aggregated over a run of fragments.
+#[derive(Eq, PartialEq, Clone, Debug)]
+pub struct FragmentSummary {
+    text: FragmentTextSummary,
+    // Largest fragment id in the summarized range.
+    max_id: Locator,
+    max_version: clock::Global,
+    min_insertion_version: clock::Global,
+    max_insertion_version: clock::Global,
+}
+
+/// Total text lengths of visible vs. deleted fragments in a summarized run.
+#[derive(Copy, Default, Clone, Debug, PartialEq, Eq)]
+struct FragmentTextSummary {
+    visible: usize,
+    deleted: usize,
+}
+
+// Lets FragmentTextSummary be used as a SumTree cursor dimension by
+// accumulating visible/deleted lengths from each fragment summary.
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FragmentTextSummary {
+    fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
+        self.visible += summary.text.visible;
+        self.deleted += summary.text.deleted;
+    }
+}
+
+/// Index entry mapping a point within an insertion (its timestamp plus the
+/// offset where the insertion was split) to the fragment containing it.
+#[derive(Eq, PartialEq, Clone, Debug)]
+struct InsertionFragment {
+    timestamp: clock::Lamport,
+    split_offset: usize,
+    fragment_id: Locator,
+}
+
+/// Ordering key for the insertions index: insertion timestamp, then the
+/// split offset within that insertion.
+#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
+struct InsertionFragmentKey {
+    timestamp: clock::Lamport,
+    split_offset: usize,
+}
+
+/// An operation exchanged between replicas: either an edit or an undo.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub enum Operation {
+    Edit(EditOperation),
+    Undo(UndoOperation),
+}
+
+/// A replicated edit: parallel lists of replaced ranges and replacement
+/// texts, with ranges expressed as `FullOffset`s (visible + deleted text)
+/// relative to `version`.
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct EditOperation {
+    pub timestamp: clock::Lamport,
+    // Buffer version the ranges were computed against.
+    pub version: clock::Global,
+    pub ranges: Vec<Range<FullOffset>>,
+    pub new_text: Vec<Arc<str>>,
+}
+
+/// A replicated undo/redo. `counts` maps each affected edit's timestamp to
+/// its new undo count (tracked in the snapshot's `UndoMap`).
+#[derive(Clone, Debug, Eq, PartialEq)]
+pub struct UndoOperation {
+    pub timestamp: clock::Lamport,
+    pub version: clock::Global,
+    pub counts: HashMap<clock::Lamport, u32>,
+}
+
+impl Buffer {
+    /// Creates a buffer for `replica_id` containing `base_text`. Detects and
+    /// normalizes the text's line endings, then seeds the fragment tree with
+    /// a single fragment for the base text (if non-empty), attributed to a
+    /// synthetic insertion from replica 0 so every replica agrees on it.
+    pub fn new(replica_id: u16, remote_id: u64, mut base_text: String) -> Buffer {
+        let line_ending = LineEnding::detect(&base_text);
+        LineEnding::normalize(&mut base_text);
+
+        let history = History::new(Rope::from(base_text.as_ref()));
+        let mut fragments = SumTree::new();
+        let mut insertions = SumTree::new();
+
+        let mut lamport_clock = clock::Lamport::new(replica_id);
+        let mut version = clock::Global::new();
+
+        let visible_text = history.base_text.clone();
+        if !visible_text.is_empty() {
+            // Synthetic timestamp shared by all replicas for the base text.
+            let insertion_timestamp = clock::Lamport {
+                replica_id: 0,
+                value: 1,
+            };
+            lamport_clock.observe(insertion_timestamp);
+            version.observe(insertion_timestamp);
+            let fragment_id = Locator::between(&Locator::min(), &Locator::max());
+            let fragment = Fragment {
+                id: fragment_id,
+                timestamp: insertion_timestamp,
+                insertion_offset: 0,
+                len: visible_text.len(),
+                visible: true,
+                deletions: Default::default(),
+                max_undos: Default::default(),
+            };
+            insertions.push(InsertionFragment::new(&fragment), &());
+            fragments.push(fragment, &None);
+        }
+
+        Buffer {
+            snapshot: BufferSnapshot {
+                replica_id,
+                remote_id,
+                visible_text,
+                deleted_text: Rope::new(),
+                line_ending,
+                fragments,
+                insertions,
+                version,
+                undo_map: Default::default(),
+            },
+            history,
+            deferred_ops: OperationQueue::new(),
+            deferred_replicas: HashSet::default(),
+            lamport_clock,
+            subscriptions: Default::default(),
+            edit_id_resolvers: Default::default(),
+            wait_for_version_txs: Default::default(),
+        }
+    }
+
+    /// Copy of the vector clock of operations this buffer has observed.
+    pub fn version(&self) -> clock::Global {
+        self.version.clone()
+    }
+
+    /// Clones the immutable snapshot of the buffer's current state.
+    pub fn snapshot(&self) -> BufferSnapshot {
+        self.snapshot.clone()
+    }
+
+    pub fn replica_id(&self) -> ReplicaId {
+        self.lamport_clock.replica_id
+    }
+
+    pub fn remote_id(&self) -> u64 {
+        self.remote_id
+    }
+
+    /// Number of remote operations still waiting to be applied.
+    pub fn deferred_ops_len(&self) -> usize {
+        self.deferred_ops.len()
+    }
+
+    /// Max gap between edits for them to share one undo step.
+    pub fn transaction_group_interval(&self) -> Duration {
+        self.history.group_interval
+    }
+
+ pub fn edit<R, I, S, T>(&mut self, edits: R) -> Operation
+ where
+ R: IntoIterator<IntoIter = I>,
+ I: ExactSizeIterator<Item = (Range<S>, T)>,
+ S: ToOffset,
+ T: Into<Arc<str>>,
+ {
+ let edits = edits
+ .into_iter()
+ .map(|(range, new_text)| (range, new_text.into()));
+
+ self.start_transaction();
+ let timestamp = self.lamport_clock.tick();
+ let operation = Operation::Edit(self.apply_local_edit(edits, timestamp));
+
+ self.history.push(operation.clone());
+ self.history.push_undo(operation.timestamp());
+ self.snapshot.version.observe(operation.timestamp());
+ self.end_transaction();
+ operation
+ }
+
+    /// Core of a local edit: rebuilds the fragment tree, insertion index,
+    /// and visible/deleted ropes by walking the old fragments once (edits
+    /// must be sorted, as each iteration only ever advances the cursor).
+    /// Publishes the resulting `Patch` to subscribers and records the
+    /// insertion slices for this edit's timestamp. Returns the
+    /// `EditOperation` describing the edit in `FullOffset` coordinates.
+    fn apply_local_edit<S: ToOffset, T: Into<Arc<str>>>(
+        &mut self,
+        edits: impl ExactSizeIterator<Item = (Range<S>, T)>,
+        timestamp: clock::Lamport,
+    ) -> EditOperation {
+        let mut edits_patch = Patch::default();
+        let mut edit_op = EditOperation {
+            timestamp,
+            version: self.version(),
+            ranges: Vec::with_capacity(edits.len()),
+            new_text: Vec::with_capacity(edits.len()),
+        };
+        let mut new_insertions = Vec::new();
+        // Offset of the next inserted fragment within this edit's insertion.
+        let mut insertion_offset = 0;
+        let mut insertion_slices = Vec::new();
+
+        let mut edits = edits
+            .map(|(range, new_text)| (range.to_offset(&*self), new_text))
+            .peekable();
+
+        let mut new_ropes =
+            RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
+        let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>();
+        let mut new_fragments =
+            old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None);
+        new_ropes.append(new_fragments.summary().text);
+
+        let mut fragment_start = old_fragments.start().visible;
+        for (range, new_text) in edits {
+            let new_text = LineEnding::normalize_arc(new_text.into());
+            let fragment_end = old_fragments.end(&None).visible;
+
+            // If the current fragment ends before this range, then jump ahead to the first fragment
+            // that extends past the start of this range, reusing any intervening fragments.
+            if fragment_end < range.start {
+                // If the current fragment has been partially consumed, then consume the rest of it
+                // and advance to the next fragment before slicing.
+                if fragment_start > old_fragments.start().visible {
+                    if fragment_end > fragment_start {
+                        let mut suffix = old_fragments.item().unwrap().clone();
+                        suffix.len = fragment_end - fragment_start;
+                        suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+                        new_insertions.push(InsertionFragment::insert_new(&suffix));
+                        new_ropes.push_fragment(&suffix, suffix.visible);
+                        new_fragments.push(suffix, &None);
+                    }
+                    old_fragments.next(&None);
+                }
+
+                let slice = old_fragments.slice(&range.start, Bias::Right, &None);
+                new_ropes.append(slice.summary().text);
+                new_fragments.append(slice, &None);
+                fragment_start = old_fragments.start().visible;
+            }
+
+            let full_range_start = FullOffset(range.start + old_fragments.start().deleted);
+
+            // Preserve any portion of the current fragment that precedes this range.
+            if fragment_start < range.start {
+                let mut prefix = old_fragments.item().unwrap().clone();
+                prefix.len = range.start - fragment_start;
+                prefix.insertion_offset += fragment_start - old_fragments.start().visible;
+                prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
+                new_insertions.push(InsertionFragment::insert_new(&prefix));
+                new_ropes.push_fragment(&prefix, prefix.visible);
+                new_fragments.push(prefix, &None);
+                fragment_start = range.start;
+            }
+
+            // Insert the new text before any existing fragments within the range.
+            if !new_text.is_empty() {
+                let new_start = new_fragments.summary().text.visible;
+
+                let fragment = Fragment {
+                    id: Locator::between(
+                        &new_fragments.summary().max_id,
+                        old_fragments
+                            .item()
+                            .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
+                    ),
+                    timestamp,
+                    insertion_offset,
+                    len: new_text.len(),
+                    deletions: Default::default(),
+                    max_undos: Default::default(),
+                    visible: true,
+                };
+                edits_patch.push(Edit {
+                    old: fragment_start..fragment_start,
+                    new: new_start..new_start + new_text.len(),
+                });
+                insertion_slices.push(fragment.insertion_slice());
+                new_insertions.push(InsertionFragment::insert_new(&fragment));
+                new_ropes.push_str(new_text.as_ref());
+                new_fragments.push(fragment, &None);
+                insertion_offset += new_text.len();
+            }
+
+            // Advance through every fragment that intersects this range, marking the intersecting
+            // portions as deleted.
+            while fragment_start < range.end {
+                let fragment = old_fragments.item().unwrap();
+                let fragment_end = old_fragments.end(&None).visible;
+                let mut intersection = fragment.clone();
+                let intersection_end = cmp::min(range.end, fragment_end);
+                if fragment.visible {
+                    intersection.len = intersection_end - fragment_start;
+                    intersection.insertion_offset += fragment_start - old_fragments.start().visible;
+                    intersection.id =
+                        Locator::between(&new_fragments.summary().max_id, &intersection.id);
+                    intersection.deletions.insert(timestamp);
+                    intersection.visible = false;
+                }
+                if intersection.len > 0 {
+                    if fragment.visible && !intersection.visible {
+                        let new_start = new_fragments.summary().text.visible;
+                        edits_patch.push(Edit {
+                            old: fragment_start..intersection_end,
+                            new: new_start..new_start,
+                        });
+                        insertion_slices.push(intersection.insertion_slice());
+                    }
+                    new_insertions.push(InsertionFragment::insert_new(&intersection));
+                    new_ropes.push_fragment(&intersection, fragment.visible);
+                    new_fragments.push(intersection, &None);
+                    fragment_start = intersection_end;
+                }
+                if fragment_end <= range.end {
+                    old_fragments.next(&None);
+                }
+            }
+
+            let full_range_end = FullOffset(range.end + old_fragments.start().deleted);
+            edit_op.ranges.push(full_range_start..full_range_end);
+            edit_op.new_text.push(new_text);
+        }
+
+        // If the current fragment has been partially consumed, then consume the rest of it
+        // and advance to the next fragment before slicing.
+        if fragment_start > old_fragments.start().visible {
+            let fragment_end = old_fragments.end(&None).visible;
+            if fragment_end > fragment_start {
+                let mut suffix = old_fragments.item().unwrap().clone();
+                suffix.len = fragment_end - fragment_start;
+                suffix.insertion_offset += fragment_start - old_fragments.start().visible;
+                new_insertions.push(InsertionFragment::insert_new(&suffix));
+                new_ropes.push_fragment(&suffix, suffix.visible);
+                new_fragments.push(suffix, &None);
+            }
+            old_fragments.next(&None);
+        }
+
+        // Carry over everything after the last edit unchanged.
+        let suffix = old_fragments.suffix(&None);
+        new_ropes.append(suffix.summary().text);
+        new_fragments.append(suffix, &None);
+        let (visible_text, deleted_text) = new_ropes.finish();
+        drop(old_fragments);
+
+        self.snapshot.fragments = new_fragments;
+        self.snapshot.insertions.edit(new_insertions, &());
+        self.snapshot.visible_text = visible_text;
+        self.snapshot.deleted_text = deleted_text;
+        self.subscriptions.publish_mut(&edits_patch);
+        self.history
+            .insertion_slices
+            .insert(timestamp, insertion_slices);
+        edit_op
+    }
+
+    /// Overrides the line ending recorded in the snapshot.
+    pub fn set_line_ending(&mut self, line_ending: LineEnding) {
+        self.snapshot.line_ending = line_ending;
+    }
+
+    /// Applies a batch of remote operations. Each op is logged; ops that
+    /// can't be applied yet are queued (and their replica marked deferred)
+    /// until `flush_deferred_ops` can apply them.
+    pub fn apply_ops<I: IntoIterator<Item = Operation>>(&mut self, ops: I) -> Result<()> {
+        let mut deferred_ops = Vec::new();
+        for op in ops {
+            self.history.push(op.clone());
+            if self.can_apply_op(&op) {
+                self.apply_op(op)?;
+            } else {
+                self.deferred_replicas.insert(op.replica_id());
+                deferred_ops.push(op);
+            }
+        }
+        self.deferred_ops.insert(deferred_ops);
+        // Newly applied ops may have unblocked previously deferred ones.
+        self.flush_deferred_ops()?;
+        Ok(())
+    }
+
+    /// Applies one remote operation. Ops whose timestamp is already observed
+    /// are skipped (idempotent). Afterwards, wakes any `wait_for_version`
+    /// waiters whose target version is now fully observed.
+    fn apply_op(&mut self, op: Operation) -> Result<()> {
+        match op {
+            Operation::Edit(edit) => {
+                if !self.version.observed(edit.timestamp) {
+                    self.apply_remote_edit(
+                        &edit.version,
+                        &edit.ranges,
+                        &edit.new_text,
+                        edit.timestamp,
+                    );
+                    self.snapshot.version.observe(edit.timestamp);
+                    self.lamport_clock.observe(edit.timestamp);
+                    // Resolve anyone waiting on this specific edit id.
+                    self.resolve_edit(edit.timestamp);
+                }
+            }
+            Operation::Undo(undo) => {
+                if !self.version.observed(undo.timestamp) {
+                    self.apply_undo(&undo)?;
+                    self.snapshot.version.observe(undo.timestamp);
+                    self.lamport_clock.observe(undo.timestamp);
+                }
+            }
+        }
+        self.wait_for_version_txs.retain_mut(|(version, tx)| {
+            if self.snapshot.version().observed_all(version) {
+                tx.try_send(()).ok();
+                false
+            } else {
+                true
+            }
+        });
+        Ok(())
+    }
+
+ fn apply_remote_edit(
+ &mut self,
+ version: &clock::Global,
+ ranges: &[Range<FullOffset>],
+ new_text: &[Arc<str>],
+ timestamp: clock::Lamport,
+ ) {
+ if ranges.is_empty() {
+ return;
+ }
+
+ let edits = ranges.iter().zip(new_text.iter());
+ let mut edits_patch = Patch::default();
+ let mut insertion_slices = Vec::new();
+ let cx = Some(version.clone());
+ let mut new_insertions = Vec::new();
+ let mut insertion_offset = 0;
+ let mut new_ropes =
+ RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
+ let mut old_fragments = self.fragments.cursor::<(VersionedFullOffset, usize)>();
+ let mut new_fragments = old_fragments.slice(
+ &VersionedFullOffset::Offset(ranges[0].start),
+ Bias::Left,
+ &cx,
+ );
+ new_ropes.append(new_fragments.summary().text);
+
+ let mut fragment_start = old_fragments.start().0.full_offset();
+ for (range, new_text) in edits {
+ let fragment_end = old_fragments.end(&cx).0.full_offset();
+
+ // If the current fragment ends before this range, then jump ahead to the first fragment
+ // that extends past the start of this range, reusing any intervening fragments.
+ if fragment_end < range.start {
+ // If the current fragment has been partially consumed, then consume the rest of it
+ // and advance to the next fragment before slicing.
+ if fragment_start > old_fragments.start().0.full_offset() {
+ if fragment_end > fragment_start {
+ let mut suffix = old_fragments.item().unwrap().clone();
+ suffix.len = fragment_end.0 - fragment_start.0;
+ suffix.insertion_offset +=
+ fragment_start - old_fragments.start().0.full_offset();
+ new_insertions.push(InsertionFragment::insert_new(&suffix));
+ new_ropes.push_fragment(&suffix, suffix.visible);
+ new_fragments.push(suffix, &None);
+ }
+ old_fragments.next(&cx);
+ }
+
+ let slice =
+ old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
+ new_ropes.append(slice.summary().text);
+ new_fragments.append(slice, &None);
+ fragment_start = old_fragments.start().0.full_offset();
+ }
+
+ // If we are at the end of a non-concurrent fragment, advance to the next one.
+ let fragment_end = old_fragments.end(&cx).0.full_offset();
+ if fragment_end == range.start && fragment_end > fragment_start {
+ let mut fragment = old_fragments.item().unwrap().clone();
+ fragment.len = fragment_end.0 - fragment_start.0;
+ fragment.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+ new_insertions.push(InsertionFragment::insert_new(&fragment));
+ new_ropes.push_fragment(&fragment, fragment.visible);
+ new_fragments.push(fragment, &None);
+ old_fragments.next(&cx);
+ fragment_start = old_fragments.start().0.full_offset();
+ }
+
+ // Skip over insertions that are concurrent to this edit, but have a lower lamport
+ // timestamp.
+ while let Some(fragment) = old_fragments.item() {
+ if fragment_start == range.start && fragment.timestamp > timestamp {
+ new_ropes.push_fragment(fragment, fragment.visible);
+ new_fragments.push(fragment.clone(), &None);
+ old_fragments.next(&cx);
+ debug_assert_eq!(fragment_start, range.start);
+ } else {
+ break;
+ }
+ }
+ debug_assert!(fragment_start <= range.start);
+
+ // Preserve any portion of the current fragment that precedes this range.
+ if fragment_start < range.start {
+ let mut prefix = old_fragments.item().unwrap().clone();
+ prefix.len = range.start.0 - fragment_start.0;
+ prefix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+ prefix.id = Locator::between(&new_fragments.summary().max_id, &prefix.id);
+ new_insertions.push(InsertionFragment::insert_new(&prefix));
+ fragment_start = range.start;
+ new_ropes.push_fragment(&prefix, prefix.visible);
+ new_fragments.push(prefix, &None);
+ }
+
+ // Insert the new text before any existing fragments within the range.
+ if !new_text.is_empty() {
+ let mut old_start = old_fragments.start().1;
+ if old_fragments.item().map_or(false, |f| f.visible) {
+ old_start += fragment_start.0 - old_fragments.start().0.full_offset().0;
+ }
+ let new_start = new_fragments.summary().text.visible;
+ let fragment = Fragment {
+ id: Locator::between(
+ &new_fragments.summary().max_id,
+ old_fragments
+ .item()
+ .map_or(&Locator::max(), |old_fragment| &old_fragment.id),
+ ),
+ timestamp,
+ insertion_offset,
+ len: new_text.len(),
+ deletions: Default::default(),
+ max_undos: Default::default(),
+ visible: true,
+ };
+ edits_patch.push(Edit {
+ old: old_start..old_start,
+ new: new_start..new_start + new_text.len(),
+ });
+ insertion_slices.push(fragment.insertion_slice());
+ new_insertions.push(InsertionFragment::insert_new(&fragment));
+ new_ropes.push_str(new_text);
+ new_fragments.push(fragment, &None);
+ insertion_offset += new_text.len();
+ }
+
+ // Advance through every fragment that intersects this range, marking the intersecting
+ // portions as deleted.
+ while fragment_start < range.end {
+ let fragment = old_fragments.item().unwrap();
+ let fragment_end = old_fragments.end(&cx).0.full_offset();
+ let mut intersection = fragment.clone();
+ let intersection_end = cmp::min(range.end, fragment_end);
+ if fragment.was_visible(version, &self.undo_map) {
+ intersection.len = intersection_end.0 - fragment_start.0;
+ intersection.insertion_offset +=
+ fragment_start - old_fragments.start().0.full_offset();
+ intersection.id =
+ Locator::between(&new_fragments.summary().max_id, &intersection.id);
+ intersection.deletions.insert(timestamp);
+ intersection.visible = false;
+ insertion_slices.push(intersection.insertion_slice());
+ }
+ if intersection.len > 0 {
+ if fragment.visible && !intersection.visible {
+ let old_start = old_fragments.start().1
+ + (fragment_start.0 - old_fragments.start().0.full_offset().0);
+ let new_start = new_fragments.summary().text.visible;
+ edits_patch.push(Edit {
+ old: old_start..old_start + intersection.len,
+ new: new_start..new_start,
+ });
+ }
+ new_insertions.push(InsertionFragment::insert_new(&intersection));
+ new_ropes.push_fragment(&intersection, fragment.visible);
+ new_fragments.push(intersection, &None);
+ fragment_start = intersection_end;
+ }
+ if fragment_end <= range.end {
+ old_fragments.next(&cx);
+ }
+ }
+ }
+
+ // If the current fragment has been partially consumed, then consume the rest of it
+ // and advance to the next fragment before slicing.
+ if fragment_start > old_fragments.start().0.full_offset() {
+ let fragment_end = old_fragments.end(&cx).0.full_offset();
+ if fragment_end > fragment_start {
+ let mut suffix = old_fragments.item().unwrap().clone();
+ suffix.len = fragment_end.0 - fragment_start.0;
+ suffix.insertion_offset += fragment_start - old_fragments.start().0.full_offset();
+ new_insertions.push(InsertionFragment::insert_new(&suffix));
+ new_ropes.push_fragment(&suffix, suffix.visible);
+ new_fragments.push(suffix, &None);
+ }
+ old_fragments.next(&cx);
+ }
+
+ let suffix = old_fragments.suffix(&cx);
+ new_ropes.append(suffix.summary().text);
+ new_fragments.append(suffix, &None);
+ let (visible_text, deleted_text) = new_ropes.finish();
+ drop(old_fragments);
+
+ self.snapshot.fragments = new_fragments;
+ self.snapshot.visible_text = visible_text;
+ self.snapshot.deleted_text = deleted_text;
+ self.snapshot.insertions.edit(new_insertions, &());
+ self.history
+ .insertion_slices
+ .insert(timestamp, insertion_slices);
+ self.subscriptions.publish_mut(&edits_patch)
+ }
+
+ fn fragment_ids_for_edits<'a>(
+ &'a self,
+ edit_ids: impl Iterator<Item = &'a clock::Lamport>,
+ ) -> Vec<&'a Locator> {
+ // Get all of the insertion slices changed by the given edits.
+ let mut insertion_slices = Vec::new();
+ for edit_id in edit_ids {
+ if let Some(slices) = self.history.insertion_slices.get(edit_id) {
+ insertion_slices.extend_from_slice(slices)
+ }
+ }
+ insertion_slices
+ .sort_unstable_by_key(|s| (s.insertion_id, s.range.start, Reverse(s.range.end)));
+
+ // Get all of the fragments corresponding to these insertion slices.
+ let mut fragment_ids = Vec::new();
+ let mut insertions_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+ for insertion_slice in &insertion_slices {
+ if insertion_slice.insertion_id != insertions_cursor.start().timestamp
+ || insertion_slice.range.start > insertions_cursor.start().split_offset
+ {
+ insertions_cursor.seek_forward(
+ &InsertionFragmentKey {
+ timestamp: insertion_slice.insertion_id,
+ split_offset: insertion_slice.range.start,
+ },
+ Bias::Left,
+ &(),
+ );
+ }
+ while let Some(item) = insertions_cursor.item() {
+ if item.timestamp != insertion_slice.insertion_id
+ || item.split_offset >= insertion_slice.range.end
+ {
+ break;
+ }
+ fragment_ids.push(&item.fragment_id);
+ insertions_cursor.next(&());
+ }
+ }
+ fragment_ids.sort_unstable();
+ fragment_ids
+ }
+
+ fn apply_undo(&mut self, undo: &UndoOperation) -> Result<()> {
+ self.snapshot.undo_map.insert(undo);
+
+ let mut edits = Patch::default();
+ let mut old_fragments = self.fragments.cursor::<(Option<&Locator>, usize)>();
+ let mut new_fragments = SumTree::new();
+ let mut new_ropes =
+ RopeBuilder::new(self.visible_text.cursor(0), self.deleted_text.cursor(0));
+
+ for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
+ let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None);
+ new_ropes.append(preceding_fragments.summary().text);
+ new_fragments.append(preceding_fragments, &None);
+
+ if let Some(fragment) = old_fragments.item() {
+ let mut fragment = fragment.clone();
+ let fragment_was_visible = fragment.visible;
+
+ fragment.visible = fragment.is_visible(&self.undo_map);
+ fragment.max_undos.observe(undo.timestamp);
+
+ let old_start = old_fragments.start().1;
+ let new_start = new_fragments.summary().text.visible;
+ if fragment_was_visible && !fragment.visible {
+ edits.push(Edit {
+ old: old_start..old_start + fragment.len,
+ new: new_start..new_start,
+ });
+ } else if !fragment_was_visible && fragment.visible {
+ edits.push(Edit {
+ old: old_start..old_start,
+ new: new_start..new_start + fragment.len,
+ });
+ }
+ new_ropes.push_fragment(&fragment, fragment_was_visible);
+ new_fragments.push(fragment, &None);
+
+ old_fragments.next(&None);
+ }
+ }
+
+ let suffix = old_fragments.suffix(&None);
+ new_ropes.append(suffix.summary().text);
+ new_fragments.append(suffix, &None);
+
+ drop(old_fragments);
+ let (visible_text, deleted_text) = new_ropes.finish();
+ self.snapshot.fragments = new_fragments;
+ self.snapshot.visible_text = visible_text;
+ self.snapshot.deleted_text = deleted_text;
+ self.subscriptions.publish_mut(&edits);
+ Ok(())
+ }
+
+ fn flush_deferred_ops(&mut self) -> Result<()> {
+ self.deferred_replicas.clear();
+ let mut deferred_ops = Vec::new();
+ for op in self.deferred_ops.drain().iter().cloned() {
+ if self.can_apply_op(&op) {
+ self.apply_op(op)?;
+ } else {
+ self.deferred_replicas.insert(op.replica_id());
+ deferred_ops.push(op);
+ }
+ }
+ self.deferred_ops.insert(deferred_ops);
+ Ok(())
+ }
+
+ fn can_apply_op(&self, op: &Operation) -> bool {
+ if self.deferred_replicas.contains(&op.replica_id()) {
+ false
+ } else {
+ self.version.observed_all(match op {
+ Operation::Edit(edit) => &edit.version,
+ Operation::Undo(undo) => &undo.version,
+ })
+ }
+ }
+
+ pub fn peek_undo_stack(&self) -> Option<&HistoryEntry> {
+ self.history.undo_stack.last()
+ }
+
+ pub fn peek_redo_stack(&self) -> Option<&HistoryEntry> {
+ self.history.redo_stack.last()
+ }
+
+ pub fn start_transaction(&mut self) -> Option<TransactionId> {
+ self.start_transaction_at(Instant::now())
+ }
+
+ pub fn start_transaction_at(&mut self, now: Instant) -> Option<TransactionId> {
+ self.history
+ .start_transaction(self.version.clone(), now, &mut self.lamport_clock)
+ }
+
+ pub fn end_transaction(&mut self) -> Option<(TransactionId, clock::Global)> {
+ self.end_transaction_at(Instant::now())
+ }
+
+ pub fn end_transaction_at(&mut self, now: Instant) -> Option<(TransactionId, clock::Global)> {
+ if let Some(entry) = self.history.end_transaction(now) {
+ let since = entry.transaction.start.clone();
+ let id = self.history.group().unwrap();
+ Some((id, since))
+ } else {
+ None
+ }
+ }
+
+ pub fn finalize_last_transaction(&mut self) -> Option<&Transaction> {
+ self.history.finalize_last_transaction()
+ }
+
+ pub fn group_until_transaction(&mut self, transaction_id: TransactionId) {
+ self.history.group_until(transaction_id);
+ }
+
+ pub fn base_text(&self) -> &Rope {
+ &self.history.base_text
+ }
+
+ pub fn operations(&self) -> &TreeMap<clock::Lamport, Operation> {
+ &self.history.operations
+ }
+
+ pub fn undo(&mut self) -> Option<(TransactionId, Operation)> {
+ if let Some(entry) = self.history.pop_undo() {
+ let transaction = entry.transaction.clone();
+ let transaction_id = transaction.id;
+ let op = self.undo_or_redo(transaction).unwrap();
+ Some((transaction_id, op))
+ } else {
+ None
+ }
+ }
+
+ pub fn undo_transaction(&mut self, transaction_id: TransactionId) -> Option<Operation> {
+ let transaction = self
+ .history
+ .remove_from_undo(transaction_id)?
+ .transaction
+ .clone();
+ self.undo_or_redo(transaction).log_err()
+ }
+
+ #[allow(clippy::needless_collect)]
+ pub fn undo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
+ let transactions = self
+ .history
+ .remove_from_undo_until(transaction_id)
+ .iter()
+ .map(|entry| entry.transaction.clone())
+ .collect::<Vec<_>>();
+
+ transactions
+ .into_iter()
+ .map(|transaction| self.undo_or_redo(transaction).unwrap())
+ .collect()
+ }
+
+ pub fn forget_transaction(&mut self, transaction_id: TransactionId) {
+ self.history.forget(transaction_id);
+ }
+
+ pub fn merge_transactions(&mut self, transaction: TransactionId, destination: TransactionId) {
+ self.history.merge_transactions(transaction, destination);
+ }
+
+ pub fn redo(&mut self) -> Option<(TransactionId, Operation)> {
+ if let Some(entry) = self.history.pop_redo() {
+ let transaction = entry.transaction.clone();
+ let transaction_id = transaction.id;
+ let op = self.undo_or_redo(transaction).unwrap();
+ Some((transaction_id, op))
+ } else {
+ None
+ }
+ }
+
+ #[allow(clippy::needless_collect)]
+ pub fn redo_to_transaction(&mut self, transaction_id: TransactionId) -> Vec<Operation> {
+ let transactions = self
+ .history
+ .remove_from_redo(transaction_id)
+ .iter()
+ .map(|entry| entry.transaction.clone())
+ .collect::<Vec<_>>();
+
+ transactions
+ .into_iter()
+ .map(|transaction| self.undo_or_redo(transaction).unwrap())
+ .collect()
+ }
+
+ fn undo_or_redo(&mut self, transaction: Transaction) -> Result<Operation> {
+ let mut counts = HashMap::default();
+ for edit_id in transaction.edit_ids {
+ counts.insert(edit_id, self.undo_map.undo_count(edit_id) + 1);
+ }
+
+ let undo = UndoOperation {
+ timestamp: self.lamport_clock.tick(),
+ version: self.version(),
+ counts,
+ };
+ self.apply_undo(&undo)?;
+ self.snapshot.version.observe(undo.timestamp);
+ let operation = Operation::Undo(undo);
+ self.history.push(operation.clone());
+ Ok(operation)
+ }
+
+ pub fn push_transaction(&mut self, transaction: Transaction, now: Instant) {
+ self.history.push_transaction(transaction, now);
+ self.history.finalize_last_transaction();
+ }
+
+ pub fn edited_ranges_for_transaction<'a, D>(
+ &'a self,
+ transaction: &'a Transaction,
+ ) -> impl 'a + Iterator<Item = Range<D>>
+ where
+ D: TextDimension,
+ {
+ // get fragment ranges
+ let mut cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+ let offset_ranges = self
+ .fragment_ids_for_edits(transaction.edit_ids.iter())
+ .into_iter()
+ .filter_map(move |fragment_id| {
+ cursor.seek_forward(&Some(fragment_id), Bias::Left, &None);
+ let fragment = cursor.item()?;
+ let start_offset = cursor.start().1;
+ let end_offset = start_offset + if fragment.visible { fragment.len } else { 0 };
+ Some(start_offset..end_offset)
+ });
+
+ // combine adjacent ranges
+ let mut prev_range: Option<Range<usize>> = None;
+ let disjoint_ranges = offset_ranges
+ .map(Some)
+ .chain([None])
+ .filter_map(move |range| {
+ if let Some((range, prev_range)) = range.as_ref().zip(prev_range.as_mut()) {
+ if prev_range.end == range.start {
+ prev_range.end = range.end;
+ return None;
+ }
+ }
+ let result = prev_range.clone();
+ prev_range = range;
+ result
+ });
+
+ // convert to the desired text dimension.
+ let mut position = D::default();
+ let mut rope_cursor = self.visible_text.cursor(0);
+ disjoint_ranges.map(move |range| {
+ position.add_assign(&rope_cursor.summary(range.start));
+ let start = position.clone();
+ position.add_assign(&rope_cursor.summary(range.end));
+ let end = position.clone();
+ start..end
+ })
+ }
+
+ pub fn subscribe(&mut self) -> Subscription {
+ self.subscriptions.subscribe()
+ }
+
+ pub fn wait_for_edits(
+ &mut self,
+ edit_ids: impl IntoIterator<Item = clock::Lamport>,
+ ) -> impl 'static + Future<Output = Result<()>> {
+ let mut futures = Vec::new();
+ for edit_id in edit_ids {
+ if !self.version.observed(edit_id) {
+ let (tx, rx) = oneshot::channel();
+ self.edit_id_resolvers.entry(edit_id).or_default().push(tx);
+ futures.push(rx);
+ }
+ }
+
+ async move {
+ for mut future in futures {
+ if future.recv().await.is_none() {
+ Err(anyhow!("gave up waiting for edits"))?;
+ }
+ }
+ Ok(())
+ }
+ }
+
+ pub fn wait_for_anchors(
+ &mut self,
+ anchors: impl IntoIterator<Item = Anchor>,
+ ) -> impl 'static + Future<Output = Result<()>> {
+ let mut futures = Vec::new();
+ for anchor in anchors {
+ if !self.version.observed(anchor.timestamp)
+ && anchor != Anchor::MAX
+ && anchor != Anchor::MIN
+ {
+ let (tx, rx) = oneshot::channel();
+ self.edit_id_resolvers
+ .entry(anchor.timestamp)
+ .or_default()
+ .push(tx);
+ futures.push(rx);
+ }
+ }
+
+ async move {
+ for mut future in futures {
+ if future.recv().await.is_none() {
+ Err(anyhow!("gave up waiting for anchors"))?;
+ }
+ }
+ Ok(())
+ }
+ }
+
+ pub fn wait_for_version(&mut self, version: clock::Global) -> impl Future<Output = Result<()>> {
+ let mut rx = None;
+ if !self.snapshot.version.observed_all(&version) {
+ let channel = oneshot::channel();
+ self.wait_for_version_txs.push((version, channel.0));
+ rx = Some(channel.1);
+ }
+ async move {
+ if let Some(mut rx) = rx {
+ if rx.recv().await.is_none() {
+ Err(anyhow!("gave up waiting for version"))?;
+ }
+ }
+ Ok(())
+ }
+ }
+
+ pub fn give_up_waiting(&mut self) {
+ self.edit_id_resolvers.clear();
+ self.wait_for_version_txs.clear();
+ }
+
+ fn resolve_edit(&mut self, edit_id: clock::Lamport) {
+ for mut tx in self
+ .edit_id_resolvers
+ .remove(&edit_id)
+ .into_iter()
+ .flatten()
+ {
+ tx.try_send(()).ok();
+ }
+ }
+}
+
+#[cfg(any(test, feature = "test-support"))]
+impl Buffer {
+ pub fn edit_via_marked_text(&mut self, marked_string: &str) {
+ let edits = self.edits_for_marked_text(marked_string);
+ self.edit(edits);
+ }
+
+ pub fn edits_for_marked_text(&self, marked_string: &str) -> Vec<(Range<usize>, String)> {
+ let old_text = self.text();
+ let (new_text, mut ranges) = util::test::marked_text_ranges(marked_string, false);
+ if ranges.is_empty() {
+ ranges.push(0..new_text.len());
+ }
+
+ assert_eq!(
+ old_text[..ranges[0].start],
+ new_text[..ranges[0].start],
+ "invalid edit"
+ );
+
+ let mut delta = 0;
+ let mut edits = Vec::new();
+ let mut ranges = ranges.into_iter().peekable();
+
+ while let Some(inserted_range) = ranges.next() {
+ let new_start = inserted_range.start;
+ let old_start = (new_start as isize - delta) as usize;
+
+ let following_text = if let Some(next_range) = ranges.peek() {
+ &new_text[inserted_range.end..next_range.start]
+ } else {
+ &new_text[inserted_range.end..]
+ };
+
+ let inserted_len = inserted_range.len();
+ let deleted_len = old_text[old_start..]
+ .find(following_text)
+ .expect("invalid edit");
+
+ let old_range = old_start..old_start + deleted_len;
+ edits.push((old_range, new_text[inserted_range].to_string()));
+ delta += inserted_len as isize - deleted_len as isize;
+ }
+
+ assert_eq!(
+ old_text.len() as isize + delta,
+ new_text.len() as isize,
+ "invalid edit"
+ );
+
+ edits
+ }
+
+ pub fn check_invariants(&self) {
+ // Ensure every fragment is ordered by locator in the fragment tree and corresponds
+ // to an insertion fragment in the insertions tree.
+ let mut prev_fragment_id = Locator::min();
+ for fragment in self.snapshot.fragments.items(&None) {
+ assert!(fragment.id > prev_fragment_id);
+ prev_fragment_id = fragment.id.clone();
+
+ let insertion_fragment = self
+ .snapshot
+ .insertions
+ .get(
+ &InsertionFragmentKey {
+ timestamp: fragment.timestamp,
+ split_offset: fragment.insertion_offset,
+ },
+ &(),
+ )
+ .unwrap();
+ assert_eq!(
+ insertion_fragment.fragment_id, fragment.id,
+ "fragment: {:?}\ninsertion: {:?}",
+ fragment, insertion_fragment
+ );
+ }
+
+ let mut cursor = self.snapshot.fragments.cursor::<Option<&Locator>>();
+ for insertion_fragment in self.snapshot.insertions.cursor::<()>() {
+ cursor.seek(&Some(&insertion_fragment.fragment_id), Bias::Left, &None);
+ let fragment = cursor.item().unwrap();
+ assert_eq!(insertion_fragment.fragment_id, fragment.id);
+ assert_eq!(insertion_fragment.split_offset, fragment.insertion_offset);
+ }
+
+ let fragment_summary = self.snapshot.fragments.summary();
+ assert_eq!(
+ fragment_summary.text.visible,
+ self.snapshot.visible_text.len()
+ );
+ assert_eq!(
+ fragment_summary.text.deleted,
+ self.snapshot.deleted_text.len()
+ );
+
+ assert!(!self.text().contains("\r\n"));
+ }
+
+ pub fn set_group_interval(&mut self, group_interval: Duration) {
+ self.history.group_interval = group_interval;
+ }
+
+ pub fn random_byte_range(&self, start_offset: usize, rng: &mut impl rand::Rng) -> Range<usize> {
+ let end = self.clip_offset(rng.gen_range(start_offset..=self.len()), Bias::Right);
+ let start = self.clip_offset(rng.gen_range(start_offset..=end), Bias::Right);
+ start..end
+ }
+
+ pub fn get_random_edits<T>(
+ &self,
+ rng: &mut T,
+ edit_count: usize,
+ ) -> Vec<(Range<usize>, Arc<str>)>
+ where
+ T: rand::Rng,
+ {
+ let mut edits: Vec<(Range<usize>, Arc<str>)> = Vec::new();
+ let mut last_end = None;
+ for _ in 0..edit_count {
+ if last_end.map_or(false, |last_end| last_end >= self.len()) {
+ break;
+ }
+ let new_start = last_end.map_or(0, |last_end| last_end + 1);
+ let range = self.random_byte_range(new_start, rng);
+ last_end = Some(range.end);
+
+ let new_text_len = rng.gen_range(0..10);
+ let new_text: String = RandomCharIter::new(&mut *rng).take(new_text_len).collect();
+
+ edits.push((range, new_text.into()));
+ }
+ edits
+ }
+
+ #[allow(clippy::type_complexity)]
+ pub fn randomly_edit<T>(
+ &mut self,
+ rng: &mut T,
+ edit_count: usize,
+ ) -> (Vec<(Range<usize>, Arc<str>)>, Operation)
+ where
+ T: rand::Rng,
+ {
+ let mut edits = self.get_random_edits(rng, edit_count);
+ log::info!("mutating buffer {} with {:?}", self.replica_id, edits);
+
+ let op = self.edit(edits.iter().cloned());
+ if let Operation::Edit(edit) = &op {
+ assert_eq!(edits.len(), edit.new_text.len());
+ for (edit, new_text) in edits.iter_mut().zip(&edit.new_text) {
+ edit.1 = new_text.clone();
+ }
+ } else {
+ unreachable!()
+ }
+
+ (edits, op)
+ }
+
+ pub fn randomly_undo_redo(&mut self, rng: &mut impl rand::Rng) -> Vec<Operation> {
+ use rand::prelude::*;
+
+ let mut ops = Vec::new();
+ for _ in 0..rng.gen_range(1..=5) {
+ if let Some(entry) = self.history.undo_stack.choose(rng) {
+ let transaction = entry.transaction.clone();
+ log::info!(
+ "undoing buffer {} transaction {:?}",
+ self.replica_id,
+ transaction
+ );
+ ops.push(self.undo_or_redo(transaction).unwrap());
+ }
+ }
+ ops
+ }
+}
+
+impl Deref for Buffer {
+ type Target = BufferSnapshot;
+
+ fn deref(&self) -> &Self::Target {
+ &self.snapshot
+ }
+}
+
+impl BufferSnapshot {
+ pub fn as_rope(&self) -> &Rope {
+ &self.visible_text
+ }
+
+ pub fn remote_id(&self) -> u64 {
+ self.remote_id
+ }
+
+ pub fn replica_id(&self) -> ReplicaId {
+ self.replica_id
+ }
+
+ pub fn row_count(&self) -> u32 {
+ self.max_point().row + 1
+ }
+
+ pub fn len(&self) -> usize {
+ self.visible_text.len()
+ }
+
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
+ self.chars_at(0)
+ }
+
+ pub fn chars_for_range<T: ToOffset>(&self, range: Range<T>) -> impl Iterator<Item = char> + '_ {
+ self.text_for_range(range).flat_map(str::chars)
+ }
+
+ pub fn reversed_chars_for_range<T: ToOffset>(
+ &self,
+ range: Range<T>,
+ ) -> impl Iterator<Item = char> + '_ {
+ self.reversed_chunks_in_range(range)
+ .flat_map(|chunk| chunk.chars().rev())
+ }
+
+ pub fn contains_str_at<T>(&self, position: T, needle: &str) -> bool
+ where
+ T: ToOffset,
+ {
+ let position = position.to_offset(self);
+ position == self.clip_offset(position, Bias::Left)
+ && self
+ .bytes_in_range(position..self.len())
+ .flatten()
+ .copied()
+ .take(needle.len())
+ .eq(needle.bytes())
+ }
+
+ pub fn common_prefix_at<T>(&self, position: T, needle: &str) -> Range<T>
+ where
+ T: ToOffset + TextDimension,
+ {
+ let offset = position.to_offset(self);
+ let common_prefix_len = needle
+ .char_indices()
+ .map(|(index, _)| index)
+ .chain([needle.len()])
+ .take_while(|&len| len <= offset)
+ .filter(|&len| {
+ let left = self
+ .chars_for_range(offset - len..offset)
+ .flat_map(char::to_lowercase);
+ let right = needle[..len].chars().flat_map(char::to_lowercase);
+ left.eq(right)
+ })
+ .last()
+ .unwrap_or(0);
+ let start_offset = offset - common_prefix_len;
+ let start = self.text_summary_for_range(0..start_offset);
+ start..position
+ }
+
+ pub fn text(&self) -> String {
+ self.visible_text.to_string()
+ }
+
+ pub fn line_ending(&self) -> LineEnding {
+ self.line_ending
+ }
+
+ pub fn deleted_text(&self) -> String {
+ self.deleted_text.to_string()
+ }
+
+ pub fn fragments(&self) -> impl Iterator<Item = &Fragment> {
+ self.fragments.iter()
+ }
+
+ pub fn text_summary(&self) -> TextSummary {
+ self.visible_text.summary()
+ }
+
+ pub fn max_point(&self) -> Point {
+ self.visible_text.max_point()
+ }
+
+ pub fn max_point_utf16(&self) -> PointUtf16 {
+ self.visible_text.max_point_utf16()
+ }
+
+ pub fn point_to_offset(&self, point: Point) -> usize {
+ self.visible_text.point_to_offset(point)
+ }
+
+ pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
+ self.visible_text.point_utf16_to_offset(point)
+ }
+
+ pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
+ self.visible_text.unclipped_point_utf16_to_offset(point)
+ }
+
+ pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
+ self.visible_text.unclipped_point_utf16_to_point(point)
+ }
+
+ pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
+ self.visible_text.offset_utf16_to_offset(offset)
+ }
+
+ pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
+ self.visible_text.offset_to_offset_utf16(offset)
+ }
+
+ pub fn offset_to_point(&self, offset: usize) -> Point {
+ self.visible_text.offset_to_point(offset)
+ }
+
+ pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
+ self.visible_text.offset_to_point_utf16(offset)
+ }
+
+ pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
+ self.visible_text.point_to_point_utf16(point)
+ }
+
+ pub fn version(&self) -> &clock::Global {
+ &self.version
+ }
+
+ pub fn chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
+ let offset = position.to_offset(self);
+ self.visible_text.chars_at(offset)
+ }
+
+ pub fn reversed_chars_at<T: ToOffset>(&self, position: T) -> impl Iterator<Item = char> + '_ {
+ let offset = position.to_offset(self);
+ self.visible_text.reversed_chars_at(offset)
+ }
+
+ pub fn reversed_chunks_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Chunks {
+ let range = range.start.to_offset(self)..range.end.to_offset(self);
+ self.visible_text.reversed_chunks_in_range(range)
+ }
+
+ pub fn bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
+ let start = range.start.to_offset(self);
+ let end = range.end.to_offset(self);
+ self.visible_text.bytes_in_range(start..end)
+ }
+
+ pub fn reversed_bytes_in_range<T: ToOffset>(&self, range: Range<T>) -> rope::Bytes<'_> {
+ let start = range.start.to_offset(self);
+ let end = range.end.to_offset(self);
+ self.visible_text.reversed_bytes_in_range(start..end)
+ }
+
+ pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
+ let start = range.start.to_offset(self);
+ let end = range.end.to_offset(self);
+ self.visible_text.chunks_in_range(start..end)
+ }
+
+ pub fn line_len(&self, row: u32) -> u32 {
+ let row_start_offset = Point::new(row, 0).to_offset(self);
+ let row_end_offset = if row >= self.max_point().row {
+ self.len()
+ } else {
+ Point::new(row + 1, 0).to_offset(self) - 1
+ };
+ (row_end_offset - row_start_offset) as u32
+ }
+
+ pub fn is_line_blank(&self, row: u32) -> bool {
+ self.text_for_range(Point::new(row, 0)..Point::new(row, self.line_len(row)))
+ .all(|chunk| chunk.matches(|c: char| !c.is_whitespace()).next().is_none())
+ }
+
+ pub fn text_summary_for_range<D, O: ToOffset>(&self, range: Range<O>) -> D
+ where
+ D: TextDimension,
+ {
+ self.visible_text
+ .cursor(range.start.to_offset(self))
+ .summary(range.end.to_offset(self))
+ }
+
+ pub fn summaries_for_anchors<'a, D, A>(&'a self, anchors: A) -> impl 'a + Iterator<Item = D>
+ where
+ D: 'a + TextDimension,
+ A: 'a + IntoIterator<Item = &'a Anchor>,
+ {
+ let anchors = anchors.into_iter();
+ self.summaries_for_anchors_with_payload::<D, _, ()>(anchors.map(|a| (a, ())))
+ .map(|d| d.0)
+ }
+
+ pub fn summaries_for_anchors_with_payload<'a, D, A, T>(
+ &'a self,
+ anchors: A,
+ ) -> impl 'a + Iterator<Item = (D, T)>
+ where
+ D: 'a + TextDimension,
+ A: 'a + IntoIterator<Item = (&'a Anchor, T)>,
+ {
+ let anchors = anchors.into_iter();
+ let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+ let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+ let mut text_cursor = self.visible_text.cursor(0);
+ let mut position = D::default();
+
+ anchors.map(move |(anchor, payload)| {
+ if *anchor == Anchor::MIN {
+ return (D::default(), payload);
+ } else if *anchor == Anchor::MAX {
+ return (D::from_text_summary(&self.visible_text.summary()), payload);
+ }
+
+ let anchor_key = InsertionFragmentKey {
+ timestamp: anchor.timestamp,
+ split_offset: anchor.offset,
+ };
+ insertion_cursor.seek(&anchor_key, anchor.bias, &());
+ if let Some(insertion) = insertion_cursor.item() {
+ let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+ if comparison == Ordering::Greater
+ || (anchor.bias == Bias::Left
+ && comparison == Ordering::Equal
+ && anchor.offset > 0)
+ {
+ insertion_cursor.prev(&());
+ }
+ } else {
+ insertion_cursor.prev(&());
+ }
+ let insertion = insertion_cursor.item().expect("invalid insertion");
+ assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+
+ fragment_cursor.seek_forward(&Some(&insertion.fragment_id), Bias::Left, &None);
+ let fragment = fragment_cursor.item().unwrap();
+ let mut fragment_offset = fragment_cursor.start().1;
+ if fragment.visible {
+ fragment_offset += anchor.offset - insertion.split_offset;
+ }
+
+ position.add_assign(&text_cursor.summary(fragment_offset));
+ (position.clone(), payload)
+ })
+ }
+
+ fn summary_for_anchor<D>(&self, anchor: &Anchor) -> D
+ where
+ D: TextDimension,
+ {
+ if *anchor == Anchor::MIN {
+ D::default()
+ } else if *anchor == Anchor::MAX {
+ D::from_text_summary(&self.visible_text.summary())
+ } else {
+ let anchor_key = InsertionFragmentKey {
+ timestamp: anchor.timestamp,
+ split_offset: anchor.offset,
+ };
+ let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+ insertion_cursor.seek(&anchor_key, anchor.bias, &());
+ if let Some(insertion) = insertion_cursor.item() {
+ let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+ if comparison == Ordering::Greater
+ || (anchor.bias == Bias::Left
+ && comparison == Ordering::Equal
+ && anchor.offset > 0)
+ {
+ insertion_cursor.prev(&());
+ }
+ } else {
+ insertion_cursor.prev(&());
+ }
+ let insertion = insertion_cursor.item().expect("invalid insertion");
+ assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+
+ let mut fragment_cursor = self.fragments.cursor::<(Option<&Locator>, usize)>();
+ fragment_cursor.seek(&Some(&insertion.fragment_id), Bias::Left, &None);
+ let fragment = fragment_cursor.item().unwrap();
+ let mut fragment_offset = fragment_cursor.start().1;
+ if fragment.visible {
+ fragment_offset += anchor.offset - insertion.split_offset;
+ }
+ self.text_summary_for_range(0..fragment_offset)
+ }
+ }
+
+ fn fragment_id_for_anchor(&self, anchor: &Anchor) -> &Locator {
+ if *anchor == Anchor::MIN {
+ Locator::min_ref()
+ } else if *anchor == Anchor::MAX {
+ Locator::max_ref()
+ } else {
+ let anchor_key = InsertionFragmentKey {
+ timestamp: anchor.timestamp,
+ split_offset: anchor.offset,
+ };
+ let mut insertion_cursor = self.insertions.cursor::<InsertionFragmentKey>();
+ insertion_cursor.seek(&anchor_key, anchor.bias, &());
+ if let Some(insertion) = insertion_cursor.item() {
+ let comparison = sum_tree::KeyedItem::key(insertion).cmp(&anchor_key);
+ if comparison == Ordering::Greater
+ || (anchor.bias == Bias::Left
+ && comparison == Ordering::Equal
+ && anchor.offset > 0)
+ {
+ insertion_cursor.prev(&());
+ }
+ } else {
+ insertion_cursor.prev(&());
+ }
+ let insertion = insertion_cursor.item().expect("invalid insertion");
+ debug_assert_eq!(insertion.timestamp, anchor.timestamp, "invalid insertion");
+ &insertion.fragment_id
+ }
+ }
+
+ pub fn anchor_before<T: ToOffset>(&self, position: T) -> Anchor {
+ self.anchor_at(position, Bias::Left)
+ }
+
+ pub fn anchor_after<T: ToOffset>(&self, position: T) -> Anchor {
+ self.anchor_at(position, Bias::Right)
+ }
+
+ pub fn anchor_at<T: ToOffset>(&self, position: T, bias: Bias) -> Anchor {
+ self.anchor_at_offset(position.to_offset(self), bias)
+ }
+
+ fn anchor_at_offset(&self, offset: usize, bias: Bias) -> Anchor {
+ if bias == Bias::Left && offset == 0 {
+ Anchor::MIN
+ } else if bias == Bias::Right && offset == self.len() {
+ Anchor::MAX
+ } else {
+ let mut fragment_cursor = self.fragments.cursor::<usize>();
+ fragment_cursor.seek(&offset, bias, &None);
+ let fragment = fragment_cursor.item().unwrap();
+ let overshoot = offset - *fragment_cursor.start();
+ Anchor {
+ timestamp: fragment.timestamp,
+ offset: fragment.insertion_offset + overshoot,
+ bias,
+ buffer_id: Some(self.remote_id),
+ }
+ }
+ }
+
+ pub fn can_resolve(&self, anchor: &Anchor) -> bool {
+ *anchor == Anchor::MIN
+ || *anchor == Anchor::MAX
+ || (Some(self.remote_id) == anchor.buffer_id && self.version.observed(anchor.timestamp))
+ }
+
+ pub fn clip_offset(&self, offset: usize, bias: Bias) -> usize {
+ self.visible_text.clip_offset(offset, bias)
+ }
+
+ pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
+ self.visible_text.clip_point(point, bias)
+ }
+
+ pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
+ self.visible_text.clip_offset_utf16(offset, bias)
+ }
+
+ pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
+ self.visible_text.clip_point_utf16(point, bias)
+ }
+
+ pub fn edits_since<'a, D>(
+ &'a self,
+ since: &'a clock::Global,
+ ) -> impl 'a + Iterator<Item = Edit<D>>
+ where
+ D: TextDimension + Ord,
+ {
+ self.edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
+ }
+
+ pub fn anchored_edits_since<'a, D>(
+ &'a self,
+ since: &'a clock::Global,
+ ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
+ where
+ D: TextDimension + Ord,
+ {
+ self.anchored_edits_since_in_range(since, Anchor::MIN..Anchor::MAX)
+ }
+
+ pub fn edits_since_in_range<'a, D>(
+ &'a self,
+ since: &'a clock::Global,
+ range: Range<Anchor>,
+ ) -> impl 'a + Iterator<Item = Edit<D>>
+ where
+ D: TextDimension + Ord,
+ {
+ self.anchored_edits_since_in_range(since, range)
+ .map(|item| item.0)
+ }
+
+ pub fn anchored_edits_since_in_range<'a, D>(
+ &'a self,
+ since: &'a clock::Global,
+ range: Range<Anchor>,
+ ) -> impl 'a + Iterator<Item = (Edit<D>, Range<Anchor>)>
+ where
+ D: TextDimension + Ord,
+ {
+ let fragments_cursor = if *since == self.version {
+ None
+ } else {
+ let mut cursor = self
+ .fragments
+ .filter(move |summary| !since.observed_all(&summary.max_version));
+ cursor.next(&None);
+ Some(cursor)
+ };
+ let mut cursor = self
+ .fragments
+ .cursor::<(Option<&Locator>, FragmentTextSummary)>();
+
+ let start_fragment_id = self.fragment_id_for_anchor(&range.start);
+ cursor.seek(&Some(start_fragment_id), Bias::Left, &None);
+ let mut visible_start = cursor.start().1.visible;
+ let mut deleted_start = cursor.start().1.deleted;
+ if let Some(fragment) = cursor.item() {
+ let overshoot = range.start.offset - fragment.insertion_offset;
+ if fragment.visible {
+ visible_start += overshoot;
+ } else {
+ deleted_start += overshoot;
+ }
+ }
+ let end_fragment_id = self.fragment_id_for_anchor(&range.end);
+
+ Edits {
+ visible_cursor: self.visible_text.cursor(visible_start),
+ deleted_cursor: self.deleted_text.cursor(deleted_start),
+ fragments_cursor,
+ undos: &self.undo_map,
+ since,
+ old_end: Default::default(),
+ new_end: Default::default(),
+ range: (start_fragment_id, range.start.offset)..(end_fragment_id, range.end.offset),
+ buffer_id: self.remote_id,
+ }
+ }
+}
+
+struct RopeBuilder<'a> {
+ old_visible_cursor: rope::Cursor<'a>,
+ old_deleted_cursor: rope::Cursor<'a>,
+ new_visible: Rope,
+ new_deleted: Rope,
+}
+
+impl<'a> RopeBuilder<'a> {
+ fn new(old_visible_cursor: rope::Cursor<'a>, old_deleted_cursor: rope::Cursor<'a>) -> Self {
+ Self {
+ old_visible_cursor,
+ old_deleted_cursor,
+ new_visible: Rope::new(),
+ new_deleted: Rope::new(),
+ }
+ }
+
+ fn append(&mut self, len: FragmentTextSummary) {
+ self.push(len.visible, true, true);
+ self.push(len.deleted, false, false);
+ }
+
+ fn push_fragment(&mut self, fragment: &Fragment, was_visible: bool) {
+ debug_assert!(fragment.len > 0);
+ self.push(fragment.len, was_visible, fragment.visible)
+ }
+
+ fn push(&mut self, len: usize, was_visible: bool, is_visible: bool) {
+ let text = if was_visible {
+ self.old_visible_cursor
+ .slice(self.old_visible_cursor.offset() + len)
+ } else {
+ self.old_deleted_cursor
+ .slice(self.old_deleted_cursor.offset() + len)
+ };
+ if is_visible {
+ self.new_visible.append(text);
+ } else {
+ self.new_deleted.append(text);
+ }
+ }
+
+ fn push_str(&mut self, text: &str) {
+ self.new_visible.push(text);
+ }
+
+ fn finish(mut self) -> (Rope, Rope) {
+ self.new_visible.append(self.old_visible_cursor.suffix());
+ self.new_deleted.append(self.old_deleted_cursor.suffix());
+ (self.new_visible, self.new_deleted)
+ }
+}
+
+impl<'a, D: TextDimension + Ord, F: FnMut(&FragmentSummary) -> bool> Iterator for Edits<'a, D, F> {
+ type Item = (Edit<D>, Range<Anchor>);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let mut pending_edit: Option<Self::Item> = None;
+ let cursor = self.fragments_cursor.as_mut()?;
+
+ while let Some(fragment) = cursor.item() {
+ if fragment.id < *self.range.start.0 {
+ cursor.next(&None);
+ continue;
+ } else if fragment.id > *self.range.end.0 {
+ break;
+ }
+
+ if cursor.start().visible > self.visible_cursor.offset() {
+ let summary = self.visible_cursor.summary(cursor.start().visible);
+ self.old_end.add_assign(&summary);
+ self.new_end.add_assign(&summary);
+ }
+
+ if pending_edit
+ .as_ref()
+ .map_or(false, |(change, _)| change.new.end < self.new_end)
+ {
+ break;
+ }
+
+ let start_anchor = Anchor {
+ timestamp: fragment.timestamp,
+ offset: fragment.insertion_offset,
+ bias: Bias::Right,
+ buffer_id: Some(self.buffer_id),
+ };
+ let end_anchor = Anchor {
+ timestamp: fragment.timestamp,
+ offset: fragment.insertion_offset + fragment.len,
+ bias: Bias::Left,
+ buffer_id: Some(self.buffer_id),
+ };
+
+ if !fragment.was_visible(self.since, self.undos) && fragment.visible {
+ let mut visible_end = cursor.end(&None).visible;
+ if fragment.id == *self.range.end.0 {
+ visible_end = cmp::min(
+ visible_end,
+ cursor.start().visible + (self.range.end.1 - fragment.insertion_offset),
+ );
+ }
+
+ let fragment_summary = self.visible_cursor.summary(visible_end);
+ let mut new_end = self.new_end.clone();
+ new_end.add_assign(&fragment_summary);
+ if let Some((edit, range)) = pending_edit.as_mut() {
+ edit.new.end = new_end.clone();
+ range.end = end_anchor;
+ } else {
+ pending_edit = Some((
+ Edit {
+ old: self.old_end.clone()..self.old_end.clone(),
+ new: self.new_end.clone()..new_end.clone(),
+ },
+ start_anchor..end_anchor,
+ ));
+ }
+
+ self.new_end = new_end;
+ } else if fragment.was_visible(self.since, self.undos) && !fragment.visible {
+ let mut deleted_end = cursor.end(&None).deleted;
+ if fragment.id == *self.range.end.0 {
+ deleted_end = cmp::min(
+ deleted_end,
+ cursor.start().deleted + (self.range.end.1 - fragment.insertion_offset),
+ );
+ }
+
+ if cursor.start().deleted > self.deleted_cursor.offset() {
+ self.deleted_cursor.seek_forward(cursor.start().deleted);
+ }
+ let fragment_summary = self.deleted_cursor.summary(deleted_end);
+ let mut old_end = self.old_end.clone();
+ old_end.add_assign(&fragment_summary);
+ if let Some((edit, range)) = pending_edit.as_mut() {
+ edit.old.end = old_end.clone();
+ range.end = end_anchor;
+ } else {
+ pending_edit = Some((
+ Edit {
+ old: self.old_end.clone()..old_end.clone(),
+ new: self.new_end.clone()..self.new_end.clone(),
+ },
+ start_anchor..end_anchor,
+ ));
+ }
+
+ self.old_end = old_end;
+ }
+
+ cursor.next(&None);
+ }
+
+ pending_edit
+ }
+}
+
+impl Fragment {
+ fn insertion_slice(&self) -> InsertionSlice {
+ InsertionSlice {
+ insertion_id: self.timestamp,
+ range: self.insertion_offset..self.insertion_offset + self.len,
+ }
+ }
+
+ fn is_visible(&self, undos: &UndoMap) -> bool {
+ !undos.is_undone(self.timestamp) && self.deletions.iter().all(|d| undos.is_undone(*d))
+ }
+
+ fn was_visible(&self, version: &clock::Global, undos: &UndoMap) -> bool {
+ (version.observed(self.timestamp) && !undos.was_undone(self.timestamp, version))
+ && self
+ .deletions
+ .iter()
+ .all(|d| !version.observed(*d) || undos.was_undone(*d, version))
+ }
+}
+
+impl sum_tree::Item for Fragment {
+ type Summary = FragmentSummary;
+
+ fn summary(&self) -> Self::Summary {
+ let mut max_version = clock::Global::new();
+ max_version.observe(self.timestamp);
+ for deletion in &self.deletions {
+ max_version.observe(*deletion);
+ }
+ max_version.join(&self.max_undos);
+
+ let mut min_insertion_version = clock::Global::new();
+ min_insertion_version.observe(self.timestamp);
+ let max_insertion_version = min_insertion_version.clone();
+ if self.visible {
+ FragmentSummary {
+ max_id: self.id.clone(),
+ text: FragmentTextSummary {
+ visible: self.len,
+ deleted: 0,
+ },
+ max_version,
+ min_insertion_version,
+ max_insertion_version,
+ }
+ } else {
+ FragmentSummary {
+ max_id: self.id.clone(),
+ text: FragmentTextSummary {
+ visible: 0,
+ deleted: self.len,
+ },
+ max_version,
+ min_insertion_version,
+ max_insertion_version,
+ }
+ }
+ }
+}
+
+impl sum_tree::Summary for FragmentSummary {
+ type Context = Option<clock::Global>;
+
+ fn add_summary(&mut self, other: &Self, _: &Self::Context) {
+ self.max_id.assign(&other.max_id);
+ self.text.visible += &other.text.visible;
+ self.text.deleted += &other.text.deleted;
+ self.max_version.join(&other.max_version);
+ self.min_insertion_version
+ .meet(&other.min_insertion_version);
+ self.max_insertion_version
+ .join(&other.max_insertion_version);
+ }
+}
+
+impl Default for FragmentSummary {
+ fn default() -> Self {
+ FragmentSummary {
+ max_id: Locator::min(),
+ text: FragmentTextSummary::default(),
+ max_version: clock::Global::new(),
+ min_insertion_version: clock::Global::new(),
+ max_insertion_version: clock::Global::new(),
+ }
+ }
+}
+
+impl sum_tree::Item for InsertionFragment {
+ type Summary = InsertionFragmentKey;
+
+ fn summary(&self) -> Self::Summary {
+ InsertionFragmentKey {
+ timestamp: self.timestamp,
+ split_offset: self.split_offset,
+ }
+ }
+}
+
+impl sum_tree::KeyedItem for InsertionFragment {
+ type Key = InsertionFragmentKey;
+
+ fn key(&self) -> Self::Key {
+ sum_tree::Item::summary(self)
+ }
+}
+
+impl InsertionFragment {
+ fn new(fragment: &Fragment) -> Self {
+ Self {
+ timestamp: fragment.timestamp,
+ split_offset: fragment.insertion_offset,
+ fragment_id: fragment.id.clone(),
+ }
+ }
+
+ fn insert_new(fragment: &Fragment) -> sum_tree::Edit<Self> {
+ sum_tree::Edit::Insert(Self::new(fragment))
+ }
+}
+
+impl sum_tree::Summary for InsertionFragmentKey {
+ type Context = ();
+
+ fn add_summary(&mut self, summary: &Self, _: &()) {
+ *self = *summary;
+ }
+}
+
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct FullOffset(pub usize);
+
+impl ops::AddAssign<usize> for FullOffset {
+ fn add_assign(&mut self, rhs: usize) {
+ self.0 += rhs;
+ }
+}
+
+impl ops::Add<usize> for FullOffset {
+ type Output = Self;
+
+ fn add(mut self, rhs: usize) -> Self::Output {
+ self += rhs;
+ self
+ }
+}
+
+impl ops::Sub for FullOffset {
+ type Output = usize;
+
+ fn sub(self, rhs: Self) -> Self::Output {
+ self.0 - rhs.0
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for usize {
+ fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
+ *self += summary.text.visible;
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for FullOffset {
+ fn add_summary(&mut self, summary: &FragmentSummary, _: &Option<clock::Global>) {
+ self.0 += summary.text.visible + summary.text.deleted;
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for Option<&'a Locator> {
+ fn add_summary(&mut self, summary: &'a FragmentSummary, _: &Option<clock::Global>) {
+ *self = Some(&summary.max_id);
+ }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, FragmentTextSummary> for usize {
+ fn cmp(
+ &self,
+ cursor_location: &FragmentTextSummary,
+ _: &Option<clock::Global>,
+ ) -> cmp::Ordering {
+ Ord::cmp(self, &cursor_location.visible)
+ }
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+enum VersionedFullOffset {
+ Offset(FullOffset),
+ Invalid,
+}
+
+impl VersionedFullOffset {
+ fn full_offset(&self) -> FullOffset {
+ if let Self::Offset(position) = self {
+ *position
+ } else {
+ panic!("invalid version")
+ }
+ }
+}
+
+impl Default for VersionedFullOffset {
+ fn default() -> Self {
+ Self::Offset(Default::default())
+ }
+}
+
+impl<'a> sum_tree::Dimension<'a, FragmentSummary> for VersionedFullOffset {
+ fn add_summary(&mut self, summary: &'a FragmentSummary, cx: &Option<clock::Global>) {
+ if let Self::Offset(offset) = self {
+ let version = cx.as_ref().unwrap();
+ if version.observed_all(&summary.max_insertion_version) {
+ *offset += summary.text.visible + summary.text.deleted;
+ } else if version.observed_any(&summary.min_insertion_version) {
+ *self = Self::Invalid;
+ }
+ }
+ }
+}
+
+impl<'a> sum_tree::SeekTarget<'a, FragmentSummary, Self> for VersionedFullOffset {
+ fn cmp(&self, cursor_position: &Self, _: &Option<clock::Global>) -> cmp::Ordering {
+ match (self, cursor_position) {
+ (Self::Offset(a), Self::Offset(b)) => Ord::cmp(a, b),
+ (Self::Offset(_), Self::Invalid) => cmp::Ordering::Less,
+ (Self::Invalid, _) => unreachable!(),
+ }
+ }
+}
+
+impl Operation {
+ fn replica_id(&self) -> ReplicaId {
+ operation_queue::Operation::lamport_timestamp(self).replica_id
+ }
+
+ pub fn timestamp(&self) -> clock::Lamport {
+ match self {
+ Operation::Edit(edit) => edit.timestamp,
+ Operation::Undo(undo) => undo.timestamp,
+ }
+ }
+
+ pub fn as_edit(&self) -> Option<&EditOperation> {
+ match self {
+ Operation::Edit(edit) => Some(edit),
+ _ => None,
+ }
+ }
+
+ pub fn is_edit(&self) -> bool {
+ matches!(self, Operation::Edit { .. })
+ }
+}
+
+impl operation_queue::Operation for Operation {
+ fn lamport_timestamp(&self) -> clock::Lamport {
+ match self {
+ Operation::Edit(edit) => edit.timestamp,
+ Operation::Undo(undo) => undo.timestamp,
+ }
+ }
+}
+
+pub trait ToOffset {
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> usize;
+}
+
+impl ToOffset for Point {
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+ snapshot.point_to_offset(*self)
+ }
+}
+
+impl ToOffset for usize {
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+ assert!(
+ *self <= snapshot.len(),
+ "offset {} is out of range, max allowed is {}",
+ self,
+ snapshot.len()
+ );
+ *self
+ }
+}
+
+impl ToOffset for Anchor {
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+ snapshot.summary_for_anchor(self)
+ }
+}
+
+impl<'a, T: ToOffset> ToOffset for &'a T {
+ fn to_offset(&self, content: &BufferSnapshot) -> usize {
+ (*self).to_offset(content)
+ }
+}
+
+impl ToOffset for PointUtf16 {
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+ snapshot.point_utf16_to_offset(*self)
+ }
+}
+
+impl ToOffset for Unclipped<PointUtf16> {
+ fn to_offset(&self, snapshot: &BufferSnapshot) -> usize {
+ snapshot.unclipped_point_utf16_to_offset(*self)
+ }
+}
+
+pub trait ToPoint {
+ fn to_point(&self, snapshot: &BufferSnapshot) -> Point;
+}
+
+impl ToPoint for Anchor {
+ fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
+ snapshot.summary_for_anchor(self)
+ }
+}
+
+impl ToPoint for usize {
+ fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
+ snapshot.offset_to_point(*self)
+ }
+}
+
+impl ToPoint for Point {
+ fn to_point(&self, _: &BufferSnapshot) -> Point {
+ *self
+ }
+}
+
+impl ToPoint for Unclipped<PointUtf16> {
+ fn to_point(&self, snapshot: &BufferSnapshot) -> Point {
+ snapshot.unclipped_point_utf16_to_point(*self)
+ }
+}
+
+pub trait ToPointUtf16 {
+ fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16;
+}
+
+impl ToPointUtf16 for Anchor {
+ fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+ snapshot.summary_for_anchor(self)
+ }
+}
+
+impl ToPointUtf16 for usize {
+ fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+ snapshot.offset_to_point_utf16(*self)
+ }
+}
+
+impl ToPointUtf16 for PointUtf16 {
+ fn to_point_utf16(&self, _: &BufferSnapshot) -> PointUtf16 {
+ *self
+ }
+}
+
+impl ToPointUtf16 for Point {
+ fn to_point_utf16(&self, snapshot: &BufferSnapshot) -> PointUtf16 {
+ snapshot.point_to_point_utf16(*self)
+ }
+}
+
+pub trait ToOffsetUtf16 {
+ fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16;
+}
+
+impl ToOffsetUtf16 for Anchor {
+ fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
+ snapshot.summary_for_anchor(self)
+ }
+}
+
+impl ToOffsetUtf16 for usize {
+ fn to_offset_utf16(&self, snapshot: &BufferSnapshot) -> OffsetUtf16 {
+ snapshot.offset_to_offset_utf16(*self)
+ }
+}
+
+impl ToOffsetUtf16 for OffsetUtf16 {
+ fn to_offset_utf16(&self, _snapshot: &BufferSnapshot) -> OffsetUtf16 {
+ *self
+ }
+}
+
+pub trait FromAnchor {
+ fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self;
+}
+
+impl FromAnchor for Point {
+ fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
+ snapshot.summary_for_anchor(anchor)
+ }
+}
+
+impl FromAnchor for PointUtf16 {
+ fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
+ snapshot.summary_for_anchor(anchor)
+ }
+}
+
+impl FromAnchor for usize {
+ fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
+ snapshot.summary_for_anchor(anchor)
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq)]
+pub enum LineEnding {
+ Unix,
+ Windows,
+}
+
+impl Default for LineEnding {
+    fn default() -> Self {
+        #[cfg(unix)]
+        return Self::Unix;
+
+        #[cfg(not(unix))]
+        return Self::Windows;
+    }
+}
+
+impl LineEnding {
+ pub fn as_str(&self) -> &'static str {
+ match self {
+ LineEnding::Unix => "\n",
+ LineEnding::Windows => "\r\n",
+ }
+ }
+
+ pub fn detect(text: &str) -> Self {
+ let mut max_ix = cmp::min(text.len(), 1000);
+ while !text.is_char_boundary(max_ix) {
+ max_ix -= 1;
+ }
+
+ if let Some(ix) = text[..max_ix].find(&['\n']) {
+ if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
+ Self::Windows
+ } else {
+ Self::Unix
+ }
+ } else {
+ Self::default()
+ }
+ }
+
+ pub fn normalize(text: &mut String) {
+ if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
+ *text = replaced;
+ }
+ }
+
+ pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
+ if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
+ replaced.into()
+ } else {
+ text
+ }
+ }
+}
@@ -0,0 +1,112 @@
+use crate::UndoOperation;
+use std::cmp;
+use sum_tree::{Bias, SumTree};
+
+#[derive(Copy, Clone, Debug)]
+struct UndoMapEntry {
+ key: UndoMapKey,
+ undo_count: u32,
+}
+
+impl sum_tree::Item for UndoMapEntry {
+ type Summary = UndoMapKey;
+
+ fn summary(&self) -> Self::Summary {
+ self.key
+ }
+}
+
+impl sum_tree::KeyedItem for UndoMapEntry {
+ type Key = UndoMapKey;
+
+ fn key(&self) -> Self::Key {
+ self.key
+ }
+}
+
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
+struct UndoMapKey {
+ edit_id: clock::Lamport,
+ undo_id: clock::Lamport,
+}
+
+impl sum_tree::Summary for UndoMapKey {
+ type Context = ();
+
+ fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
+ *self = cmp::max(*self, *summary);
+ }
+}
+
+#[derive(Clone, Default)]
+pub struct UndoMap(SumTree<UndoMapEntry>);
+
+impl UndoMap {
+ pub fn insert(&mut self, undo: &UndoOperation) {
+ let edits = undo
+ .counts
+ .iter()
+ .map(|(edit_id, count)| {
+ sum_tree::Edit::Insert(UndoMapEntry {
+ key: UndoMapKey {
+ edit_id: *edit_id,
+ undo_id: undo.timestamp,
+ },
+ undo_count: *count,
+ })
+ })
+ .collect::<Vec<_>>();
+ self.0.edit(edits, &());
+ }
+
+ pub fn is_undone(&self, edit_id: clock::Lamport) -> bool {
+ self.undo_count(edit_id) % 2 == 1
+ }
+
+ pub fn was_undone(&self, edit_id: clock::Lamport, version: &clock::Global) -> bool {
+ let mut cursor = self.0.cursor::<UndoMapKey>();
+ cursor.seek(
+ &UndoMapKey {
+ edit_id,
+ undo_id: Default::default(),
+ },
+ Bias::Left,
+ &(),
+ );
+
+ let mut undo_count = 0;
+ for entry in cursor {
+ if entry.key.edit_id != edit_id {
+ break;
+ }
+
+ if version.observed(entry.key.undo_id) {
+ undo_count = cmp::max(undo_count, entry.undo_count);
+ }
+ }
+
+ undo_count % 2 == 1
+ }
+
+ pub fn undo_count(&self, edit_id: clock::Lamport) -> u32 {
+ let mut cursor = self.0.cursor::<UndoMapKey>();
+ cursor.seek(
+ &UndoMapKey {
+ edit_id,
+ undo_id: Default::default(),
+ },
+ Bias::Left,
+ &(),
+ );
+
+ let mut undo_count = 0;
+ for entry in cursor {
+ if entry.key.edit_id != edit_id {
+ break;
+ }
+
+ undo_count = cmp::max(undo_count, entry.undo_count);
+ }
+ undo_count
+ }
+}
@@ -17,6 +17,7 @@ const MIN_LINE_HEIGHT: f32 = 1.0;
#[derive(Clone)]
pub struct ThemeSettings {
+ pub ui_font_size: Pixels,
pub buffer_font: Font,
pub buffer_font_size: Pixels,
pub buffer_line_height: BufferLineHeight,
@@ -28,6 +29,8 @@ pub struct AdjustedBufferFontSize(Option<Pixels>);
#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
pub struct ThemeSettingsContent {
+ #[serde(default)]
+ pub ui_font_size: Option<f32>,
#[serde(default)]
pub buffer_font_family: Option<String>,
#[serde(default)]
@@ -115,6 +118,7 @@ impl settings2::Settings for ThemeSettings {
let themes = cx.default_global::<Arc<ThemeRegistry>>();
let mut this = Self {
+ ui_font_size: defaults.ui_font_size.unwrap_or(16.).into(),
buffer_font: Font {
family: defaults.buffer_font_family.clone().unwrap().into(),
features: defaults.buffer_font_features.clone().unwrap(),
@@ -123,9 +127,10 @@ impl settings2::Settings for ThemeSettings {
},
buffer_font_size: defaults.buffer_font_size.unwrap().into(),
buffer_line_height: defaults.buffer_line_height.unwrap(),
- active_theme: themes.get("Zed Pro Moonlight").unwrap(),
- // todo!(Read the theme name from the settings)
- // active_theme: themes.get(defaults.theme.as_ref().unwrap()).unwrap(),
+ active_theme: themes
+ .get(defaults.theme.as_ref().unwrap())
+ .or(themes.get("Zed Pro Moonlight"))
+ .unwrap(),
};
for value in user_values.into_iter().copied().cloned() {
@@ -142,6 +147,7 @@ impl settings2::Settings for ThemeSettings {
}
}
+ merge(&mut this.ui_font_size, value.ui_font_size.map(Into::into));
merge(
&mut this.buffer_font_size,
value.buffer_font_size.map(Into::into),
@@ -10,6 +10,7 @@ chrono = "0.4"
gpui2 = { path = "../gpui2" }
itertools = { version = "0.11.0", optional = true }
serde.workspace = true
+settings2 = { path = "../settings2" }
smallvec.workspace = true
strum = { version = "0.25.0", features = ["derive"] }
theme2 = { path = "../theme2" }
@@ -1,6 +1,6 @@
use std::sync::Arc;
-use gpui2::MouseButton;
+use gpui2::{rems, MouseButton};
use crate::{h_stack, prelude::*};
use crate::{ClickHandler, Icon, IconColor, IconElement};
@@ -88,8 +88,8 @@ impl<V: 'static> IconButton<V> {
.id(self.id.clone())
.justify_center()
.rounded_md()
- .py(ui_size(cx, 0.25))
- .px(ui_size(cx, 6. / 14.))
+ .py(rems(0.21875))
+ .px(rems(0.375))
.bg(bg_color)
.hover(|style| style.bg(bg_hover_color))
.active(|style| style.bg(bg_active_color))
@@ -1,14 +1,16 @@
use std::sync::Arc;
use chrono::DateTime;
-use gpui2::{px, relative, rems, Div, Render, Size, View, VisualContext};
+use gpui2::{px, relative, Div, Render, Size, View, VisualContext};
+use settings2::Settings;
+use theme2::ThemeSettings;
-use crate::{prelude::*, NotificationsPanel};
+use crate::prelude::*;
use crate::{
- static_livestream, user_settings_mut, v_stack, AssistantPanel, Button, ChatMessage, ChatPanel,
- CollabPanel, EditorPane, FakeSettings, Label, LanguageSelector, Pane, PaneGroup, Panel,
- PanelAllowedSides, PanelSide, ProjectPanel, SettingValue, SplitDirection, StatusBar, Terminal,
- TitleBar, Toast, ToastOrigin,
+ static_livestream, v_stack, AssistantPanel, Button, ChatMessage, ChatPanel, CollabPanel,
+ EditorPane, Label, LanguageSelector, NotificationsPanel, Pane, PaneGroup, Panel,
+ PanelAllowedSides, PanelSide, ProjectPanel, SplitDirection, StatusBar, Terminal, TitleBar,
+ Toast, ToastOrigin,
};
#[derive(Clone)]
@@ -150,6 +152,18 @@ impl Workspace {
pub fn debug_toggle_user_settings(&mut self, cx: &mut ViewContext<Self>) {
self.debug.enable_user_settings = !self.debug.enable_user_settings;
+ let mut theme_settings = ThemeSettings::get_global(cx).clone();
+
+ if self.debug.enable_user_settings {
+ theme_settings.ui_font_size = 18.0.into();
+ } else {
+ theme_settings.ui_font_size = 16.0.into();
+ }
+
+ ThemeSettings::override_global(theme_settings.clone(), cx);
+
+ cx.set_rem_size(theme_settings.ui_font_size);
+
cx.notify();
}
@@ -179,20 +193,6 @@ impl Render for Workspace {
type Element = Div<Self>;
fn render(&mut self, cx: &mut ViewContext<Self>) -> Div<Self> {
- // HACK: This should happen inside of `debug_toggle_user_settings`, but
- // we don't have `cx.global::<FakeSettings>()` in event handlers at the moment.
- // Need to talk with Nathan/Antonio about this.
- {
- let settings = user_settings_mut(cx);
-
- if self.debug.enable_user_settings {
- settings.list_indent_depth = SettingValue::UserDefined(rems(0.5).into());
- settings.ui_scale = SettingValue::UserDefined(1.25);
- } else {
- *settings = FakeSettings::default();
- }
- }
-
let root_group = PaneGroup::new_panes(
vec![Pane::new(
"pane-0",
@@ -321,7 +321,7 @@ impl Render for Workspace {
v_stack()
.z_index(9)
.absolute()
- .bottom_10()
+ .top_20()
.left_1_4()
.w_40()
.gap_2()
@@ -1,9 +1,9 @@
use std::sync::Arc;
-use gpui2::{div, DefiniteLength, Hsla, MouseButton, WindowContext};
+use gpui2::{div, rems, DefiniteLength, Hsla, MouseButton, WindowContext};
-use crate::{h_stack, Icon, IconColor, IconElement, Label, LabelColor};
-use crate::{prelude::*, LineHeightStyle};
+use crate::prelude::*;
+use crate::{h_stack, Icon, IconColor, IconElement, Label, LabelColor, LineHeightStyle};
#[derive(Default, PartialEq, Clone, Copy)]
pub enum IconPosition {
@@ -151,7 +151,7 @@ impl<V: 'static> Button<V> {
.relative()
.id(SharedString::from(format!("{}", self.label)))
.p_1()
- .text_size(ui_size(cx, 1.))
+ .text_size(rems(1.))
.rounded_md()
.bg(self.variant.bg_color(cx))
.hover(|style| style.bg(self.variant.bg_color_hover(cx)))
@@ -198,7 +198,7 @@ impl<V: 'static> ButtonGroup<V> {
}
fn render(self, _view: &mut V, cx: &mut ViewContext<V>) -> impl Component<V> {
- let mut el = h_stack().text_size(ui_size(cx, 1.));
+ let mut el = h_stack().text_size(rems(1.));
for button in self.buttons {
el = el.child(button.render(_view, cx));
@@ -1,4 +1,4 @@
-use gpui2::{svg, Hsla};
+use gpui2::{rems, svg, Hsla};
use strum::EnumIter;
use crate::prelude::*;
@@ -184,8 +184,8 @@ impl IconElement {
fn render<V: 'static>(self, _view: &mut V, cx: &mut ViewContext<V>) -> impl Component<V> {
let fill = self.color.color(cx);
let svg_size = match self.size {
- IconSize::Small => ui_size(cx, 12. / 14.),
- IconSize::Medium => ui_size(cx, 15. / 14.),
+ IconSize::Small => rems(0.75),
+ IconSize::Medium => rems(0.9375),
};
svg()
@@ -1,4 +1,4 @@
-use gpui2::{relative, Hsla, WindowContext};
+use gpui2::{relative, rems, Hsla, WindowContext};
use smallvec::SmallVec;
use crate::prelude::*;
@@ -85,7 +85,7 @@ impl Label {
.bg(LabelColor::Hidden.hsla(cx)),
)
})
- .text_size(ui_size(cx, 1.))
+ .text_size(rems(1.))
.when(self.line_height_style == LineHeightStyle::UILabel, |this| {
this.line_height(relative(1.))
})
@@ -4,21 +4,12 @@ pub use gpui2::{
};
pub use crate::elevation::*;
-use crate::settings::user_settings;
pub use crate::ButtonVariant;
pub use theme2::ActiveTheme;
-use gpui2::{rems, Hsla, Rems};
+use gpui2::Hsla;
use strum::EnumIter;
-pub fn ui_size(cx: &mut WindowContext, size: f32) -> Rems {
- const UI_SCALE_RATIO: f32 = 0.875;
-
- let settings = user_settings(cx);
-
- rems(*settings.ui_scale * UI_SCALE_RATIO * size)
-}
-
/// Represents a person with a Zed account's public profile.
/// All data in this struct should be considered public.
pub struct PublicActor {
@@ -58,7 +58,6 @@ pub struct FakeSettings {
pub list_disclosure_style: SettingValue<DisclosureControlStyle>,
pub list_indent_depth: SettingValue<AbsoluteLength>,
pub titlebar: TitlebarSettings,
- pub ui_scale: SettingValue<f32>,
}
impl Default for FakeSettings {
@@ -68,7 +67,6 @@ impl Default for FakeSettings {
list_disclosure_style: SettingValue::Default(DisclosureControlStyle::ChevronOnHover),
list_indent_depth: SettingValue::Default(rems(0.3).into()),
default_panel_size: SettingValue::Default(rems(16.).into()),
- ui_scale: SettingValue::Default(1.),
}
}
}
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
-version = "0.111.0"
+version = "0.112.0"
publish = false
[lib]
@@ -63,7 +63,7 @@ settings2 = { path = "../settings2" }
feature_flags2 = { path = "../feature_flags2" }
sum_tree = { path = "../sum_tree" }
shellexpand = "2.1.0"
-text = { path = "../text" }
+text2 = { path = "../text2" }
# terminal_view = { path = "../terminal_view" }
theme2 = { path = "../theme2" }
# theme_selector = { path = "../theme_selector" }
@@ -152,7 +152,7 @@ language2 = { path = "../language2", features = ["test-support"] }
project2 = { path = "../project2", features = ["test-support"] }
# rpc = { path = "../rpc", features = ["test-support"] }
# settings = { path = "../settings", features = ["test-support"] }
-# text = { path = "../text", features = ["test-support"] }
+text2 = { path = "../text2", features = ["test-support"] }
# util = { path = "../util", features = ["test-support"] }
# workspace = { path = "../workspace", features = ["test-support"] }
unindent.workspace = true