Detailed changes
@@ -1222,7 +1222,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-broadcast",
- "async-trait",
"audio2",
"client2",
"collections",
@@ -1242,9 +1241,7 @@ dependencies = [
"serde_json",
"settings2",
"smallvec",
- "ui2",
"util",
- "workspace2",
]
[[package]]
@@ -2115,7 +2112,7 @@ dependencies = [
"lsp2",
"node_runtime",
"parking_lot 0.11.2",
- "rpc",
+ "rpc2",
"serde",
"serde_derive",
"settings2",
@@ -8246,6 +8243,57 @@ dependencies = [
"workspace",
]
+[[package]]
+name = "semantic_index2"
+version = "0.1.0"
+dependencies = [
+ "ai2",
+ "anyhow",
+ "async-trait",
+ "client2",
+ "collections",
+ "ctor",
+ "env_logger 0.9.3",
+ "futures 0.3.28",
+ "globset",
+ "gpui2",
+ "language2",
+ "lazy_static",
+ "log",
+ "ndarray",
+ "node_runtime",
+ "ordered-float 2.10.0",
+ "parking_lot 0.11.2",
+ "postage",
+ "pretty_assertions",
+ "project2",
+ "rand 0.8.5",
+ "rpc2",
+ "rusqlite",
+ "rust-embed",
+ "schemars",
+ "serde",
+ "serde_json",
+ "settings2",
+ "sha1",
+ "smol",
+ "tempdir",
+ "tiktoken-rs",
+ "tree-sitter",
+ "tree-sitter-cpp",
+ "tree-sitter-elixir",
+ "tree-sitter-json 0.20.0",
+ "tree-sitter-lua",
+ "tree-sitter-php",
+ "tree-sitter-ruby",
+ "tree-sitter-rust",
+ "tree-sitter-toml",
+ "tree-sitter-typescript",
+ "unindent",
+ "util",
+ "workspace2",
+]
+
[[package]]
name = "semver"
version = "1.0.18"
@@ -11545,7 +11593,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-recursion 1.0.5",
- "async-trait",
"bincode",
"call2",
"client2",
@@ -96,6 +96,8 @@ members = [
"crates/rpc2",
"crates/search",
"crates/search2",
+ "crates/semantic_index",
+ "crates/semantic_index2",
"crates/settings",
"crates/settings2",
"crates/snippet",
@@ -115,7 +117,6 @@ members = [
"crates/theme_selector2",
"crates/ui2",
"crates/util",
- "crates/semantic_index",
"crates/story",
"crates/vim",
"crates/vcs_menu",
@@ -7,7 +7,7 @@ pub enum ProviderCredential {
NotNeeded,
}
-pub trait CredentialProvider {
+pub trait CredentialProvider: Send + Sync {
fn has_credentials(&self) -> bool;
fn retrieve_credentials(&self, cx: &mut AppContext) -> ProviderCredential;
fn save_credentials(&self, cx: &mut AppContext, credential: ProviderCredential);
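
The trait now requires `Send + Sync`, which is what lets a shared provider handle be handed to background work. A minimal sketch of the effect, using a stand-in provider rather than the real gpui/ai types:

```rust
use std::sync::Arc;
use std::thread;

// Stand-in for the real trait; only the added `Send + Sync` bounds matter here.
trait CredentialProvider: Send + Sync {
    fn has_credentials(&self) -> bool;
}

struct EnvCredentialProvider;

impl CredentialProvider for EnvCredentialProvider {
    fn has_credentials(&self) -> bool {
        std::env::var("OPENAI_API_KEY").is_ok()
    }
}

fn main() {
    let provider: Arc<dyn CredentialProvider> = Arc::new(EnvCredentialProvider);
    // Without `Send + Sync` on the trait, this clone could not cross the
    // thread boundary and the spawn below would not compile.
    let handle = thread::spawn({
        let provider = provider.clone();
        move || provider.has_credentials()
    });
    println!("has credentials: {}", handle.join().unwrap());
}
```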
@@ -35,7 +35,7 @@ pub struct OpenAIEmbeddingProvider {
model: OpenAILanguageModel,
credential: Arc<RwLock<ProviderCredential>>,
pub client: Arc<dyn HttpClient>,
- pub executor: Arc<BackgroundExecutor>,
+ pub executor: BackgroundExecutor,
rate_limit_count_rx: watch::Receiver<Option<Instant>>,
rate_limit_count_tx: Arc<Mutex<watch::Sender<Option<Instant>>>>,
}
@@ -66,7 +66,7 @@ struct OpenAIEmbeddingUsage {
}
impl OpenAIEmbeddingProvider {
- pub fn new(client: Arc<dyn HttpClient>, executor: Arc<BackgroundExecutor>) -> Self {
+ pub fn new(client: Arc<dyn HttpClient>, executor: BackgroundExecutor) -> Self {
let (rate_limit_count_tx, rate_limit_count_rx) = watch::channel_with(None);
let rate_limit_count_tx = Arc::new(Mutex::new(rate_limit_count_tx));
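
The executor is now taken by value instead of behind an `Arc`. A rough sketch of why that works, with a stand-in handle type (the real `BackgroundExecutor` lives in gpui and is assumed here to be cheaply cloneable):

```rust
use std::sync::Arc;

// Stand-in: a handle that is already reference-counted internally,
// so cloning it is cheap and wrapping it in another Arc adds nothing.
#[derive(Clone)]
struct BackgroundExecutor {
    _dispatcher: Arc<()>,
}

struct OpenAIEmbeddingProvider {
    #[allow(dead_code)]
    executor: BackgroundExecutor, // was: Arc<BackgroundExecutor>
}

impl OpenAIEmbeddingProvider {
    fn new(executor: BackgroundExecutor) -> Self {
        Self { executor }
    }
}

fn main() {
    let executor = BackgroundExecutor { _dispatcher: Arc::new(()) };
    let _provider_a = OpenAIEmbeddingProvider::new(executor.clone());
    let _provider_b = OpenAIEmbeddingProvider::new(executor);
}
```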
@@ -31,9 +31,7 @@ media = { path = "../media" }
project = { package = "project2", path = "../project2" }
settings = { package = "settings2", path = "../settings2" }
util = { path = "../util" }
-ui = {package = "ui2", path = "../ui2"}
-workspace = {package = "workspace2", path = "../workspace2"}
-async-trait.workspace = true
+
anyhow.workspace = true
async-broadcast = "0.4"
futures.workspace = true
@@ -1,32 +1,25 @@
pub mod call_settings;
pub mod participant;
pub mod room;
-mod shared_screen;
use anyhow::{anyhow, Result};
-use async_trait::async_trait;
use audio::Audio;
use call_settings::CallSettings;
-use client::{
- proto::{self, PeerId},
- Client, TelemetrySettings, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE,
-};
+use client::{proto, Client, TelemetrySettings, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE};
use collections::HashSet;
use futures::{channel::oneshot, future::Shared, Future, FutureExt};
use gpui::{
- AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, PromptLevel,
- Subscription, Task, View, ViewContext, VisualContext, WeakModel, WindowHandle,
+ AppContext, AsyncAppContext, Context, EventEmitter, Model, ModelContext, Subscription, Task,
+ WeakModel,
};
-pub use participant::ParticipantLocation;
use postage::watch;
use project::Project;
use room::Event;
-pub use room::Room;
use settings::Settings;
-use shared_screen::SharedScreen;
use std::sync::Arc;
-use util::ResultExt;
-use workspace::{item::ItemHandle, CallHandler, Pane, Workspace};
+
+pub use participant::ParticipantLocation;
+pub use room::Room;
pub fn init(client: Arc<Client>, user_store: Model<UserStore>, cx: &mut AppContext) {
CallSettings::register(cx);
@@ -334,55 +327,12 @@ impl ActiveCall {
pub fn join_channel(
&mut self,
channel_id: u64,
- requesting_window: Option<WindowHandle<Workspace>>,
cx: &mut ModelContext<Self>,
) -> Task<Result<Option<Model<Room>>>> {
if let Some(room) = self.room().cloned() {
if room.read(cx).channel_id() == Some(channel_id) {
- return cx.spawn(|_, _| async move {
- todo!();
- // let future = room.update(&mut cx, |room, cx| {
- // room.most_active_project(cx).map(|(host, project)| {
- // room.join_project(project, host, app_state.clone(), cx)
- // })
- // })
-
- // if let Some(future) = future {
- // future.await?;
- // }
-
- // Ok(Some(room))
- });
- }
-
- let should_prompt = room.update(cx, |room, _| {
- room.channel_id().is_some()
- && room.is_sharing_project()
- && room.remote_participants().len() > 0
- });
- if should_prompt && requesting_window.is_some() {
- return cx.spawn(|this, mut cx| async move {
- let answer = requesting_window.unwrap().update(&mut cx, |_, cx| {
- cx.prompt(
- PromptLevel::Warning,
- "Leaving this call will unshare your current project.\nDo you want to switch channels?",
- &["Yes, Join Channel", "Cancel"],
- )
- })?;
- if answer.await? == 1 {
- return Ok(None);
- }
-
- room.update(&mut cx, |room, cx| room.clear_state(cx))?;
-
- this.update(&mut cx, |this, cx| {
- this.join_channel(channel_id, requesting_window, cx)
- })?
- .await
- });
- }
-
- if room.read(cx).channel_id().is_some() {
+ return Task::ready(Ok(Some(room)));
+ } else {
room.update(cx, |room, cx| room.clear_state(cx));
}
}
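
With the `requesting_window` parameter and the unshare prompt gone, `join_channel` reduces to: if the active room already belongs to the requested channel, return it immediately; otherwise clear the current room state and proceed with the join. An illustrative sketch of that decision (stand-in types, not the real `call` API):

```rust
#[derive(Debug, PartialEq)]
enum JoinStep {
    AlreadyThere,  // return the existing room as-is
    LeaveThenJoin, // clear current room state, then join the requested channel
    JoinFresh,     // not currently in a room
}

fn join_step(in_room: bool, current_channel: Option<u64>, requested: u64) -> JoinStep {
    if in_room {
        if current_channel == Some(requested) {
            JoinStep::AlreadyThere
        } else {
            JoinStep::LeaveThenJoin
        }
    } else {
        JoinStep::JoinFresh
    }
}

fn main() {
    assert_eq!(join_step(true, Some(7), 7), JoinStep::AlreadyThere);
    assert_eq!(join_step(true, Some(7), 9), JoinStep::LeaveThenJoin);
    assert_eq!(join_step(false, None, 9), JoinStep::JoinFresh);
}
```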
@@ -555,197 +505,6 @@ pub fn report_call_event_for_channel(
)
}
-pub struct Call {
- active_call: Option<(Model<ActiveCall>, Vec<Subscription>)>,
-}
-
-impl Call {
- pub fn new(cx: &mut ViewContext<'_, Workspace>) -> Box<dyn CallHandler> {
- let mut active_call = None;
- if cx.has_global::<Model<ActiveCall>>() {
- let call = cx.global::<Model<ActiveCall>>().clone();
- let subscriptions = vec![cx.subscribe(&call, Self::on_active_call_event)];
- active_call = Some((call, subscriptions));
- }
- Box::new(Self { active_call })
- }
- fn on_active_call_event(
- workspace: &mut Workspace,
- _: Model<ActiveCall>,
- event: &room::Event,
- cx: &mut ViewContext<Workspace>,
- ) {
- match event {
- room::Event::ParticipantLocationChanged { participant_id }
- | room::Event::RemoteVideoTracksChanged { participant_id } => {
- workspace.leader_updated(*participant_id, cx);
- }
- _ => {}
- }
- }
-}
-
-#[async_trait(?Send)]
-impl CallHandler for Call {
- fn peer_state(
- &mut self,
- leader_id: PeerId,
- project: &Model<Project>,
- cx: &mut ViewContext<Workspace>,
- ) -> Option<(bool, bool)> {
- let (call, _) = self.active_call.as_ref()?;
- let room = call.read(cx).room()?.read(cx);
- let participant = room.remote_participant_for_peer_id(leader_id)?;
-
- let leader_in_this_app;
- let leader_in_this_project;
- match participant.location {
- ParticipantLocation::SharedProject { project_id } => {
- leader_in_this_app = true;
- leader_in_this_project = Some(project_id) == project.read(cx).remote_id();
- }
- ParticipantLocation::UnsharedProject => {
- leader_in_this_app = true;
- leader_in_this_project = false;
- }
- ParticipantLocation::External => {
- leader_in_this_app = false;
- leader_in_this_project = false;
- }
- };
-
- Some((leader_in_this_project, leader_in_this_app))
- }
-
- fn shared_screen_for_peer(
- &self,
- peer_id: PeerId,
- pane: &View<Pane>,
- cx: &mut ViewContext<Workspace>,
- ) -> Option<Box<dyn ItemHandle>> {
- let (call, _) = self.active_call.as_ref()?;
- let room = call.read(cx).room()?.read(cx);
- let participant = room.remote_participant_for_peer_id(peer_id)?;
- let track = participant.video_tracks.values().next()?.clone();
- let user = participant.user.clone();
- for item in pane.read(cx).items_of_type::<SharedScreen>() {
- if item.read(cx).peer_id == peer_id {
- return Some(Box::new(item));
- }
- }
-
- Some(Box::new(cx.build_view(|cx| {
- SharedScreen::new(&track, peer_id, user.clone(), cx)
- })))
- }
- fn room_id(&self, cx: &AppContext) -> Option<u64> {
- Some(self.active_call.as_ref()?.0.read(cx).room()?.read(cx).id())
- }
- fn hang_up(&self, cx: &mut AppContext) -> Task<Result<()>> {
- let Some((call, _)) = self.active_call.as_ref() else {
- return Task::ready(Err(anyhow!("Cannot exit a call; not in a call")));
- };
-
- call.update(cx, |this, cx| this.hang_up(cx))
- }
- fn active_project(&self, cx: &AppContext) -> Option<WeakModel<Project>> {
- ActiveCall::global(cx).read(cx).location().cloned()
- }
- fn invite(
- &mut self,
- called_user_id: u64,
- initial_project: Option<Model<Project>>,
- cx: &mut AppContext,
- ) -> Task<Result<()>> {
- ActiveCall::global(cx).update(cx, |this, cx| {
- this.invite(called_user_id, initial_project, cx)
- })
- }
- fn remote_participants(&self, cx: &AppContext) -> Option<Vec<(Arc<User>, PeerId)>> {
- self.active_call
- .as_ref()
- .map(|call| {
- call.0.read(cx).room().map(|room| {
- room.read(cx)
- .remote_participants()
- .iter()
- .map(|participant| {
- (participant.1.user.clone(), participant.1.peer_id.clone())
- })
- .collect()
- })
- })
- .flatten()
- }
- fn is_muted(&self, cx: &AppContext) -> Option<bool> {
- self.active_call
- .as_ref()
- .map(|call| {
- call.0
- .read(cx)
- .room()
- .map(|room| room.read(cx).is_muted(cx))
- })
- .flatten()
- }
- fn toggle_mute(&self, cx: &mut AppContext) {
- self.active_call.as_ref().map(|call| {
- call.0.update(cx, |this, cx| {
- this.room().map(|room| {
- let room = room.clone();
- cx.spawn(|_, mut cx| async move {
- room.update(&mut cx, |this, cx| this.toggle_mute(cx))??
- .await
- })
- .detach_and_log_err(cx);
- })
- })
- });
- }
- fn toggle_screen_share(&self, cx: &mut AppContext) {
- self.active_call.as_ref().map(|call| {
- call.0.update(cx, |this, cx| {
- this.room().map(|room| {
- room.update(cx, |this, cx| {
- if this.is_screen_sharing() {
- this.unshare_screen(cx).log_err();
- } else {
- let t = this.share_screen(cx);
- cx.spawn(move |_, _| async move {
- t.await.log_err();
- })
- .detach();
- }
- })
- })
- })
- });
- }
- fn toggle_deafen(&self, cx: &mut AppContext) {
- self.active_call.as_ref().map(|call| {
- call.0.update(cx, |this, cx| {
- this.room().map(|room| {
- room.update(cx, |this, cx| {
- this.toggle_deafen(cx).log_err();
- })
- })
- })
- });
- }
- fn is_deafened(&self, cx: &AppContext) -> Option<bool> {
- self.active_call
- .as_ref()
- .map(|call| {
- call.0
- .read(cx)
- .room()
- .map(|room| room.read(cx).is_deafened())
- })
- .flatten()
- .flatten()
- }
-}
-
#[cfg(test)]
mod test {
use gpui::TestAppContext;
@@ -4,7 +4,7 @@ use client::{proto, User};
use collections::HashMap;
use gpui::WeakModel;
pub use live_kit_client::Frame;
-pub(crate) use live_kit_client::{RemoteAudioTrack, RemoteVideoTrack};
+pub use live_kit_client::{RemoteAudioTrack, RemoteVideoTrack};
use project::Project;
use std::sync::Arc;
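
The `RemoteAudioTrack`/`RemoteVideoTrack` re-exports are widened from `pub(crate)` to `pub`, so crates that depend on `call` can name these types directly. A small sketch of the visibility difference (module names are stand-ins):

```rust
mod call {
    mod live_kit_client {
        pub struct RemoteAudioTrack;
        pub struct RemoteVideoTrack;
    }

    // `pub use` re-exports the types to downstream users of this module/crate;
    // with `pub(crate) use` they would only be nameable inside `call` itself.
    pub use self::live_kit_client::{RemoteAudioTrack, RemoteVideoTrack};
}

fn main() {
    let _audio = call::RemoteAudioTrack;
    let _video = call::RemoteVideoTrack;
}
```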
@@ -4,8 +4,10 @@ use collab_ui::notifications::project_shared_notification::ProjectSharedNotifica
use editor::{Editor, ExcerptRange, MultiBuffer};
use gpui::{executor::Deterministic, geometry::vector::vec2f, TestAppContext, ViewHandle};
use live_kit_client::MacOSDisplay;
+use project::project_settings::ProjectSettings;
use rpc::proto::PeerId;
use serde_json::json;
+use settings::SettingsStore;
use std::{borrow::Cow, sync::Arc};
use workspace::{
dock::{test::TestPanel, DockPosition},
@@ -1602,6 +1604,141 @@ async fn test_following_across_workspaces(
});
}
+#[gpui::test]
+async fn test_following_into_excluded_file(
+ deterministic: Arc<Deterministic>,
+ mut cx_a: &mut TestAppContext,
+ mut cx_b: &mut TestAppContext,
+) {
+ deterministic.forbid_parking();
+
+ let mut server = TestServer::start(&deterministic).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ for cx in [&mut cx_a, &mut cx_b] {
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
+ });
+ });
+ });
+ }
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+ let active_call_a = cx_a.read(ActiveCall::global);
+ let active_call_b = cx_b.read(ActiveCall::global);
+
+ cx_a.update(editor::init);
+ cx_b.update(editor::init);
+
+ client_a
+ .fs()
+ .insert_tree(
+ "/a",
+ json!({
+ ".git": {
+ "COMMIT_EDITMSG": "write your commit message here",
+ },
+ "1.txt": "one\none\none",
+ "2.txt": "two\ntwo\ntwo",
+ "3.txt": "three\nthree\nthree",
+ }),
+ )
+ .await;
+ let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
+ active_call_a
+ .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+ .await
+ .unwrap();
+
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+ .await
+ .unwrap();
+ let project_b = client_b.build_remote_project(project_id, cx_b).await;
+ active_call_b
+ .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+ .await
+ .unwrap();
+
+ let window_a = client_a.build_workspace(&project_a, cx_a);
+ let workspace_a = window_a.root(cx_a);
+ let peer_id_a = client_a.peer_id().unwrap();
+ let window_b = client_b.build_workspace(&project_b, cx_b);
+ let workspace_b = window_b.root(cx_b);
+
+ // Client A opens editors for a regular file and an excluded file.
+ let editor_for_regular = workspace_a
+ .update(cx_a, |workspace, cx| {
+ workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+ })
+ .await
+ .unwrap()
+ .downcast::<Editor>()
+ .unwrap();
+ let editor_for_excluded_a = workspace_a
+ .update(cx_a, |workspace, cx| {
+ workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
+ })
+ .await
+ .unwrap()
+ .downcast::<Editor>()
+ .unwrap();
+
+ // Client A updates their selections in those editors
+ editor_for_regular.update(cx_a, |editor, cx| {
+ editor.handle_input("a", cx);
+ editor.handle_input("b", cx);
+ editor.handle_input("c", cx);
+ editor.select_left(&Default::default(), cx);
+ assert_eq!(editor.selections.ranges(cx), vec![3..2]);
+ });
+ editor_for_excluded_a.update(cx_a, |editor, cx| {
+ editor.select_all(&Default::default(), cx);
+ editor.handle_input("new commit message", cx);
+ editor.select_left(&Default::default(), cx);
+ assert_eq!(editor.selections.ranges(cx), vec![18..17]);
+ });
+
+ // When client B starts following client A, currently visible file is replicated
+ workspace_b
+ .update(cx_b, |workspace, cx| {
+ workspace.follow(peer_id_a, cx).unwrap()
+ })
+ .await
+ .unwrap();
+
+ let editor_for_excluded_b = workspace_b.read_with(cx_b, |workspace, cx| {
+ workspace
+ .active_item(cx)
+ .unwrap()
+ .downcast::<Editor>()
+ .unwrap()
+ });
+ assert_eq!(
+ cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
+ Some((worktree_id, ".git/COMMIT_EDITMSG").into())
+ );
+ assert_eq!(
+ editor_for_excluded_b.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
+ vec![18..17]
+ );
+
+ // Changes from B to the excluded file are replicated in A's editor
+ editor_for_excluded_b.update(cx_b, |editor, cx| {
+ editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
+ });
+ deterministic.run_until_parked();
+ editor_for_excluded_a.update(cx_a, |editor, cx| {
+ assert_eq!(
+ editor.text(cx),
+ "new commit messag\nCo-Authored-By: B <b@b.b>"
+ );
+ });
+}
+
fn visible_push_notifications(
cx: &mut TestAppContext,
) -> Vec<gpui::ViewHandle<ProjectSharedNotification>> {
@@ -2981,11 +2981,10 @@ async fn test_fs_operations(
let entry = project_b
.update(cx_b, |project, cx| {
- project
- .create_entry((worktree_id, "c.txt"), false, cx)
- .unwrap()
+ project.create_entry((worktree_id, "c.txt"), false, cx)
})
.await
+ .unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
@@ -3010,7 +3009,6 @@ async fn test_fs_operations(
.update(cx_b, |project, cx| {
project.rename_entry(entry.id, Path::new("d.txt"), cx)
})
- .unwrap()
.await
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@@ -3034,11 +3032,10 @@ async fn test_fs_operations(
let dir_entry = project_b
.update(cx_b, |project, cx| {
- project
- .create_entry((worktree_id, "DIR"), true, cx)
- .unwrap()
+ project.create_entry((worktree_id, "DIR"), true, cx)
})
.await
+ .unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
@@ -3061,25 +3058,19 @@ async fn test_fs_operations(
project_b
.update(cx_b, |project, cx| {
- project
- .create_entry((worktree_id, "DIR/e.txt"), false, cx)
- .unwrap()
+ project.create_entry((worktree_id, "DIR/e.txt"), false, cx)
})
.await
.unwrap();
project_b
.update(cx_b, |project, cx| {
- project
- .create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
- .unwrap()
+ project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
})
.await
.unwrap();
project_b
.update(cx_b, |project, cx| {
- project
- .create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
- .unwrap()
+ project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
})
.await
.unwrap();
@@ -3120,9 +3111,7 @@ async fn test_fs_operations(
project_b
.update(cx_b, |project, cx| {
- project
- .copy_entry(entry.id, Path::new("f.txt"), cx)
- .unwrap()
+ project.copy_entry(entry.id, Path::new("f.txt"), cx)
})
.await
.unwrap();
@@ -665,7 +665,6 @@ impl RandomizedTest for ProjectCollaborationTest {
ensure_project_shared(&project, client, cx).await;
project
.update(cx, |p, cx| p.create_entry(project_path, is_dir, cx))
- .unwrap()
.await?;
}
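
The call sites of `create_entry`, `rename_entry`, and `copy_entry` change shape throughout these tests: the methods now return a task directly instead of `Result<Task>`, so the `unwrap` moves to after the `await`. A self-contained sketch of the before/after shapes (stand-in functions driven by the `futures` executor, not the real project2 API):

```rust
use anyhow::{anyhow, Result};
use std::future::Future;

// Old shape: the call itself could fail before any work was scheduled,
// so callers unwrapped once before awaiting and once after.
fn create_entry_old(path: &str) -> Result<impl Future<Output = Result<u64>>> {
    if path.is_empty() {
        return Err(anyhow!("empty path"));
    }
    Ok(async { Ok(42) })
}

// New shape: always returns a future; every failure surfaces when awaited.
fn create_entry_new(path: &str) -> impl Future<Output = Result<u64>> {
    let path = path.to_string();
    async move {
        if path.is_empty() {
            return Err(anyhow!("empty path"));
        }
        Ok(42)
    }
}

fn main() -> Result<()> {
    futures::executor::block_on(async {
        let old_id = create_entry_old("c.txt")?.await?; // unwrap, then await
        let new_id = create_entry_new("c.txt").await?; // await, then unwrap
        assert_eq!(old_id, new_id);
        Ok(())
    })
}
```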
@@ -364,8 +364,7 @@ async fn test_joining_channel_ancestor_member(
let active_call_b = cx_b.read(ActiveCall::global);
assert!(active_call_b
- .update(cx_b, |active_call, cx| active_call
- .join_channel(sub_id, None, cx))
+ .update(cx_b, |active_call, cx| active_call.join_channel(sub_id, cx))
.await
.is_ok());
}
@@ -395,9 +394,7 @@ async fn test_channel_room(
let active_call_b = cx_b.read(ActiveCall::global);
active_call_a
- .update(cx_a, |active_call, cx| {
- active_call.join_channel(zed_id, None, cx)
- })
+ .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
.await
.unwrap();
@@ -445,9 +442,7 @@ async fn test_channel_room(
});
active_call_b
- .update(cx_b, |active_call, cx| {
- active_call.join_channel(zed_id, None, cx)
- })
+ .update(cx_b, |active_call, cx| active_call.join_channel(zed_id, cx))
.await
.unwrap();
@@ -564,16 +559,12 @@ async fn test_channel_room(
});
active_call_a
- .update(cx_a, |active_call, cx| {
- active_call.join_channel(zed_id, None, cx)
- })
+ .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
.await
.unwrap();
active_call_b
- .update(cx_b, |active_call, cx| {
- active_call.join_channel(zed_id, None, cx)
- })
+ .update(cx_b, |active_call, cx| active_call.join_channel(zed_id, cx))
.await
.unwrap();
@@ -617,9 +608,7 @@ async fn test_channel_jumping(executor: BackgroundExecutor, cx_a: &mut TestAppCo
let active_call_a = cx_a.read(ActiveCall::global);
active_call_a
- .update(cx_a, |active_call, cx| {
- active_call.join_channel(zed_id, None, cx)
- })
+ .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
.await
.unwrap();
@@ -638,7 +627,7 @@ async fn test_channel_jumping(executor: BackgroundExecutor, cx_a: &mut TestAppCo
active_call_a
.update(cx_a, |active_call, cx| {
- active_call.join_channel(rust_id, None, cx)
+ active_call.join_channel(rust_id, cx)
})
.await
.unwrap();
@@ -804,7 +793,7 @@ async fn test_call_from_channel(
let active_call_b = cx_b.read(ActiveCall::global);
active_call_a
- .update(cx_a, |call, cx| call.join_channel(channel_id, None, cx))
+ .update(cx_a, |call, cx| call.join_channel(channel_id, cx))
.await
.unwrap();
@@ -1297,7 +1286,7 @@ async fn test_guest_access(
// Non-members should not be allowed to join
assert!(active_call_b
- .update(cx_b, |call, cx| call.join_channel(channel_a, None, cx))
+ .update(cx_b, |call, cx| call.join_channel(channel_a, cx))
.await
.is_err());
@@ -1319,7 +1308,7 @@ async fn test_guest_access(
// Client B joins channel A as a guest
active_call_b
- .update(cx_b, |call, cx| call.join_channel(channel_a, None, cx))
+ .update(cx_b, |call, cx| call.join_channel(channel_a, cx))
.await
.unwrap();
@@ -1352,7 +1341,7 @@ async fn test_guest_access(
assert_channels_list_shape(client_b.channel_store(), cx_b, &[]);
active_call_b
- .update(cx_b, |call, cx| call.join_channel(channel_b, None, cx))
+ .update(cx_b, |call, cx| call.join_channel(channel_b, cx))
.await
.unwrap();
@@ -1383,7 +1372,7 @@ async fn test_invite_access(
// should not be allowed to join
assert!(active_call_b
- .update(cx_b, |call, cx| call.join_channel(channel_b_id, None, cx))
+ .update(cx_b, |call, cx| call.join_channel(channel_b_id, cx))
.await
.is_err());
@@ -1401,7 +1390,7 @@ async fn test_invite_access(
.unwrap();
active_call_b
- .update(cx_b, |call, cx| call.join_channel(channel_b_id, None, cx))
+ .update(cx_b, |call, cx| call.join_channel(channel_b_id, cx))
.await
.unwrap();
@@ -4,10 +4,12 @@
// use call::ActiveCall;
// use collab_ui::notifications::project_shared_notification::ProjectSharedNotification;
// use editor::{Editor, ExcerptRange, MultiBuffer};
-// use gpui::{BackgroundExecutor, TestAppContext, View};
+// use gpui::{point, BackgroundExecutor, TestAppContext, View, VisualTestContext, WindowContext};
// use live_kit_client::MacOSDisplay;
+// use project::project_settings::ProjectSettings;
// use rpc::proto::PeerId;
// use serde_json::json;
+// use settings::SettingsStore;
// use std::borrow::Cow;
// use workspace::{
// dock::{test::TestPanel, DockPosition},
@@ -24,7 +26,7 @@
// cx_c: &mut TestAppContext,
// cx_d: &mut TestAppContext,
// ) {
-// let mut server = TestServer::start(&executor).await;
+// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// let client_c = server.create_client(cx_c, "user_c").await;
@@ -71,12 +73,22 @@
// .unwrap();
// let window_a = client_a.build_workspace(&project_a, cx_a);
-// let workspace_a = window_a.root(cx_a);
+// let workspace_a = window_a.root(cx_a).unwrap();
// let window_b = client_b.build_workspace(&project_b, cx_b);
-// let workspace_b = window_b.root(cx_b);
+// let workspace_b = window_b.root(cx_b).unwrap();
+
+// todo!("could be wrong")
+// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
+// let cx_a = &mut cx_a;
+// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
+// let cx_b = &mut cx_b;
+// let mut cx_c = VisualTestContext::from_window(*window_c, cx_c);
+// let cx_c = &mut cx_c;
+// let mut cx_d = VisualTestContext::from_window(*window_d, cx_d);
+// let cx_d = &mut cx_d;
// // Client A opens some editors.
-// let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone());
+// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
// let editor_a1 = workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@@ -132,8 +144,8 @@
// .await
// .unwrap();
-// cx_c.foreground().run_until_parked();
-// let editor_b2 = workspace_b.read_with(cx_b, |workspace, cx| {
+// cx_c.executor().run_until_parked();
+// let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
@@ -145,19 +157,19 @@
// Some((worktree_id, "2.txt").into())
// );
// assert_eq!(
-// editor_b2.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
+// editor_b2.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
// vec![2..1]
// );
// assert_eq!(
-// editor_b1.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
+// editor_b1.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
// vec![3..2]
// );
-// cx_c.foreground().run_until_parked();
+// cx_c.executor().run_until_parked();
// let active_call_c = cx_c.read(ActiveCall::global);
// let project_c = client_c.build_remote_project(project_id, cx_c).await;
// let window_c = client_c.build_workspace(&project_c, cx_c);
-// let workspace_c = window_c.root(cx_c);
+// let workspace_c = window_c.root(cx_c).unwrap();
// active_call_c
// .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx))
// .await
@@ -172,10 +184,13 @@
// .await
// .unwrap();
-// cx_d.foreground().run_until_parked();
+// cx_d.executor().run_until_parked();
// let active_call_d = cx_d.read(ActiveCall::global);
// let project_d = client_d.build_remote_project(project_id, cx_d).await;
-// let workspace_d = client_d.build_workspace(&project_d, cx_d).root(cx_d);
+// let workspace_d = client_d
+// .build_workspace(&project_d, cx_d)
+// .root(cx_d)
+// .unwrap();
// active_call_d
// .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx))
// .await
@@ -183,7 +198,7 @@
// drop(project_d);
// // All clients see that clients B and C are following client A.
-// cx_c.foreground().run_until_parked();
+// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@@ -198,7 +213,7 @@
// });
// // All clients see that clients B is following client A.
-// cx_c.foreground().run_until_parked();
+// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@@ -216,7 +231,7 @@
// .unwrap();
// // All clients see that clients B and C are following client A.
-// cx_c.foreground().run_until_parked();
+// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@@ -240,7 +255,7 @@
// .unwrap();
// // All clients see that D is following C
-// cx_d.foreground().run_until_parked();
+// cx_d.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@@ -257,7 +272,7 @@
// cx_c.drop_last(workspace_c);
// // Clients A and B see that client B is following A, and client C is not present in the followers.
-// cx_c.foreground().run_until_parked();
+// cx_c.executor().run_until_parked();
// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
// assert_eq!(
// followers_by_leader(project_id, cx),
@@ -271,12 +286,15 @@
// workspace.activate_item(&editor_a1, cx)
// });
// executor.run_until_parked();
-// workspace_b.read_with(cx_b, |workspace, cx| {
-// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
+// workspace_b.update(cx_b, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// editor_b1.item_id()
+// );
// });
// // When client A opens a multibuffer, client B does so as well.
-// let multibuffer_a = cx_a.add_model(|cx| {
+// let multibuffer_a = cx_a.build_model(|cx| {
// let buffer_a1 = project_a.update(cx, |project, cx| {
// project
// .get_open_buffer(&(worktree_id, "1.txt").into(), cx)
@@ -308,12 +326,12 @@
// });
// let multibuffer_editor_a = workspace_a.update(cx_a, |workspace, cx| {
// let editor =
-// cx.add_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx));
+// cx.build_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx));
// workspace.add_item(Box::new(editor.clone()), cx);
// editor
// });
// executor.run_until_parked();
-// let multibuffer_editor_b = workspace_b.read_with(cx_b, |workspace, cx| {
+// let multibuffer_editor_b = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
@@ -321,8 +339,8 @@
// .unwrap()
// });
// assert_eq!(
-// multibuffer_editor_a.read_with(cx_a, |editor, cx| editor.text(cx)),
-// multibuffer_editor_b.read_with(cx_b, |editor, cx| editor.text(cx)),
+// multibuffer_editor_a.update(cx_a, |editor, cx| editor.text(cx)),
+// multibuffer_editor_b.update(cx_b, |editor, cx| editor.text(cx)),
// );
// // When client A navigates back and forth, client B does so as well.
@@ -333,8 +351,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
-// workspace_b.read_with(cx_b, |workspace, cx| {
-// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
+// workspace_b.update(cx_b, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// editor_b1.item_id()
+// );
// });
// workspace_a
@@ -344,8 +365,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
-// workspace_b.read_with(cx_b, |workspace, cx| {
-// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b2.id());
+// workspace_b.update(cx_b, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// editor_b2.item_id()
+// );
// });
// workspace_a
@@ -355,8 +379,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
-// workspace_b.read_with(cx_b, |workspace, cx| {
-// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
+// workspace_b.update(cx_b, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// editor_b1.item_id()
+// );
// });
// // Changes to client A's editor are reflected on client B.
@@ -364,20 +391,20 @@
// editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2]));
// });
// executor.run_until_parked();
-// editor_b1.read_with(cx_b, |editor, cx| {
+// editor_b1.update(cx_b, |editor, cx| {
// assert_eq!(editor.selections.ranges(cx), &[1..1, 2..2]);
// });
// editor_a1.update(cx_a, |editor, cx| editor.set_text("TWO", cx));
// executor.run_until_parked();
-// editor_b1.read_with(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO"));
+// editor_b1.update(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO"));
// editor_a1.update(cx_a, |editor, cx| {
// editor.change_selections(None, cx, |s| s.select_ranges([3..3]));
-// editor.set_scroll_position(vec2f(0., 100.), cx);
+// editor.set_scroll_position(point(0., 100.), cx);
// });
// executor.run_until_parked();
-// editor_b1.read_with(cx_b, |editor, cx| {
+// editor_b1.update(cx_b, |editor, cx| {
// assert_eq!(editor.selections.ranges(cx), &[3..3]);
// });
@@ -390,11 +417,11 @@
// });
// executor.run_until_parked();
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, cx| workspace
+// workspace_b.update(cx_b, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
-// .id()),
-// editor_b1.id()
+// .item_id()),
+// editor_b1.item_id()
// );
// // Client A starts following client B.
@@ -405,15 +432,15 @@
// .await
// .unwrap();
// assert_eq!(
-// workspace_a.read_with(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
+// workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// Some(peer_id_b)
// );
// assert_eq!(
-// workspace_a.read_with(cx_a, |workspace, cx| workspace
+// workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
-// .id()),
-// editor_a1.id()
+// .item_id()),
+// editor_a1.item_id()
// );
// // Client B activates an external window, which causes a new screen-sharing item to be added to the pane.
@@ -432,7 +459,7 @@
// .await
// .unwrap();
// executor.run_until_parked();
-// let shared_screen = workspace_a.read_with(cx_a, |workspace, cx| {
+// let shared_screen = workspace_a.update(cx_a, |workspace, cx| {
// workspace
// .active_item(cx)
// .expect("no active item")
@@ -446,8 +473,11 @@
// .await
// .unwrap();
// executor.run_until_parked();
-// workspace_a.read_with(cx_a, |workspace, cx| {
-// assert_eq!(workspace.active_item(cx).unwrap().id(), editor_a1.id())
+// workspace_a.update(cx_a, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// editor_a1.item_id()
+// )
// });
// // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
@@ -455,26 +485,26 @@
// workspace.activate_item(&multibuffer_editor_b, cx)
// });
// executor.run_until_parked();
-// workspace_a.read_with(cx_a, |workspace, cx| {
+// workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!(
-// workspace.active_item(cx).unwrap().id(),
-// multibuffer_editor_a.id()
+// workspace.active_item(cx).unwrap().item_id(),
+// multibuffer_editor_a.item_id()
// )
// });
// // Client B activates a panel, and the previously-opened screen-sharing item gets activated.
-// let panel = window_b.add_view(cx_b, |_| TestPanel::new(DockPosition::Left));
+// let panel = window_b.build_view(cx_b, |_| TestPanel::new(DockPosition::Left));
// workspace_b.update(cx_b, |workspace, cx| {
// workspace.add_panel(panel, cx);
// workspace.toggle_panel_focus::<TestPanel>(cx);
// });
// executor.run_until_parked();
// assert_eq!(
-// workspace_a.read_with(cx_a, |workspace, cx| workspace
+// workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
-// .id()),
-// shared_screen.id()
+// .item_id()),
+// shared_screen.item_id()
// );
// // Toggling the focus back to the pane causes client A to return to the multibuffer.
@@ -482,16 +512,16 @@
// workspace.toggle_panel_focus::<TestPanel>(cx);
// });
// executor.run_until_parked();
-// workspace_a.read_with(cx_a, |workspace, cx| {
+// workspace_a.update(cx_a, |workspace, cx| {
// assert_eq!(
-// workspace.active_item(cx).unwrap().id(),
-// multibuffer_editor_a.id()
+// workspace.active_item(cx).unwrap().item_id(),
+// multibuffer_editor_a.item_id()
// )
// });
// // Client B activates an item that doesn't implement following,
// // so the previously-opened screen-sharing item gets activated.
-// let unfollowable_item = window_b.add_view(cx_b, |_| TestItem::new());
+// let unfollowable_item = window_b.build_view(cx_b, |_| TestItem::new());
// workspace_b.update(cx_b, |workspace, cx| {
// workspace.active_pane().update(cx, |pane, cx| {
// pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
@@ -499,18 +529,18 @@
// });
// executor.run_until_parked();
// assert_eq!(
-// workspace_a.read_with(cx_a, |workspace, cx| workspace
+// workspace_a.update(cx_a, |workspace, cx| workspace
// .active_item(cx)
// .unwrap()
-// .id()),
-// shared_screen.id()
+// .item_id()),
+// shared_screen.item_id()
// );
// // Following interrupts when client B disconnects.
// client_b.disconnect(&cx_b.to_async());
// executor.advance_clock(RECONNECT_TIMEOUT);
// assert_eq!(
-// workspace_a.read_with(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
+// workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
// None
// );
// }
@@ -521,7 +551,7 @@
// cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext,
// ) {
-// let mut server = TestServer::start(&executor).await;
+// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@@ -560,13 +590,19 @@
// .await
// .unwrap();
-// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
-// let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone());
+// let workspace_a = client_a
+// .build_workspace(&project_a, cx_a)
+// .root(cx_a)
+// .unwrap();
+// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
-// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
-// let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone());
+// let workspace_b = client_b
+// .build_workspace(&project_b, cx_b)
+// .root(cx_b)
+// .unwrap();
+// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
-// let client_b_id = project_a.read_with(cx_a, |project, _| {
+// let client_b_id = project_a.update(cx_a, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });
@@ -584,7 +620,7 @@
// .await
// .unwrap();
-// let pane_paths = |pane: &ViewHandle<workspace::Pane>, cx: &mut TestAppContext| {
+// let pane_paths = |pane: &View<workspace::Pane>, cx: &mut TestAppContext| {
// pane.update(cx, |pane, cx| {
// pane.items()
// .map(|item| {
@@ -642,7 +678,7 @@
// cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext,
// ) {
-// let mut server = TestServer::start(&executor).await;
+// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@@ -685,7 +721,10 @@
// .unwrap();
// // Client A opens a file.
-// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
+// let workspace_a = client_a
+// .build_workspace(&project_a, cx_a)
+// .root(cx_a)
+// .unwrap();
// workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@@ -696,7 +735,10 @@
// .unwrap();
// // Client B opens a different file.
-// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
+// let workspace_b = client_b
+// .build_workspace(&project_b, cx_b)
+// .root(cx_b)
+// .unwrap();
// workspace_b
// .update(cx_b, |workspace, cx| {
// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
@@ -1167,7 +1209,7 @@
// cx_b: &mut TestAppContext,
// ) {
// // 2 clients connect to a server.
-// let mut server = TestServer::start(&executor).await;
+// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@@ -1207,8 +1249,17 @@
// .await
// .unwrap();
+// todo!("could be wrong")
+// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
+// let cx_a = &mut cx_a;
+// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
+// let cx_b = &mut cx_b;
+
// // Client A opens some editors.
-// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
+// let workspace_a = client_a
+// .build_workspace(&project_a, cx_a)
+// .root(cx_a)
+// .unwrap();
// let _editor_a1 = workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
@@ -1219,9 +1270,12 @@
// .unwrap();
// // Client B starts following client A.
-// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
-// let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone());
-// let leader_id = project_b.read_with(cx_b, |project, _| {
+// let workspace_b = client_b
+// .build_workspace(&project_b, cx_b)
+// .root(cx_b)
+// .unwrap();
+// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
+// let leader_id = project_b.update(cx_b, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });
// workspace_b
@@ -1231,10 +1285,10 @@
// .await
// .unwrap();
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
-// let editor_b2 = workspace_b.read_with(cx_b, |workspace, cx| {
+// let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
// workspace
// .active_item(cx)
// .unwrap()
@@ -1245,7 +1299,7 @@
// // When client B moves, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| editor.move_right(&editor::MoveRight, cx));
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );
@@ -1256,14 +1310,14 @@
// .await
// .unwrap();
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
// // When client B edits, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| editor.insert("X", cx));
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );
@@ -1274,16 +1328,16 @@
// .await
// .unwrap();
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
// // When client B scrolls, it automatically stops following client A.
// editor_b2.update(cx_b, |editor, cx| {
-// editor.set_scroll_position(vec2f(0., 3.), cx)
+// editor.set_scroll_position(point(0., 3.), cx)
// });
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );
@@ -1294,7 +1348,7 @@
// .await
// .unwrap();
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
@@ -1303,13 +1357,13 @@
// workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx)
// });
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
// workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// Some(leader_id)
// );
@@ -1321,7 +1375,7 @@
// .await
// .unwrap();
// assert_eq!(
-// workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
// None
// );
// }
@@ -1332,7 +1386,7 @@
// cx_a: &mut TestAppContext,
// cx_b: &mut TestAppContext,
// ) {
-// let mut server = TestServer::start(&executor).await;
+// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// server
@@ -1345,20 +1399,26 @@
// client_a.fs().insert_tree("/a", json!({})).await;
// let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
-// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
+// let workspace_a = client_a
+// .build_workspace(&project_a, cx_a)
+// .root(cx_a)
+// .unwrap();
// let project_id = active_call_a
// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
// .await
// .unwrap();
// let project_b = client_b.build_remote_project(project_id, cx_b).await;
-// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
+// let workspace_b = client_b
+// .build_workspace(&project_b, cx_b)
+// .root(cx_b)
+// .unwrap();
// executor.run_until_parked();
-// let client_a_id = project_b.read_with(cx_b, |project, _| {
+// let client_a_id = project_b.update(cx_b, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });
-// let client_b_id = project_a.read_with(cx_a, |project, _| {
+// let client_b_id = project_a.update(cx_a, |project, _| {
// project.collaborators().values().next().unwrap().peer_id
// });
@@ -1370,13 +1430,13 @@
// });
// futures::try_join!(a_follow_b, b_follow_a).unwrap();
-// workspace_a.read_with(cx_a, |workspace, _| {
+// workspace_a.update(cx_a, |workspace, _| {
// assert_eq!(
// workspace.leader_for_pane(workspace.active_pane()),
// Some(client_b_id)
// );
// });
-// workspace_b.read_with(cx_b, |workspace, _| {
+// workspace_b.update(cx_b, |workspace, _| {
// assert_eq!(
// workspace.leader_for_pane(workspace.active_pane()),
// Some(client_a_id)
@@ -1398,7 +1458,7 @@
// // b opens a different file in project 2, a follows b
// // b opens a different file in project 1, a cannot follow b
// // b shares the project, a joins the project and follows b
-// let mut server = TestServer::start(&executor).await;
+// let mut server = TestServer::start(executor.clone()).await;
// let client_a = server.create_client(cx_a, "user_a").await;
// let client_b = server.create_client(cx_b, "user_b").await;
// cx_a.update(editor::init);
@@ -1435,8 +1495,14 @@
// let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await;
// let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await;
-// let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
-// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
+// let workspace_a = client_a
+// .build_workspace(&project_a, cx_a)
+// .root(cx_a)
+// .unwrap();
+// let workspace_b = client_b
+// .build_workspace(&project_b, cx_b)
+// .root(cx_b)
+// .unwrap();
// cx_a.update(|cx| collab_ui::init(&client_a.app_state, cx));
// cx_b.update(|cx| collab_ui::init(&client_b.app_state, cx));
@@ -1455,6 +1521,12 @@
// .await
// .unwrap();
+// todo!("could be wrong")
+// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
+// let cx_a = &mut cx_a;
+// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
+// let cx_b = &mut cx_b;
+
// workspace_a
// .update(cx_a, |workspace, cx| {
// workspace.open_path((worktree_id_a, "w.rs"), None, true, cx)
@@ -1476,11 +1548,12 @@
// let workspace_b_project_a = cx_b
// .windows()
// .iter()
-// .max_by_key(|window| window.id())
+// .max_by_key(|window| window.item_id())
// .unwrap()
// .downcast::<Workspace>()
// .unwrap()
-// .root(cx_b);
+// .root(cx_b)
+// .unwrap();
// // assert that b is following a in project a in w.rs
// workspace_b_project_a.update(cx_b, |workspace, cx| {
@@ -1534,7 +1607,7 @@
// workspace.leader_for_pane(workspace.active_pane())
// );
// let item = workspace.active_pane().read(cx).active_item().unwrap();
-// assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("x.rs"));
+// assert_eq!(item.tab_description(0, cx).unwrap(), "x.rs".into());
// });
// // b moves to y.rs in b's project, a is still following but can't yet see
@@ -1578,11 +1651,12 @@
// let workspace_a_project_b = cx_a
// .windows()
// .iter()
-// .max_by_key(|window| window.id())
+// .max_by_key(|window| window.item_id())
// .unwrap()
// .downcast::<Workspace>()
// .unwrap()
-// .root(cx_a);
+// .root(cx_a)
+// .unwrap();
// workspace_a_project_b.update(cx_a, |workspace, cx| {
// assert_eq!(workspace.project().read(cx).remote_id(), Some(project_b_id));
@@ -1596,12 +1670,151 @@
// });
// }
+// #[gpui::test]
+// async fn test_following_into_excluded_file(
+// executor: BackgroundExecutor,
+// mut cx_a: &mut TestAppContext,
+// mut cx_b: &mut TestAppContext,
+// ) {
+// let mut server = TestServer::start(executor.clone()).await;
+// let client_a = server.create_client(cx_a, "user_a").await;
+// let client_b = server.create_client(cx_b, "user_b").await;
+// for cx in [&mut cx_a, &mut cx_b] {
+// cx.update(|cx| {
+// cx.update_global::<SettingsStore, _>(|store, cx| {
+// store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+// project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
+// });
+// });
+// });
+// }
+// server
+// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+// .await;
+// let active_call_a = cx_a.read(ActiveCall::global);
+// let active_call_b = cx_b.read(ActiveCall::global);
+
+// cx_a.update(editor::init);
+// cx_b.update(editor::init);
+
+// client_a
+// .fs()
+// .insert_tree(
+// "/a",
+// json!({
+// ".git": {
+// "COMMIT_EDITMSG": "write your commit message here",
+// },
+// "1.txt": "one\none\none",
+// "2.txt": "two\ntwo\ntwo",
+// "3.txt": "three\nthree\nthree",
+// }),
+// )
+// .await;
+// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
+// active_call_a
+// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+// .await
+// .unwrap();
+
+// let project_id = active_call_a
+// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+// .await
+// .unwrap();
+// let project_b = client_b.build_remote_project(project_id, cx_b).await;
+// active_call_b
+// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+// .await
+// .unwrap();
+
+// let window_a = client_a.build_workspace(&project_a, cx_a);
+// let workspace_a = window_a.root(cx_a).unwrap();
+// let peer_id_a = client_a.peer_id().unwrap();
+// let window_b = client_b.build_workspace(&project_b, cx_b);
+// let workspace_b = window_b.root(cx_b).unwrap();
+
+// todo!("could be wrong")
+// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
+// let cx_a = &mut cx_a;
+// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
+// let cx_b = &mut cx_b;
+
+// // Client A opens editors for a regular file and an excluded file.
+// let editor_for_regular = workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+// })
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+// let editor_for_excluded_a = workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
+// })
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+
+// // Client A updates their selections in those editors
+// editor_for_regular.update(cx_a, |editor, cx| {
+// editor.handle_input("a", cx);
+// editor.handle_input("b", cx);
+// editor.handle_input("c", cx);
+// editor.select_left(&Default::default(), cx);
+// assert_eq!(editor.selections.ranges(cx), vec![3..2]);
+// });
+// editor_for_excluded_a.update(cx_a, |editor, cx| {
+// editor.select_all(&Default::default(), cx);
+// editor.handle_input("new commit message", cx);
+// editor.select_left(&Default::default(), cx);
+// assert_eq!(editor.selections.ranges(cx), vec![18..17]);
+// });
+
+// // When client B starts following client A, currently visible file is replicated
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.follow(peer_id_a, cx).unwrap()
+// })
+// .await
+// .unwrap();
+
+// let editor_for_excluded_b = workspace_b.update(cx_b, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap()
+// });
+// assert_eq!(
+// cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
+// Some((worktree_id, ".git/COMMIT_EDITMSG").into())
+// );
+// assert_eq!(
+// editor_for_excluded_b.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
+// vec![18..17]
+// );
+
+// // Changes from B to the excluded file are replicated in A's editor
+// editor_for_excluded_b.update(cx_b, |editor, cx| {
+// editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
+// });
+// executor.run_until_parked();
+// editor_for_excluded_a.update(cx_a, |editor, cx| {
+// assert_eq!(
+// editor.text(cx),
+// "new commit messag\nCo-Authored-By: B <b@b.b>"
+// );
+// });
+// }
+
// fn visible_push_notifications(
// cx: &mut TestAppContext,
-// ) -> Vec<gpui::ViewHandle<ProjectSharedNotification>> {
+// ) -> Vec<gpui::View<ProjectSharedNotification>> {
// let mut ret = Vec::new();
// for window in cx.windows() {
-// window.read_with(cx, |window| {
+// window.update(cx, |window| {
// if let Some(handle) = window
// .root_view()
// .clone()
@@ -1645,8 +1858,8 @@
// })
// }
-// fn pane_summaries(workspace: &ViewHandle<Workspace>, cx: &mut TestAppContext) -> Vec<PaneSummary> {
-// workspace.read_with(cx, |workspace, cx| {
+// fn pane_summaries(workspace: &View<Workspace>, cx: &mut WindowContext<'_>) -> Vec<PaneSummary> {
+// workspace.update(cx, |workspace, cx| {
// let active_pane = workspace.active_pane();
// workspace
// .panes()
@@ -510,10 +510,9 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
// Simultaneously join channel 1 and then channel 2
active_call_a
- .update(cx_a, |call, cx| call.join_channel(channel_1, None, cx))
+ .update(cx_a, |call, cx| call.join_channel(channel_1, cx))
.detach();
- let join_channel_2 =
- active_call_a.update(cx_a, |call, cx| call.join_channel(channel_2, None, cx));
+ let join_channel_2 = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_2, cx));
join_channel_2.await.unwrap();
@@ -539,8 +538,7 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
call.invite(client_c.user_id().unwrap(), None, cx)
});
- let join_channel =
- active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, None, cx));
+ let join_channel = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, cx));
b_invite.await.unwrap();
c_invite.await.unwrap();
@@ -569,8 +567,7 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
.unwrap();
// Simultaneously join channel 1 and call user B and user C from client A.
- let join_channel =
- active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, None, cx));
+ let join_channel = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, cx));
let b_invite = active_call_a.update(cx_a, |call, cx| {
call.invite(client_b.user_id().unwrap(), None, cx)
@@ -2784,11 +2781,10 @@ async fn test_fs_operations(
let entry = project_b
.update(cx_b, |project, cx| {
- project
- .create_entry((worktree_id, "c.txt"), false, cx)
- .unwrap()
+ project.create_entry((worktree_id, "c.txt"), false, cx)
})
.await
+ .unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@@ -2815,8 +2811,8 @@ async fn test_fs_operations(
.update(cx_b, |project, cx| {
project.rename_entry(entry.id, Path::new("d.txt"), cx)
})
- .unwrap()
.await
+ .unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@@ -2841,11 +2837,10 @@ async fn test_fs_operations(
let dir_entry = project_b
.update(cx_b, |project, cx| {
- project
- .create_entry((worktree_id, "DIR"), true, cx)
- .unwrap()
+ project.create_entry((worktree_id, "DIR"), true, cx)
})
.await
+ .unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@@ -2870,27 +2865,24 @@ async fn test_fs_operations(
project_b
.update(cx_b, |project, cx| {
- project
- .create_entry((worktree_id, "DIR/e.txt"), false, cx)
- .unwrap()
+ project.create_entry((worktree_id, "DIR/e.txt"), false, cx)
})
.await
+ .unwrap()
.unwrap();
project_b
.update(cx_b, |project, cx| {
- project
- .create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
- .unwrap()
+ project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
})
.await
+ .unwrap()
.unwrap();
project_b
.update(cx_b, |project, cx| {
- project
- .create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
- .unwrap()
+ project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
})
.await
+ .unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@@ -2931,11 +2923,10 @@ async fn test_fs_operations(
project_b
.update(cx_b, |project, cx| {
- project
- .copy_entry(entry.id, Path::new("f.txt"), cx)
- .unwrap()
+ project.copy_entry(entry.id, Path::new("f.txt"), cx)
})
.await
+ .unwrap()
.unwrap();
worktree_a.read_with(cx_a, |worktree, _| {
@@ -665,7 +665,6 @@ impl RandomizedTest for ProjectCollaborationTest {
ensure_project_shared(&project, client, cx).await;
project
.update(cx, |p, cx| p.create_entry(project_path, is_dir, cx))
- .unwrap()
.await?;
}
@@ -221,7 +221,6 @@ impl TestServer {
fs: fs.clone(),
build_window_options: |_, _, _| Default::default(),
node_runtime: FakeNodeRuntime::new(),
- call_factory: |_| Box::new(workspace::TestCallHandler),
});
cx.update(|cx| {
@@ -18,7 +18,7 @@ mod contact_finder;
// };
use contact_finder::ContactFinder;
use menu::{Cancel, Confirm, SelectNext, SelectPrev};
-use rpc::proto;
+use rpc::proto::{self, PeerId};
use theme::{ActiveTheme, ThemeSettings};
// use context_menu::{ContextMenu, ContextMenuItem};
// use db::kvp::KEY_VALUE_STORE;
@@ -169,11 +169,12 @@ use editor::Editor;
use feature_flags::{ChannelsAlpha, FeatureFlagAppExt, FeatureFlagViewExt};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
- actions, div, img, overlay, prelude::*, px, rems, serde_json, Action, AppContext,
- AsyncWindowContext, Bounds, ClipboardItem, DismissEvent, Div, EventEmitter, FocusHandle,
- Focusable, FocusableView, InteractiveElement, IntoElement, Model, MouseDownEvent,
- ParentElement, Pixels, Point, PromptLevel, Render, RenderOnce, ScrollHandle, SharedString,
- Stateful, Styled, Subscription, Task, View, ViewContext, VisualContext, WeakView,
+ actions, canvas, div, img, overlay, point, prelude::*, px, rems, serde_json, Action,
+ AppContext, AsyncWindowContext, Bounds, ClipboardItem, DismissEvent, Div, EventEmitter,
+ FocusHandle, Focusable, FocusableView, Hsla, InteractiveElement, IntoElement, Length, Model,
+ MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, Quad, Render, RenderOnce,
+ ScrollHandle, SharedString, Size, Stateful, Styled, Subscription, Task, View, ViewContext,
+ VisualContext, WeakView,
};
use project::{Fs, Project};
use serde_derive::{Deserialize, Serialize};
@@ -347,21 +348,21 @@ enum Section {
#[derive(Clone, Debug)]
enum ListEntry {
Header(Section),
- // CallParticipant {
- // user: Arc<User>,
- // peer_id: Option<PeerId>,
- // is_pending: bool,
- // },
- // ParticipantProject {
- // project_id: u64,
- // worktree_root_names: Vec<String>,
- // host_user_id: u64,
- // is_last: bool,
- // },
- // ParticipantScreen {
- // peer_id: Option<PeerId>,
- // is_last: bool,
- // },
+ CallParticipant {
+ user: Arc<User>,
+ peer_id: Option<PeerId>,
+ is_pending: bool,
+ },
+ ParticipantProject {
+ project_id: u64,
+ worktree_root_names: Vec<String>,
+ host_user_id: u64,
+ is_last: bool,
+ },
+ ParticipantScreen {
+ peer_id: Option<PeerId>,
+ is_last: bool,
+ },
IncomingRequest(Arc<User>),
OutgoingRequest(Arc<User>),
// ChannelInvite(Arc<Channel>),
@@ -370,12 +371,12 @@ enum ListEntry {
depth: usize,
has_children: bool,
},
- // ChannelNotes {
- // channel_id: ChannelId,
- // },
- // ChannelChat {
- // channel_id: ChannelId,
- // },
+ ChannelNotes {
+ channel_id: ChannelId,
+ },
+ ChannelChat {
+ channel_id: ChannelId,
+ },
ChannelEditor {
depth: usize,
},
@@ -708,136 +709,136 @@ impl CollabPanel {
let prev_selected_entry = self.selection.and_then(|ix| self.entries.get(ix).cloned());
let old_entries = mem::take(&mut self.entries);
- let scroll_to_top = false;
-
- // if let Some(room) = ActiveCall::global(cx).read(cx).room() {
- // self.entries.push(ListEntry::Header(Section::ActiveCall));
- // if !old_entries
- // .iter()
- // .any(|entry| matches!(entry, ListEntry::Header(Section::ActiveCall)))
- // {
- // scroll_to_top = true;
- // }
+ let mut scroll_to_top = false;
- // if !self.collapsed_sections.contains(&Section::ActiveCall) {
- // let room = room.read(cx);
+ if let Some(room) = ActiveCall::global(cx).read(cx).room() {
+ self.entries.push(ListEntry::Header(Section::ActiveCall));
+ if !old_entries
+ .iter()
+ .any(|entry| matches!(entry, ListEntry::Header(Section::ActiveCall)))
+ {
+ scroll_to_top = true;
+ }
- // if let Some(channel_id) = room.channel_id() {
- // self.entries.push(ListEntry::ChannelNotes { channel_id });
- // self.entries.push(ListEntry::ChannelChat { channel_id })
- // }
+ if !self.collapsed_sections.contains(&Section::ActiveCall) {
+ let room = room.read(cx);
- // // Populate the active user.
- // if let Some(user) = user_store.current_user() {
- // self.match_candidates.clear();
- // self.match_candidates.push(StringMatchCandidate {
- // id: 0,
- // string: user.github_login.clone(),
- // char_bag: user.github_login.chars().collect(),
- // });
- // let matches = executor.block(match_strings(
- // &self.match_candidates,
- // &query,
- // true,
- // usize::MAX,
- // &Default::default(),
- // executor.clone(),
- // ));
- // if !matches.is_empty() {
- // let user_id = user.id;
- // self.entries.push(ListEntry::CallParticipant {
- // user,
- // peer_id: None,
- // is_pending: false,
- // });
- // let mut projects = room.local_participant().projects.iter().peekable();
- // while let Some(project) = projects.next() {
- // self.entries.push(ListEntry::ParticipantProject {
- // project_id: project.id,
- // worktree_root_names: project.worktree_root_names.clone(),
- // host_user_id: user_id,
- // is_last: projects.peek().is_none() && !room.is_screen_sharing(),
- // });
- // }
- // if room.is_screen_sharing() {
- // self.entries.push(ListEntry::ParticipantScreen {
- // peer_id: None,
- // is_last: true,
- // });
- // }
- // }
- // }
+ if let Some(channel_id) = room.channel_id() {
+ self.entries.push(ListEntry::ChannelNotes { channel_id });
+ self.entries.push(ListEntry::ChannelChat { channel_id })
+ }
- // // Populate remote participants.
- // self.match_candidates.clear();
- // self.match_candidates
- // .extend(room.remote_participants().iter().map(|(_, participant)| {
- // StringMatchCandidate {
- // id: participant.user.id as usize,
- // string: participant.user.github_login.clone(),
- // char_bag: participant.user.github_login.chars().collect(),
- // }
- // }));
- // let matches = executor.block(match_strings(
- // &self.match_candidates,
- // &query,
- // true,
- // usize::MAX,
- // &Default::default(),
- // executor.clone(),
- // ));
- // for mat in matches {
- // let user_id = mat.candidate_id as u64;
- // let participant = &room.remote_participants()[&user_id];
- // self.entries.push(ListEntry::CallParticipant {
- // user: participant.user.clone(),
- // peer_id: Some(participant.peer_id),
- // is_pending: false,
- // });
- // let mut projects = participant.projects.iter().peekable();
- // while let Some(project) = projects.next() {
- // self.entries.push(ListEntry::ParticipantProject {
- // project_id: project.id,
- // worktree_root_names: project.worktree_root_names.clone(),
- // host_user_id: participant.user.id,
- // is_last: projects.peek().is_none()
- // && participant.video_tracks.is_empty(),
- // });
- // }
- // if !participant.video_tracks.is_empty() {
- // self.entries.push(ListEntry::ParticipantScreen {
- // peer_id: Some(participant.peer_id),
- // is_last: true,
- // });
- // }
- // }
+ // Populate the active user.
+ if let Some(user) = user_store.current_user() {
+ self.match_candidates.clear();
+ self.match_candidates.push(StringMatchCandidate {
+ id: 0,
+ string: user.github_login.clone(),
+ char_bag: user.github_login.chars().collect(),
+ });
+ let matches = executor.block(match_strings(
+ &self.match_candidates,
+ &query,
+ true,
+ usize::MAX,
+ &Default::default(),
+ executor.clone(),
+ ));
+ if !matches.is_empty() {
+ let user_id = user.id;
+ self.entries.push(ListEntry::CallParticipant {
+ user,
+ peer_id: None,
+ is_pending: false,
+ });
+ let mut projects = room.local_participant().projects.iter().peekable();
+ while let Some(project) = projects.next() {
+ self.entries.push(ListEntry::ParticipantProject {
+ project_id: project.id,
+ worktree_root_names: project.worktree_root_names.clone(),
+ host_user_id: user_id,
+ is_last: projects.peek().is_none() && !room.is_screen_sharing(),
+ });
+ }
+ if room.is_screen_sharing() {
+ self.entries.push(ListEntry::ParticipantScreen {
+ peer_id: None,
+ is_last: true,
+ });
+ }
+ }
+ }
- // // Populate pending participants.
- // self.match_candidates.clear();
- // self.match_candidates
- // .extend(room.pending_participants().iter().enumerate().map(
- // |(id, participant)| StringMatchCandidate {
- // id,
- // string: participant.github_login.clone(),
- // char_bag: participant.github_login.chars().collect(),
- // },
- // ));
- // let matches = executor.block(match_strings(
- // &self.match_candidates,
- // &query,
- // true,
- // usize::MAX,
- // &Default::default(),
- // executor.clone(),
- // ));
- // self.entries
- // .extend(matches.iter().map(|mat| ListEntry::CallParticipant {
- // user: room.pending_participants()[mat.candidate_id].clone(),
- // peer_id: None,
- // is_pending: true,
- // }));
- // }
- // }
+ // Populate remote participants.
+ self.match_candidates.clear();
+ self.match_candidates
+ .extend(room.remote_participants().iter().map(|(_, participant)| {
+ StringMatchCandidate {
+ id: participant.user.id as usize,
+ string: participant.user.github_login.clone(),
+ char_bag: participant.user.github_login.chars().collect(),
+ }
+ }));
+ let matches = executor.block(match_strings(
+ &self.match_candidates,
+ &query,
+ true,
+ usize::MAX,
+ &Default::default(),
+ executor.clone(),
+ ));
+ for mat in matches {
+ let user_id = mat.candidate_id as u64;
+ let participant = &room.remote_participants()[&user_id];
+ self.entries.push(ListEntry::CallParticipant {
+ user: participant.user.clone(),
+ peer_id: Some(participant.peer_id),
+ is_pending: false,
+ });
+ let mut projects = participant.projects.iter().peekable();
+ while let Some(project) = projects.next() {
+ self.entries.push(ListEntry::ParticipantProject {
+ project_id: project.id,
+ worktree_root_names: project.worktree_root_names.clone(),
+ host_user_id: participant.user.id,
+ is_last: projects.peek().is_none()
+ && participant.video_tracks.is_empty(),
+ });
+ }
+ if !participant.video_tracks.is_empty() {
+ self.entries.push(ListEntry::ParticipantScreen {
+ peer_id: Some(participant.peer_id),
+ is_last: true,
+ });
+ }
+ }
+
+ // Populate pending participants.
+ self.match_candidates.clear();
+ self.match_candidates
+ .extend(room.pending_participants().iter().enumerate().map(
+ |(id, participant)| StringMatchCandidate {
+ id,
+ string: participant.github_login.clone(),
+ char_bag: participant.github_login.chars().collect(),
+ },
+ ));
+ let matches = executor.block(match_strings(
+ &self.match_candidates,
+ &query,
+ true,
+ usize::MAX,
+ &Default::default(),
+ executor.clone(),
+ ));
+ self.entries
+ .extend(matches.iter().map(|mat| ListEntry::CallParticipant {
+ user: room.pending_participants()[mat.candidate_id].clone(),
+ peer_id: None,
+ is_pending: true,
+ }));
+ }
+ }
let mut request_entries = Vec::new();
@@ -1135,290 +1136,234 @@ impl CollabPanel {
cx.notify();
}
- // fn render_call_participant(
- // user: &User,
- // peer_id: Option<PeerId>,
- // user_store: ModelHandle<UserStore>,
- // is_pending: bool,
- // is_selected: bool,
- // theme: &theme::Theme,
- // cx: &mut ViewContext<Self>,
- // ) -> AnyElement<Self> {
- // enum CallParticipant {}
- // enum CallParticipantTooltip {}
- // enum LeaveCallButton {}
- // enum LeaveCallTooltip {}
-
- // let collab_theme = &theme.collab_panel;
-
- // let is_current_user =
- // user_store.read(cx).current_user().map(|user| user.id) == Some(user.id);
-
- // let content = MouseEventHandler::new::<CallParticipant, _>(
- // user.id as usize,
- // cx,
- // |mouse_state, cx| {
- // let style = if is_current_user {
- // *collab_theme
- // .contact_row
- // .in_state(is_selected)
- // .style_for(&mut Default::default())
- // } else {
- // *collab_theme
- // .contact_row
- // .in_state(is_selected)
- // .style_for(mouse_state)
- // };
-
- // Flex::row()
- // .with_children(user.avatar.clone().map(|avatar| {
- // Image::from_data(avatar)
- // .with_style(collab_theme.contact_avatar)
- // .aligned()
- // .left()
- // }))
- // .with_child(
- // Label::new(
- // user.github_login.clone(),
- // collab_theme.contact_username.text.clone(),
- // )
- // .contained()
- // .with_style(collab_theme.contact_username.container)
- // .aligned()
- // .left()
- // .flex(1., true),
- // )
- // .with_children(if is_pending {
- // Some(
- // Label::new("Calling", collab_theme.calling_indicator.text.clone())
- // .contained()
- // .with_style(collab_theme.calling_indicator.container)
- // .aligned()
- // .into_any(),
- // )
- // } else if is_current_user {
- // Some(
- // MouseEventHandler::new::<LeaveCallButton, _>(0, cx, |state, _| {
- // render_icon_button(
- // theme
- // .collab_panel
- // .leave_call_button
- // .style_for(is_selected, state),
- // "icons/exit.svg",
- // )
- // })
- // .with_cursor_style(CursorStyle::PointingHand)
- // .on_click(MouseButton::Left, |_, _, cx| {
- // Self::leave_call(cx);
- // })
- // .with_tooltip::<LeaveCallTooltip>(
- // 0,
- // "Leave call",
- // None,
- // theme.tooltip.clone(),
- // cx,
- // )
- // .into_any(),
- // )
- // } else {
- // None
- // })
- // .constrained()
- // .with_height(collab_theme.row_height)
- // .contained()
- // .with_style(style)
- // },
- // );
-
- // if is_current_user || is_pending || peer_id.is_none() {
- // return content.into_any();
- // }
-
- // let tooltip = format!("Follow {}", user.github_login);
-
- // content
- // .on_click(MouseButton::Left, move |_, this, cx| {
- // if let Some(workspace) = this.workspace.upgrade(cx) {
- // workspace
- // .update(cx, |workspace, cx| workspace.follow(peer_id.unwrap(), cx))
- // .map(|task| task.detach_and_log_err(cx));
- // }
- // })
- // .with_cursor_style(CursorStyle::PointingHand)
- // .with_tooltip::<CallParticipantTooltip>(
- // user.id as usize,
- // tooltip,
- // Some(Box::new(FollowNextCollaborator)),
- // theme.tooltip.clone(),
- // cx,
- // )
- // .into_any()
- // }
+ fn render_call_participant(
+ &self,
+ user: Arc<User>,
+ peer_id: Option<PeerId>,
+ is_pending: bool,
+ cx: &mut ViewContext<Self>,
+ ) -> impl IntoElement {
+ let is_current_user =
+ self.user_store.read(cx).current_user().map(|user| user.id) == Some(user.id);
+ let tooltip = format!("Follow {}", user.github_login);
- // fn render_participant_project(
- // project_id: u64,
- // worktree_root_names: &[String],
- // host_user_id: u64,
- // is_current: bool,
- // is_last: bool,
- // is_selected: bool,
- // theme: &theme::Theme,
- // cx: &mut ViewContext<Self>,
- // ) -> AnyElement<Self> {
- // enum JoinProject {}
- // enum JoinProjectTooltip {}
-
- // let collab_theme = &theme.collab_panel;
- // let host_avatar_width = collab_theme
- // .contact_avatar
- // .width
- // .or(collab_theme.contact_avatar.height)
- // .unwrap_or(0.);
- // let tree_branch = collab_theme.tree_branch;
- // let project_name = if worktree_root_names.is_empty() {
- // "untitled".to_string()
- // } else {
- // worktree_root_names.join(", ")
- // };
-
- // let content =
- // MouseEventHandler::new::<JoinProject, _>(project_id as usize, cx, |mouse_state, cx| {
- // let tree_branch = *tree_branch.in_state(is_selected).style_for(mouse_state);
- // let row = if is_current {
- // collab_theme
- // .project_row
- // .in_state(true)
- // .style_for(&mut Default::default())
- // } else {
- // collab_theme
- // .project_row
- // .in_state(is_selected)
- // .style_for(mouse_state)
- // };
-
- // Flex::row()
- // .with_child(render_tree_branch(
- // tree_branch,
- // &row.name.text,
- // is_last,
- // vec2f(host_avatar_width, collab_theme.row_height),
- // cx.font_cache(),
- // ))
- // .with_child(
- // Svg::new("icons/file_icons/folder.svg")
- // .with_color(collab_theme.channel_hash.color)
- // .constrained()
- // .with_width(collab_theme.channel_hash.width)
- // .aligned()
- // .left(),
- // )
- // .with_child(
- // Label::new(project_name.clone(), row.name.text.clone())
- // .aligned()
- // .left()
- // .contained()
- // .with_style(row.name.container)
- // .flex(1., false),
- // )
- // .constrained()
- // .with_height(collab_theme.row_height)
- // .contained()
- // .with_style(row.container)
- // });
-
- // if is_current {
- // return content.into_any();
- // }
-
- // content
- // .with_cursor_style(CursorStyle::PointingHand)
- // .on_click(MouseButton::Left, move |_, this, cx| {
- // if let Some(workspace) = this.workspace.upgrade(cx) {
- // let app_state = workspace.read(cx).app_state().clone();
- // workspace::join_remote_project(project_id, host_user_id, app_state, cx)
- // .detach_and_log_err(cx);
- // }
- // })
- // .with_tooltip::<JoinProjectTooltip>(
- // project_id as usize,
- // format!("Open {}", project_name),
- // None,
- // theme.tooltip.clone(),
- // cx,
- // )
- // .into_any()
- // }
+ ListItem::new(SharedString::from(user.github_login.clone()))
+ .left_child(Avatar::data(user.avatar.clone().unwrap()))
+ .child(
+ h_stack()
+ .w_full()
+ .justify_between()
+ .child(Label::new(user.github_login.clone()))
+ .child(if is_pending {
+ Label::new("Calling").color(Color::Muted).into_any_element()
+ } else if is_current_user {
+ IconButton::new("leave-call", Icon::ArrowRight)
+ .on_click(cx.listener(move |this, _, cx| {
+ Self::leave_call(cx);
+ }))
+ .tooltip(|cx| Tooltip::text("Leave Call", cx))
+ .into_any_element()
+ } else {
+ div().into_any_element()
+ }),
+ )
+ .when_some(peer_id, |this, peer_id| {
+ this.tooltip(move |cx| Tooltip::text(tooltip.clone(), cx))
+ .on_click(cx.listener(move |this, _, cx| {
+ this.workspace
+ .update(cx, |workspace, cx| workspace.follow(peer_id, cx));
+ }))
+ })
+ }
- // fn render_participant_screen(
- // peer_id: Option<PeerId>,
- // is_last: bool,
- // is_selected: bool,
- // theme: &theme::CollabPanel,
- // cx: &mut ViewContext<Self>,
- // ) -> AnyElement<Self> {
- // enum OpenSharedScreen {}
-
- // let host_avatar_width = theme
- // .contact_avatar
- // .width
- // .or(theme.contact_avatar.height)
- // .unwrap_or(0.);
- // let tree_branch = theme.tree_branch;
-
- // let handler = MouseEventHandler::new::<OpenSharedScreen, _>(
- // peer_id.map(|id| id.as_u64()).unwrap_or(0) as usize,
- // cx,
- // |mouse_state, cx| {
- // let tree_branch = *tree_branch.in_state(is_selected).style_for(mouse_state);
- // let row = theme
- // .project_row
- // .in_state(is_selected)
- // .style_for(mouse_state);
-
- // Flex::row()
- // .with_child(render_tree_branch(
- // tree_branch,
- // &row.name.text,
- // is_last,
- // vec2f(host_avatar_width, theme.row_height),
- // cx.font_cache(),
- // ))
- // .with_child(
- // Svg::new("icons/desktop.svg")
- // .with_color(theme.channel_hash.color)
- // .constrained()
- // .with_width(theme.channel_hash.width)
- // .aligned()
- // .left(),
- // )
- // .with_child(
- // Label::new("Screen", row.name.text.clone())
- // .aligned()
- // .left()
- // .contained()
- // .with_style(row.name.container)
- // .flex(1., false),
- // )
- // .constrained()
- // .with_height(theme.row_height)
- // .contained()
- // .with_style(row.container)
- // },
- // );
- // if peer_id.is_none() {
- // return handler.into_any();
- // }
- // handler
- // .with_cursor_style(CursorStyle::PointingHand)
- // .on_click(MouseButton::Left, move |_, this, cx| {
- // if let Some(workspace) = this.workspace.upgrade(cx) {
- // workspace.update(cx, |workspace, cx| {
- // workspace.open_shared_screen(peer_id.unwrap(), cx)
- // });
- // }
- // })
- // .into_any()
- // }
+ fn render_participant_project(
+ &self,
+ project_id: u64,
+ worktree_root_names: &[String],
+ host_user_id: u64,
+ // is_current: bool,
+ is_last: bool,
+ // is_selected: bool,
+ // theme: &theme::Theme,
+ cx: &mut ViewContext<Self>,
+ ) -> impl IntoElement {
+ let project_name: SharedString = if worktree_root_names.is_empty() {
+ "untitled".to_string()
+ } else {
+ worktree_root_names.join(", ")
+ }
+ .into();
+
+ let theme = cx.theme();
+
+ ListItem::new(project_id as usize)
+ .on_click(cx.listener(move |this, _, cx| {
+ this.workspace.update(cx, |workspace, cx| {
+ let app_state = workspace.app_state().clone();
+ workspace::join_remote_project(project_id, host_user_id, app_state, cx)
+ .detach_and_log_err(cx);
+ });
+ }))
+ .left_child(IconButton::new(0, Icon::Folder))
+ .child(
+ h_stack()
+ .w_full()
+ .justify_between()
+ .child(render_tree_branch(is_last, cx))
+ .child(Label::new(project_name.clone())),
+ )
+ .tooltip(move |cx| Tooltip::text(format!("Open {}", project_name), cx))
+
+ // enum JoinProject {}
+ // enum JoinProjectTooltip {}
+
+ // let collab_theme = &theme.collab_panel;
+ // let host_avatar_width = collab_theme
+ // .contact_avatar
+ // .width
+ // .or(collab_theme.contact_avatar.height)
+ // .unwrap_or(0.);
+ // let tree_branch = collab_theme.tree_branch;
+
+ // let content =
+ // MouseEventHandler::new::<JoinProject, _>(project_id as usize, cx, |mouse_state, cx| {
+ // let tree_branch = *tree_branch.in_state(is_selected).style_for(mouse_state);
+ // let row = if is_current {
+ // collab_theme
+ // .project_row
+ // .in_state(true)
+ // .style_for(&mut Default::default())
+ // } else {
+ // collab_theme
+ // .project_row
+ // .in_state(is_selected)
+ // .style_for(mouse_state)
+ // };
+
+ // Flex::row()
+ // .with_child(render_tree_branch(
+ // tree_branch,
+ // &row.name.text,
+ // is_last,
+ // vec2f(host_avatar_width, collab_theme.row_height),
+ // cx.font_cache(),
+ // ))
+ // .with_child(
+ // Svg::new("icons/file_icons/folder.svg")
+ // .with_color(collab_theme.channel_hash.color)
+ // .constrained()
+ // .with_width(collab_theme.channel_hash.width)
+ // .aligned()
+ // .left(),
+ // )
+ // .with_child(
+ // Label::new(project_name.clone(), row.name.text.clone())
+ // .aligned()
+ // .left()
+ // .contained()
+ // .with_style(row.name.container)
+ // .flex(1., false),
+ // )
+ // .constrained()
+ // .with_height(collab_theme.row_height)
+ // .contained()
+ // .with_style(row.container)
+ // });
+
+ // if is_current {
+ // return content.into_any();
+ // }
+
+ // content
+ // .with_cursor_style(CursorStyle::PointingHand)
+ // .on_click(MouseButton::Left, move |_, this, cx| {
+ // if let Some(workspace) = this.workspace.upgrade(cx) {
+ // let app_state = workspace.read(cx).app_state().clone();
+ // workspace::join_remote_project(project_id, host_user_id, app_state, cx)
+ // .detach_and_log_err(cx);
+ // }
+ // })
+ // .with_tooltip::<JoinProjectTooltip>(
+ // project_id as usize,
+ // format!("Open {}", project_name),
+ // None,
+ // theme.tooltip.clone(),
+ // cx,
+ // )
+ // .into_any()
+ }
+
+ fn render_participant_screen(
+ &self,
+ peer_id: Option<PeerId>,
+ is_last: bool,
+ cx: &mut ViewContext<Self>,
+ ) -> impl IntoElement {
+ // enum OpenSharedScreen {}
+
+ // let host_avatar_width = theme
+ // .contact_avatar
+ // .width
+ // .or(theme.contact_avatar.height)
+ // .unwrap_or(0.);
+ // let tree_branch = theme.tree_branch;
+
+ // let handler = MouseEventHandler::new::<OpenSharedScreen, _>(
+ // peer_id.map(|id| id.as_u64()).unwrap_or(0) as usize,
+ // cx,
+ // |mouse_state, cx| {
+ // let tree_branch = *tree_branch.in_state(is_selected).style_for(mouse_state);
+ // let row = theme
+ // .project_row
+ // .in_state(is_selected)
+ // .style_for(mouse_state);
+
+ // Flex::row()
+ // .with_child(render_tree_branch(
+ // tree_branch,
+ // &row.name.text,
+ // is_last,
+ // vec2f(host_avatar_width, theme.row_height),
+ // cx.font_cache(),
+ // ))
+ // .with_child(
+ // Svg::new("icons/desktop.svg")
+ // .with_color(theme.channel_hash.color)
+ // .constrained()
+ // .with_width(theme.channel_hash.width)
+ // .aligned()
+ // .left(),
+ // )
+ // .with_child(
+ // Label::new("Screen", row.name.text.clone())
+ // .aligned()
+ // .left()
+ // .contained()
+ // .with_style(row.name.container)
+ // .flex(1., false),
+ // )
+ // .constrained()
+ // .with_height(theme.row_height)
+ // .contained()
+ // .with_style(row.container)
+ // },
+ // );
+ // if peer_id.is_none() {
+ // return handler.into_any();
+ // }
+ // handler
+ // .with_cursor_style(CursorStyle::PointingHand)
+ // .on_click(MouseButton::Left, move |_, this, cx| {
+ // if let Some(workspace) = this.workspace.upgrade(cx) {
+ // workspace.update(cx, |workspace, cx| {
+ // workspace.open_shared_screen(peer_id.unwrap(), cx)
+ // });
+ // }
+ // })
+ // .into_any()
+
+ div()
+ }
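
The `div()` above is a placeholder while the shared-screen row is ported. For reference, one possible shape for this stub, following the `ListItem` pattern used in `render_participant_project` above; this is a hedged sketch rather than the actual implementation, with `Icon::Screen` and the `open_shared_screen` call borrowed from elsewhere in this diff:

    // Sketch only: mirrors render_participant_project; not part of the change set.
    ListItem::new("shared-screen")
        .left_child(IconButton::new("screen-icon", Icon::Screen))
        .child(
            h_stack()
                .w_full()
                .justify_between()
                .child(render_tree_branch(is_last, cx))
                .child(Label::new("Screen")),
        )
        .when_some(peer_id, |this, peer_id| {
            // Clicking the row opens the remote participant's shared screen.
            this.on_click(cx.listener(move |this, _, cx| {
                this.workspace.update(cx, |workspace, cx| {
                    workspace.open_shared_screen(peer_id, cx)
                });
            }))
        })
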
fn take_editing_state(&mut self, cx: &mut ViewContext<Self>) -> bool {
if let Some(_) = self.channel_editing_state.take() {
@@ -1465,117 +1410,114 @@ impl CollabPanel {
// .into_any()
// }
- // fn render_channel_notes(
- // &self,
- // channel_id: ChannelId,
- // theme: &theme::CollabPanel,
- // is_selected: bool,
- // ix: usize,
- // cx: &mut ViewContext<Self>,
- // ) -> AnyElement<Self> {
- // enum ChannelNotes {}
- // let host_avatar_width = theme
- // .contact_avatar
- // .width
- // .or(theme.contact_avatar.height)
- // .unwrap_or(0.);
-
- // MouseEventHandler::new::<ChannelNotes, _>(ix as usize, cx, |state, cx| {
- // let tree_branch = *theme.tree_branch.in_state(is_selected).style_for(state);
- // let row = theme.project_row.in_state(is_selected).style_for(state);
-
- // Flex::<Self>::row()
- // .with_child(render_tree_branch(
- // tree_branch,
- // &row.name.text,
- // false,
- // vec2f(host_avatar_width, theme.row_height),
- // cx.font_cache(),
- // ))
- // .with_child(
- // Svg::new("icons/file.svg")
- // .with_color(theme.channel_hash.color)
- // .constrained()
- // .with_width(theme.channel_hash.width)
- // .aligned()
- // .left(),
- // )
- // .with_child(
- // Label::new("notes", theme.channel_name.text.clone())
- // .contained()
- // .with_style(theme.channel_name.container)
- // .aligned()
- // .left()
- // .flex(1., true),
- // )
- // .constrained()
- // .with_height(theme.row_height)
- // .contained()
- // .with_style(*theme.channel_row.style_for(is_selected, state))
- // .with_padding_left(theme.channel_row.default_style().padding.left)
- // })
- // .on_click(MouseButton::Left, move |_, this, cx| {
- // this.open_channel_notes(&OpenChannelNotes { channel_id }, cx);
- // })
- // .with_cursor_style(CursorStyle::PointingHand)
- // .into_any()
- // }
+ fn render_channel_notes(
+ &self,
+ channel_id: ChannelId,
+ cx: &mut ViewContext<Self>,
+ ) -> impl IntoElement {
+ // enum ChannelNotes {}
+ // let host_avatar_width = theme
+ // .contact_avatar
+ // .width
+ // .or(theme.contact_avatar.height)
+ // .unwrap_or(0.);
+
+ // MouseEventHandler::new::<ChannelNotes, _>(ix as usize, cx, |state, cx| {
+ // let tree_branch = *theme.tree_branch.in_state(is_selected).style_for(state);
+ // let row = theme.project_row.in_state(is_selected).style_for(state);
+
+ // Flex::<Self>::row()
+ // .with_child(render_tree_branch(
+ // tree_branch,
+ // &row.name.text,
+ // false,
+ // vec2f(host_avatar_width, theme.row_height),
+ // cx.font_cache(),
+ // ))
+ // .with_child(
+ // Svg::new("icons/file.svg")
+ // .with_color(theme.channel_hash.color)
+ // .constrained()
+ // .with_width(theme.channel_hash.width)
+ // .aligned()
+ // .left(),
+ // )
+ // .with_child(
+ // Label::new("notes", theme.channel_name.text.clone())
+ // .contained()
+ // .with_style(theme.channel_name.container)
+ // .aligned()
+ // .left()
+ // .flex(1., true),
+ // )
+ // .constrained()
+ // .with_height(theme.row_height)
+ // .contained()
+ // .with_style(*theme.channel_row.style_for(is_selected, state))
+ // .with_padding_left(theme.channel_row.default_style().padding.left)
+ // })
+ // .on_click(MouseButton::Left, move |_, this, cx| {
+ // this.open_channel_notes(&OpenChannelNotes { channel_id }, cx);
+ // })
+ // .with_cursor_style(CursorStyle::PointingHand)
+ // .into_any()
- // fn render_channel_chat(
- // &self,
- // channel_id: ChannelId,
- // theme: &theme::CollabPanel,
- // is_selected: bool,
- // ix: usize,
- // cx: &mut ViewContext<Self>,
- // ) -> AnyElement<Self> {
- // enum ChannelChat {}
- // let host_avatar_width = theme
- // .contact_avatar
- // .width
- // .or(theme.contact_avatar.height)
- // .unwrap_or(0.);
-
- // MouseEventHandler::new::<ChannelChat, _>(ix as usize, cx, |state, cx| {
- // let tree_branch = *theme.tree_branch.in_state(is_selected).style_for(state);
- // let row = theme.project_row.in_state(is_selected).style_for(state);
-
- // Flex::<Self>::row()
- // .with_child(render_tree_branch(
- // tree_branch,
- // &row.name.text,
- // true,
- // vec2f(host_avatar_width, theme.row_height),
- // cx.font_cache(),
- // ))
- // .with_child(
- // Svg::new("icons/conversations.svg")
- // .with_color(theme.channel_hash.color)
- // .constrained()
- // .with_width(theme.channel_hash.width)
- // .aligned()
- // .left(),
- // )
- // .with_child(
- // Label::new("chat", theme.channel_name.text.clone())
- // .contained()
- // .with_style(theme.channel_name.container)
- // .aligned()
- // .left()
- // .flex(1., true),
- // )
- // .constrained()
- // .with_height(theme.row_height)
- // .contained()
- // .with_style(*theme.channel_row.style_for(is_selected, state))
- // .with_padding_left(theme.channel_row.default_style().padding.left)
- // })
- // .on_click(MouseButton::Left, move |_, this, cx| {
- // this.join_channel_chat(&JoinChannelChat { channel_id }, cx);
- // })
- // .with_cursor_style(CursorStyle::PointingHand)
- // .into_any()
- // }
+ div()
+ }
+
+ fn render_channel_chat(
+ &self,
+ channel_id: ChannelId,
+ cx: &mut ViewContext<Self>,
+ ) -> impl IntoElement {
+ // enum ChannelChat {}
+ // let host_avatar_width = theme
+ // .contact_avatar
+ // .width
+ // .or(theme.contact_avatar.height)
+ // .unwrap_or(0.);
+
+ // MouseEventHandler::new::<ChannelChat, _>(ix as usize, cx, |state, cx| {
+ // let tree_branch = *theme.tree_branch.in_state(is_selected).style_for(state);
+ // let row = theme.project_row.in_state(is_selected).style_for(state);
+
+ // Flex::<Self>::row()
+ // .with_child(render_tree_branch(
+ // tree_branch,
+ // &row.name.text,
+ // true,
+ // vec2f(host_avatar_width, theme.row_height),
+ // cx.font_cache(),
+ // ))
+ // .with_child(
+ // Svg::new("icons/conversations.svg")
+ // .with_color(theme.channel_hash.color)
+ // .constrained()
+ // .with_width(theme.channel_hash.width)
+ // .aligned()
+ // .left(),
+ // )
+ // .with_child(
+ // Label::new("chat", theme.channel_name.text.clone())
+ // .contained()
+ // .with_style(theme.channel_name.container)
+ // .aligned()
+ // .left()
+ // .flex(1., true),
+ // )
+ // .constrained()
+ // .with_height(theme.row_height)
+ // .contained()
+ // .with_style(*theme.channel_row.style_for(is_selected, state))
+ // .with_padding_left(theme.channel_row.default_style().padding.left)
+ // })
+ // .on_click(MouseButton::Left, move |_, this, cx| {
+ // this.join_channel_chat(&JoinChannelChat { channel_id }, cx);
+ // })
+ // .with_cursor_style(CursorStyle::PointingHand)
+ // .into_any()
+ div()
+ }
// fn render_channel_invite(
// channel: Arc<Channel>,
@@ -31,9 +31,9 @@ use std::sync::Arc;
use call::ActiveCall;
use client::{Client, UserStore};
use gpui::{
- div, px, rems, AppContext, Div, Element, InteractiveElement, IntoElement, Model, MouseButton,
- ParentElement, Render, RenderOnce, Stateful, StatefulInteractiveElement, Styled, Subscription,
- ViewContext, VisualContext, WeakView, WindowBounds,
+ actions, div, px, rems, AppContext, Div, Element, InteractiveElement, IntoElement, Model,
+ MouseButton, ParentElement, Render, RenderOnce, Stateful, StatefulInteractiveElement, Styled,
+ Subscription, ViewContext, VisualContext, WeakView, WindowBounds,
};
use project::{Project, RepositoryEntry};
use theme::ActiveTheme;
@@ -49,6 +49,14 @@ use crate::face_pile::FacePile;
const MAX_PROJECT_NAME_LENGTH: usize = 40;
const MAX_BRANCH_NAME_LENGTH: usize = 40;
+actions!(
+ ShareProject,
+ UnshareProject,
+ ToggleUserMenu,
+ ToggleProjectMenu,
+ SwitchBranch
+);
+
// actions!(
// collab,
// [
@@ -91,37 +99,23 @@ impl Render for CollabTitlebarItem {
type Element = Stateful<Div>;
fn render(&mut self, cx: &mut ViewContext<Self>) -> Self::Element {
- let is_in_room = self
- .workspace
- .update(cx, |this, cx| this.call_state().is_in_room(cx))
- .unwrap_or_default();
+ let room = ActiveCall::global(cx).read(cx).room();
+ let is_in_room = room.is_some();
let is_shared = is_in_room && self.project.read(cx).is_shared();
let current_user = self.user_store.read(cx).current_user();
let client = self.client.clone();
- let users = self
- .workspace
- .update(cx, |this, cx| this.call_state().remote_participants(cx))
- .log_err()
- .flatten();
- let is_muted = self
- .workspace
- .update(cx, |this, cx| this.call_state().is_muted(cx))
- .log_err()
- .flatten()
- .unwrap_or_default();
- let is_deafened = self
- .workspace
- .update(cx, |this, cx| this.call_state().is_deafened(cx))
- .log_err()
- .flatten()
- .unwrap_or_default();
- let speakers_icon = if self
- .workspace
- .update(cx, |this, cx| this.call_state().is_deafened(cx))
- .log_err()
- .flatten()
- .unwrap_or_default()
- {
+ let remote_participants = room.map(|room| {
+ room.read(cx)
+ .remote_participants()
+ .values()
+ .map(|participant| (participant.user.clone(), participant.peer_id))
+ .collect::<Vec<_>>()
+ });
+ let is_muted = room.map_or(false, |room| room.read(cx).is_muted(cx));
+ let is_deafened = room
+ .and_then(|room| room.read(cx).is_deafened())
+ .unwrap_or(false);
+ let speakers_icon = if is_deafened {
ui::Icon::AudioOff
} else {
ui::Icon::AudioOn
@@ -157,7 +151,7 @@ impl Render for CollabTitlebarItem {
.children(self.render_project_branch(cx)),
)
.when_some(
- users.zip(current_user.clone()),
+ remote_participants.zip(current_user.clone()),
|this, (remote_participants, current_user)| {
let mut pile = FacePile::default();
pile.extend(
@@ -168,25 +162,30 @@ impl Render for CollabTitlebarItem {
div().child(Avatar::data(avatar.clone())).into_any_element()
})
.into_iter()
- .chain(remote_participants.into_iter().flat_map(|(user, peer_id)| {
- user.avatar.as_ref().map(|avatar| {
- div()
- .child(
- Avatar::data(avatar.clone()).into_element().into_any(),
- )
- .on_mouse_down(MouseButton::Left, {
- let workspace = workspace.clone();
- move |_, cx| {
- workspace
- .update(cx, |this, cx| {
- this.open_shared_screen(peer_id, cx);
- })
- .log_err();
- }
- })
- .into_any_element()
- })
- })),
+ .chain(remote_participants.into_iter().filter_map(
+ |(user, peer_id)| {
+ let avatar = user.avatar.as_ref()?;
+ Some(
+ div()
+ .child(
+ Avatar::data(avatar.clone())
+ .into_element()
+ .into_any(),
+ )
+ .on_mouse_down(MouseButton::Left, {
+ let workspace = workspace.clone();
+ move |_, cx| {
+ workspace
+ .update(cx, |this, cx| {
+ this.open_shared_screen(peer_id, cx);
+ })
+ .log_err();
+ }
+ })
+ .into_any_element(),
+ )
+ },
+ )),
);
this.child(pile.render(cx))
},
@@ -204,20 +203,24 @@ impl Render for CollabTitlebarItem {
"toggle_sharing",
if is_shared { "Unshare" } else { "Share" },
)
- .style(ButtonStyle::Subtle),
+ .style(ButtonStyle::Subtle)
+ .on_click(cx.listener(
+ move |this, _, cx| {
+ if is_shared {
+ this.unshare_project(&Default::default(), cx);
+ } else {
+ this.share_project(&Default::default(), cx);
+ }
+ },
+ )),
)
.child(
IconButton::new("leave-call", ui::Icon::Exit)
.style(ButtonStyle::Subtle)
- .on_click({
- let workspace = workspace.clone();
- move |_, cx| {
- workspace
- .update(cx, |this, cx| {
- this.call_state().hang_up(cx).detach();
- })
- .log_err();
- }
+ .on_click(move |_, cx| {
+ ActiveCall::global(cx)
+ .update(cx, |call, cx| call.hang_up(cx))
+ .detach_and_log_err(cx);
}),
),
)
@@ -235,15 +238,8 @@ impl Render for CollabTitlebarItem {
)
.style(ButtonStyle::Subtle)
.selected(is_muted)
- .on_click({
- let workspace = workspace.clone();
- move |_, cx| {
- workspace
- .update(cx, |this, cx| {
- this.call_state().toggle_mute(cx);
- })
- .log_err();
- }
+ .on_click(move |_, cx| {
+ crate::toggle_mute(&Default::default(), cx)
}),
)
.child(
@@ -258,26 +254,15 @@ impl Render for CollabTitlebarItem {
cx,
)
})
- .on_click({
- let workspace = workspace.clone();
- move |_, cx| {
- workspace
- .update(cx, |this, cx| {
- this.call_state().toggle_deafen(cx);
- })
- .log_err();
- }
+ .on_click(move |_, cx| {
+                            crate::toggle_deafen(&Default::default(), cx)
}),
)
.child(
IconButton::new("screen-share", ui::Icon::Screen)
.style(ButtonStyle::Subtle)
.on_click(move |_, cx| {
- workspace
- .update(cx, |this, cx| {
- this.call_state().toggle_screen_share(cx);
- })
- .log_err();
+ crate::toggle_screen_sharing(&Default::default(), cx)
}),
)
.pl_2(),
@@ -451,46 +436,19 @@ impl CollabTitlebarItem {
// render_project_owner -> resolve if you are in a room -> Option<foo>
pub fn render_project_owner(&self, cx: &mut ViewContext<Self>) -> Option<impl Element> {
- // TODO: We can't finish implementing this until project sharing works
- // - [ ] Show the project owner when the project is remote (maybe done)
- // - [x] Show the project owner when the project is local
- // - [ ] Show the project owner with a lock icon when the project is local and unshared
-
- let remote_id = self.project.read(cx).remote_id();
- let is_local = remote_id.is_none();
- let is_shared = self.project.read(cx).is_shared();
- let (user_name, participant_index) = {
- if let Some(host) = self.project.read(cx).host() {
- debug_assert!(!is_local);
- let (Some(host_user), Some(participant_index)) = (
- self.user_store.read(cx).get_cached_user(host.user_id),
- self.user_store
- .read(cx)
- .participant_indices()
- .get(&host.user_id),
- ) else {
- return None;
- };
- (host_user.github_login.clone(), participant_index.0)
- } else {
- debug_assert!(is_local);
- let name = self
- .user_store
- .read(cx)
- .current_user()
- .map(|user| user.github_login.clone())?;
- (name, 0)
- }
- };
+ let host = self.project.read(cx).host()?;
+ let host = self.user_store.read(cx).get_cached_user(host.user_id)?;
+ let participant_index = self
+ .user_store
+ .read(cx)
+ .participant_indices()
+ .get(&host.id)?;
Some(
div().border().border_color(gpui::red()).child(
- Button::new(
- "project_owner_trigger",
- format!("{user_name} ({})", !is_shared),
- )
- .color(Color::Player(participant_index))
- .style(ButtonStyle::Subtle)
- .tooltip(move |cx| Tooltip::text("Toggle following", cx)),
+ Button::new("project_owner_trigger", host.github_login.clone())
+ .color(Color::Player(participant_index.0))
+ .style(ButtonStyle::Subtle)
+ .tooltip(move |cx| Tooltip::text("Toggle following", cx)),
),
)
}
@@ -730,21 +688,21 @@ impl CollabTitlebarItem {
cx.notify();
}
- // fn share_project(&mut self, _: &ShareProject, cx: &mut ViewContext<Self>) {
- // let active_call = ActiveCall::global(cx);
- // let project = self.project.clone();
- // active_call
- // .update(cx, |call, cx| call.share_project(project, cx))
- // .detach_and_log_err(cx);
- // }
+ fn share_project(&mut self, _: &ShareProject, cx: &mut ViewContext<Self>) {
+ let active_call = ActiveCall::global(cx);
+ let project = self.project.clone();
+ active_call
+ .update(cx, |call, cx| call.share_project(project, cx))
+ .detach_and_log_err(cx);
+ }
- // fn unshare_project(&mut self, _: &UnshareProject, cx: &mut ViewContext<Self>) {
- // let active_call = ActiveCall::global(cx);
- // let project = self.project.clone();
- // active_call
- // .update(cx, |call, cx| call.unshare_project(project, cx))
- // .log_err();
- // }
+ fn unshare_project(&mut self, _: &UnshareProject, cx: &mut ViewContext<Self>) {
+ let active_call = ActiveCall::global(cx);
+ let project = self.project.clone();
+ active_call
+ .update(cx, |call, cx| call.unshare_project(project, cx))
+ .log_err();
+ }
// pub fn toggle_user_menu(&mut self, _: &ToggleUserMenu, cx: &mut ViewContext<Self>) {
// self.user_menu.update(cx, |user_menu, cx| {
@@ -9,22 +9,21 @@ mod panel_settings;
use std::{rc::Rc, sync::Arc};
+use call::{report_call_event_for_room, ActiveCall, Room};
pub use collab_panel::CollabPanel;
pub use collab_titlebar_item::CollabTitlebarItem;
use gpui::{
- point, AppContext, GlobalPixels, Pixels, PlatformDisplay, Size, WindowBounds, WindowKind,
- WindowOptions,
+ actions, point, AppContext, GlobalPixels, Pixels, PlatformDisplay, Size, Task, WindowBounds,
+ WindowKind, WindowOptions,
};
pub use panel_settings::{
ChatPanelSettings, CollaborationPanelSettings, NotificationPanelSettings,
};
use settings::Settings;
+use util::ResultExt;
use workspace::AppState;
-// actions!(
-// collab,
-// [ToggleScreenSharing, ToggleMute, ToggleDeafen, LeaveCall]
-// );
+actions!(ToggleScreenSharing, ToggleMute, ToggleDeafen, LeaveCall);
pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
CollaborationPanelSettings::register(cx);
@@ -42,61 +41,61 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
// cx.add_global_action(toggle_deafen);
}
-// pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
-// let call = ActiveCall::global(cx).read(cx);
-// if let Some(room) = call.room().cloned() {
-// let client = call.client();
-// let toggle_screen_sharing = room.update(cx, |room, cx| {
-// if room.is_screen_sharing() {
-// report_call_event_for_room(
-// "disable screen share",
-// room.id(),
-// room.channel_id(),
-// &client,
-// cx,
-// );
-// Task::ready(room.unshare_screen(cx))
-// } else {
-// report_call_event_for_room(
-// "enable screen share",
-// room.id(),
-// room.channel_id(),
-// &client,
-// cx,
-// );
-// room.share_screen(cx)
-// }
-// });
-// toggle_screen_sharing.detach_and_log_err(cx);
-// }
-// }
+pub fn toggle_screen_sharing(_: &ToggleScreenSharing, cx: &mut AppContext) {
+ let call = ActiveCall::global(cx).read(cx);
+ if let Some(room) = call.room().cloned() {
+ let client = call.client();
+ let toggle_screen_sharing = room.update(cx, |room, cx| {
+ if room.is_screen_sharing() {
+ report_call_event_for_room(
+ "disable screen share",
+ room.id(),
+ room.channel_id(),
+ &client,
+ cx,
+ );
+ Task::ready(room.unshare_screen(cx))
+ } else {
+ report_call_event_for_room(
+ "enable screen share",
+ room.id(),
+ room.channel_id(),
+ &client,
+ cx,
+ );
+ room.share_screen(cx)
+ }
+ });
+ toggle_screen_sharing.detach_and_log_err(cx);
+ }
+}
-// pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
-// let call = ActiveCall::global(cx).read(cx);
-// if let Some(room) = call.room().cloned() {
-// let client = call.client();
-// room.update(cx, |room, cx| {
-// let operation = if room.is_muted(cx) {
-// "enable microphone"
-// } else {
-// "disable microphone"
-// };
-// report_call_event_for_room(operation, room.id(), room.channel_id(), &client, cx);
+pub fn toggle_mute(_: &ToggleMute, cx: &mut AppContext) {
+ let call = ActiveCall::global(cx).read(cx);
+ if let Some(room) = call.room().cloned() {
+ let client = call.client();
+ room.update(cx, |room, cx| {
+ let operation = if room.is_muted(cx) {
+ "enable microphone"
+ } else {
+ "disable microphone"
+ };
+ report_call_event_for_room(operation, room.id(), room.channel_id(), &client, cx);
-// room.toggle_mute(cx)
-// })
-// .map(|task| task.detach_and_log_err(cx))
-// .log_err();
-// }
-// }
+ room.toggle_mute(cx)
+ })
+ .map(|task| task.detach_and_log_err(cx))
+ .log_err();
+ }
+}
-// pub fn toggle_deafen(_: &ToggleDeafen, cx: &mut AppContext) {
-// if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
-// room.update(cx, Room::toggle_deafen)
-// .map(|task| task.detach_and_log_err(cx))
-// .log_err();
-// }
-// }
+pub fn toggle_deafen(_: &ToggleDeafen, cx: &mut AppContext) {
+ if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
+ room.update(cx, Room::toggle_deafen)
+ .map(|task| task.detach_and_log_err(cx))
+ .log_err();
+ }
+}
fn notification_window_options(
screen: Rc<dyn PlatformDisplay>,
@@ -311,7 +311,11 @@ impl PickerDelegate for CommandPaletteDelegate {
command.name.clone(),
r#match.positions.clone(),
))
- .children(KeyBinding::for_action(&*command.action, cx)),
+ .children(KeyBinding::for_action_in(
+ &*command.action,
+ &self.previous_focus_handle,
+ cx,
+ )),
),
)
}
@@ -45,6 +45,6 @@ fs = { path = "../fs", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
language = { package = "language2", path = "../language2", features = ["test-support"] }
lsp = { package = "lsp2", path = "../lsp2", features = ["test-support"] }
-rpc = { path = "../rpc", features = ["test-support"] }
+rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
@@ -1002,229 +1002,231 @@ async fn get_copilot_lsp(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {
}
}
-// #[cfg(test)]
-// mod tests {
-// use super::*;
-// use gpui::{executor::Deterministic, TestAppContext};
-
-// #[gpui::test(iterations = 10)]
-// async fn test_buffer_management(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
-// deterministic.forbid_parking();
-// let (copilot, mut lsp) = Copilot::fake(cx);
-
-// let buffer_1 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Hello"));
-// let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.id()).parse().unwrap();
-// copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx));
-// assert_eq!(
-// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
-// .await,
-// lsp::DidOpenTextDocumentParams {
-// text_document: lsp::TextDocumentItem::new(
-// buffer_1_uri.clone(),
-// "plaintext".into(),
-// 0,
-// "Hello".into()
-// ),
-// }
-// );
-
-// let buffer_2 = cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "Goodbye"));
-// let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.id()).parse().unwrap();
-// copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx));
-// assert_eq!(
-// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
-// .await,
-// lsp::DidOpenTextDocumentParams {
-// text_document: lsp::TextDocumentItem::new(
-// buffer_2_uri.clone(),
-// "plaintext".into(),
-// 0,
-// "Goodbye".into()
-// ),
-// }
-// );
-
-// buffer_1.update(cx, |buffer, cx| buffer.edit([(5..5, " world")], None, cx));
-// assert_eq!(
-// lsp.receive_notification::<lsp::notification::DidChangeTextDocument>()
-// .await,
-// lsp::DidChangeTextDocumentParams {
-// text_document: lsp::VersionedTextDocumentIdentifier::new(buffer_1_uri.clone(), 1),
-// content_changes: vec![lsp::TextDocumentContentChangeEvent {
-// range: Some(lsp::Range::new(
-// lsp::Position::new(0, 5),
-// lsp::Position::new(0, 5)
-// )),
-// range_length: None,
-// text: " world".into(),
-// }],
-// }
-// );
-
-// // Ensure updates to the file are reflected in the LSP.
-// buffer_1
-// .update(cx, |buffer, cx| {
-// buffer.file_updated(
-// Arc::new(File {
-// abs_path: "/root/child/buffer-1".into(),
-// path: Path::new("child/buffer-1").into(),
-// }),
-// cx,
-// )
-// })
-// .await;
-// assert_eq!(
-// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
-// .await,
-// lsp::DidCloseTextDocumentParams {
-// text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri),
-// }
-// );
-// let buffer_1_uri = lsp::Url::from_file_path("/root/child/buffer-1").unwrap();
-// assert_eq!(
-// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
-// .await,
-// lsp::DidOpenTextDocumentParams {
-// text_document: lsp::TextDocumentItem::new(
-// buffer_1_uri.clone(),
-// "plaintext".into(),
-// 1,
-// "Hello world".into()
-// ),
-// }
-// );
-
-// // Ensure all previously-registered buffers are closed when signing out.
-// lsp.handle_request::<request::SignOut, _, _>(|_, _| async {
-// Ok(request::SignOutResult {})
-// });
-// copilot
-// .update(cx, |copilot, cx| copilot.sign_out(cx))
-// .await
-// .unwrap();
-// assert_eq!(
-// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
-// .await,
-// lsp::DidCloseTextDocumentParams {
-// text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri.clone()),
-// }
-// );
-// assert_eq!(
-// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
-// .await,
-// lsp::DidCloseTextDocumentParams {
-// text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri.clone()),
-// }
-// );
-
-// // Ensure all previously-registered buffers are re-opened when signing in.
-// lsp.handle_request::<request::SignInInitiate, _, _>(|_, _| async {
-// Ok(request::SignInInitiateResult::AlreadySignedIn {
-// user: "user-1".into(),
-// })
-// });
-// copilot
-// .update(cx, |copilot, cx| copilot.sign_in(cx))
-// .await
-// .unwrap();
-// assert_eq!(
-// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
-// .await,
-// lsp::DidOpenTextDocumentParams {
-// text_document: lsp::TextDocumentItem::new(
-// buffer_2_uri.clone(),
-// "plaintext".into(),
-// 0,
-// "Goodbye".into()
-// ),
-// }
-// );
-// assert_eq!(
-// lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
-// .await,
-// lsp::DidOpenTextDocumentParams {
-// text_document: lsp::TextDocumentItem::new(
-// buffer_1_uri.clone(),
-// "plaintext".into(),
-// 0,
-// "Hello world".into()
-// ),
-// }
-// );
-
-// // Dropping a buffer causes it to be closed on the LSP side as well.
-// cx.update(|_| drop(buffer_2));
-// assert_eq!(
-// lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
-// .await,
-// lsp::DidCloseTextDocumentParams {
-// text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri),
-// }
-// );
-// }
-
-// struct File {
-// abs_path: PathBuf,
-// path: Arc<Path>,
-// }
-
-// impl language2::File for File {
-// fn as_local(&self) -> Option<&dyn language2::LocalFile> {
-// Some(self)
-// }
-
-// fn mtime(&self) -> std::time::SystemTime {
-// unimplemented!()
-// }
-
-// fn path(&self) -> &Arc<Path> {
-// &self.path
-// }
-
-// fn full_path(&self, _: &AppContext) -> PathBuf {
-// unimplemented!()
-// }
-
-// fn file_name<'a>(&'a self, _: &'a AppContext) -> &'a std::ffi::OsStr {
-// unimplemented!()
-// }
-
-// fn is_deleted(&self) -> bool {
-// unimplemented!()
-// }
-
-// fn as_any(&self) -> &dyn std::any::Any {
-// unimplemented!()
-// }
-
-// fn to_proto(&self) -> rpc::proto::File {
-// unimplemented!()
-// }
-
-// fn worktree_id(&self) -> usize {
-// 0
-// }
-// }
-
-// impl language::LocalFile for File {
-// fn abs_path(&self, _: &AppContext) -> PathBuf {
-// self.abs_path.clone()
-// }
-
-// fn load(&self, _: &AppContext) -> Task<Result<String>> {
-// unimplemented!()
-// }
-
-// fn buffer_reloaded(
-// &self,
-// _: u64,
-// _: &clock::Global,
-// _: language::RopeFingerprint,
-// _: language::LineEnding,
-// _: std::time::SystemTime,
-// _: &mut AppContext,
-// ) {
-// unimplemented!()
-// }
-// }
-// }
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use gpui::TestAppContext;
+
+ #[gpui::test(iterations = 10)]
+ async fn test_buffer_management(cx: &mut TestAppContext) {
+ let (copilot, mut lsp) = Copilot::fake(cx);
+
+ let buffer_1 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "Hello"));
+ let buffer_1_uri: lsp::Url = format!("buffer://{}", buffer_1.entity_id().as_u64())
+ .parse()
+ .unwrap();
+ copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_1, cx));
+ assert_eq!(
+ lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await,
+ lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem::new(
+ buffer_1_uri.clone(),
+ "plaintext".into(),
+ 0,
+ "Hello".into()
+ ),
+ }
+ );
+
+ let buffer_2 = cx.build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "Goodbye"));
+ let buffer_2_uri: lsp::Url = format!("buffer://{}", buffer_2.entity_id().as_u64())
+ .parse()
+ .unwrap();
+ copilot.update(cx, |copilot, cx| copilot.register_buffer(&buffer_2, cx));
+ assert_eq!(
+ lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await,
+ lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem::new(
+ buffer_2_uri.clone(),
+ "plaintext".into(),
+ 0,
+ "Goodbye".into()
+ ),
+ }
+ );
+
+ buffer_1.update(cx, |buffer, cx| buffer.edit([(5..5, " world")], None, cx));
+ assert_eq!(
+ lsp.receive_notification::<lsp::notification::DidChangeTextDocument>()
+ .await,
+ lsp::DidChangeTextDocumentParams {
+ text_document: lsp::VersionedTextDocumentIdentifier::new(buffer_1_uri.clone(), 1),
+ content_changes: vec![lsp::TextDocumentContentChangeEvent {
+ range: Some(lsp::Range::new(
+ lsp::Position::new(0, 5),
+ lsp::Position::new(0, 5)
+ )),
+ range_length: None,
+ text: " world".into(),
+ }],
+ }
+ );
+
+ // Ensure updates to the file are reflected in the LSP.
+ buffer_1.update(cx, |buffer, cx| {
+ buffer.file_updated(
+ Arc::new(File {
+ abs_path: "/root/child/buffer-1".into(),
+ path: Path::new("child/buffer-1").into(),
+ }),
+ cx,
+ )
+ });
+ assert_eq!(
+ lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
+ .await,
+ lsp::DidCloseTextDocumentParams {
+ text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri),
+ }
+ );
+ let buffer_1_uri = lsp::Url::from_file_path("/root/child/buffer-1").unwrap();
+ assert_eq!(
+ lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await,
+ lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem::new(
+ buffer_1_uri.clone(),
+ "plaintext".into(),
+ 1,
+ "Hello world".into()
+ ),
+ }
+ );
+
+ // Ensure all previously-registered buffers are closed when signing out.
+ lsp.handle_request::<request::SignOut, _, _>(|_, _| async {
+ Ok(request::SignOutResult {})
+ });
+ copilot
+ .update(cx, |copilot, cx| copilot.sign_out(cx))
+ .await
+ .unwrap();
+ // todo!() po: these notifications now happen in reverse order?
+ assert_eq!(
+ lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
+ .await,
+ lsp::DidCloseTextDocumentParams {
+ text_document: lsp::TextDocumentIdentifier::new(buffer_1_uri.clone()),
+ }
+ );
+ assert_eq!(
+ lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
+ .await,
+ lsp::DidCloseTextDocumentParams {
+ text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri.clone()),
+ }
+ );
+
+ // Ensure all previously-registered buffers are re-opened when signing in.
+ lsp.handle_request::<request::SignInInitiate, _, _>(|_, _| async {
+ Ok(request::SignInInitiateResult::AlreadySignedIn {
+ user: "user-1".into(),
+ })
+ });
+ copilot
+ .update(cx, |copilot, cx| copilot.sign_in(cx))
+ .await
+ .unwrap();
+
+ assert_eq!(
+ lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await,
+ lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem::new(
+ buffer_1_uri.clone(),
+ "plaintext".into(),
+ 0,
+ "Hello world".into()
+ ),
+ }
+ );
+ assert_eq!(
+ lsp.receive_notification::<lsp::notification::DidOpenTextDocument>()
+ .await,
+ lsp::DidOpenTextDocumentParams {
+ text_document: lsp::TextDocumentItem::new(
+ buffer_2_uri.clone(),
+ "plaintext".into(),
+ 0,
+ "Goodbye".into()
+ ),
+ }
+ );
+ // Dropping a buffer causes it to be closed on the LSP side as well.
+ cx.update(|_| drop(buffer_2));
+ assert_eq!(
+ lsp.receive_notification::<lsp::notification::DidCloseTextDocument>()
+ .await,
+ lsp::DidCloseTextDocumentParams {
+ text_document: lsp::TextDocumentIdentifier::new(buffer_2_uri),
+ }
+ );
+ }
+
+ struct File {
+ abs_path: PathBuf,
+ path: Arc<Path>,
+ }
+
+ impl language::File for File {
+ fn as_local(&self) -> Option<&dyn language::LocalFile> {
+ Some(self)
+ }
+
+ fn mtime(&self) -> std::time::SystemTime {
+ unimplemented!()
+ }
+
+ fn path(&self) -> &Arc<Path> {
+ &self.path
+ }
+
+ fn full_path(&self, _: &AppContext) -> PathBuf {
+ unimplemented!()
+ }
+
+ fn file_name<'a>(&'a self, _: &'a AppContext) -> &'a std::ffi::OsStr {
+ unimplemented!()
+ }
+
+ fn is_deleted(&self) -> bool {
+ unimplemented!()
+ }
+
+ fn as_any(&self) -> &dyn std::any::Any {
+ unimplemented!()
+ }
+
+ fn to_proto(&self) -> rpc::proto::File {
+ unimplemented!()
+ }
+
+ fn worktree_id(&self) -> usize {
+ 0
+ }
+ }
+
+ impl language::LocalFile for File {
+ fn abs_path(&self, _: &AppContext) -> PathBuf {
+ self.abs_path.clone()
+ }
+
+ fn load(&self, _: &AppContext) -> Task<Result<String>> {
+ unimplemented!()
+ }
+
+ fn buffer_reloaded(
+ &self,
+ _: u64,
+ _: &clock::Global,
+ _: language::RopeFingerprint,
+ _: language::LineEnding,
+ _: std::time::SystemTime,
+ _: &mut AppContext,
+ ) {
+ unimplemented!()
+ }
+ }
+}
@@ -201,9 +201,8 @@ impl CopilotButton {
url: COPILOT_SETTINGS_URL.to_string(),
}
.boxed_clone(),
- cx,
)
- .action("Sign Out", SignOut.boxed_clone(), cx)
+ .action("Sign Out", SignOut.boxed_clone())
});
}
@@ -990,905 +990,869 @@ pub fn next_rows(display_row: u32, display_map: &DisplaySnapshot) -> impl Iterat
})
}
-// #[cfg(test)]
-// pub mod tests {
-// use super::*;
-// use crate::{
-// movement,
-// test::{editor_test_context::EditorTestContext, marked_display_snapshot},
-// };
-// use gpui::{AppContext, Hsla};
-// use language::{
-// language_settings::{AllLanguageSettings, AllLanguageSettingsContent},
-// Buffer, Language, LanguageConfig, SelectionGoal,
-// };
-// use project::Project;
-// use rand::{prelude::*, Rng};
-// use settings::SettingsStore;
-// use smol::stream::StreamExt;
-// use std::{env, sync::Arc};
-// use theme::SyntaxTheme;
-// use util::test::{marked_text_ranges, sample_text};
-// use Bias::*;
-
-// #[gpui::test(iterations = 100)]
-// async fn test_random_display_map(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
-// cx.foreground().set_block_on_ticks(0..=50);
-// cx.foreground().forbid_parking();
-// let operations = env::var("OPERATIONS")
-// .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
-// .unwrap_or(10);
-
-// let font_cache = cx.font_cache().clone();
-// let mut tab_size = rng.gen_range(1..=4);
-// let buffer_start_excerpt_header_height = rng.gen_range(1..=5);
-// let excerpt_header_height = rng.gen_range(1..=5);
-// let family_id = font_cache
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = font_cache
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 14.0;
-// let max_wrap_width = 300.0;
-// let mut wrap_width = if rng.gen_bool(0.1) {
-// None
-// } else {
-// Some(rng.gen_range(0.0..=max_wrap_width))
-// };
-
-// log::info!("tab size: {}", tab_size);
-// log::info!("wrap width: {:?}", wrap_width);
-
-// cx.update(|cx| {
-// init_test(cx, |s| s.defaults.tab_size = NonZeroU32::new(tab_size));
-// });
-
-// let buffer = cx.update(|cx| {
-// if rng.gen() {
-// let len = rng.gen_range(0..10);
-// let text = util::RandomCharIter::new(&mut rng)
-// .take(len)
-// .collect::<String>();
-// MultiBuffer::build_simple(&text, cx)
-// } else {
-// MultiBuffer::build_random(&mut rng, cx)
-// }
-// });
-
-// let map = cx.add_model(|cx| {
-// DisplayMap::new(
-// buffer.clone(),
-// font_id,
-// font_size,
-// wrap_width,
-// buffer_start_excerpt_header_height,
-// excerpt_header_height,
-// cx,
-// )
-// });
-// let mut notifications = observe(&map, cx);
-// let mut fold_count = 0;
-// let mut blocks = Vec::new();
-
-// let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
-// log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
-// log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
-// log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
-// log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
-// log::info!("block text: {:?}", snapshot.block_snapshot.text());
-// log::info!("display text: {:?}", snapshot.text());
-
-// for _i in 0..operations {
-// match rng.gen_range(0..100) {
-// 0..=19 => {
-// wrap_width = if rng.gen_bool(0.2) {
-// None
-// } else {
-// Some(rng.gen_range(0.0..=max_wrap_width))
-// };
-// log::info!("setting wrap width to {:?}", wrap_width);
-// map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
-// }
-// 20..=29 => {
-// let mut tab_sizes = vec![1, 2, 3, 4];
-// tab_sizes.remove((tab_size - 1) as usize);
-// tab_size = *tab_sizes.choose(&mut rng).unwrap();
-// log::info!("setting tab size to {:?}", tab_size);
-// cx.update(|cx| {
-// cx.update_global::<SettingsStore, _, _>(|store, cx| {
-// store.update_user_settings::<AllLanguageSettings>(cx, |s| {
-// s.defaults.tab_size = NonZeroU32::new(tab_size);
-// });
-// });
-// });
-// }
-// 30..=44 => {
-// map.update(cx, |map, cx| {
-// if rng.gen() || blocks.is_empty() {
-// let buffer = map.snapshot(cx).buffer_snapshot;
-// let block_properties = (0..rng.gen_range(1..=1))
-// .map(|_| {
-// let position =
-// buffer.anchor_after(buffer.clip_offset(
-// rng.gen_range(0..=buffer.len()),
-// Bias::Left,
-// ));
-
-// let disposition = if rng.gen() {
-// BlockDisposition::Above
-// } else {
-// BlockDisposition::Below
-// };
-// let height = rng.gen_range(1..5);
-// log::info!(
-// "inserting block {:?} {:?} with height {}",
-// disposition,
-// position.to_point(&buffer),
-// height
-// );
-// BlockProperties {
-// style: BlockStyle::Fixed,
-// position,
-// height,
-// disposition,
-// render: Arc::new(|_| Empty::new().into_any()),
-// }
-// })
-// .collect::<Vec<_>>();
-// blocks.extend(map.insert_blocks(block_properties, cx));
-// } else {
-// blocks.shuffle(&mut rng);
-// let remove_count = rng.gen_range(1..=4.min(blocks.len()));
-// let block_ids_to_remove = (0..remove_count)
-// .map(|_| blocks.remove(rng.gen_range(0..blocks.len())))
-// .collect();
-// log::info!("removing block ids {:?}", block_ids_to_remove);
-// map.remove_blocks(block_ids_to_remove, cx);
-// }
-// });
-// }
-// 45..=79 => {
-// let mut ranges = Vec::new();
-// for _ in 0..rng.gen_range(1..=3) {
-// buffer.read_with(cx, |buffer, cx| {
-// let buffer = buffer.read(cx);
-// let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
-// let start = buffer.clip_offset(rng.gen_range(0..=end), Left);
-// ranges.push(start..end);
-// });
-// }
-
-// if rng.gen() && fold_count > 0 {
-// log::info!("unfolding ranges: {:?}", ranges);
-// map.update(cx, |map, cx| {
-// map.unfold(ranges, true, cx);
-// });
-// } else {
-// log::info!("folding ranges: {:?}", ranges);
-// map.update(cx, |map, cx| {
-// map.fold(ranges, cx);
-// });
-// }
-// }
-// _ => {
-// buffer.update(cx, |buffer, cx| buffer.randomly_mutate(&mut rng, 5, cx));
-// }
-// }
-
-// if map.read_with(cx, |map, cx| map.is_rewrapping(cx)) {
-// notifications.next().await.unwrap();
-// }
-
-// let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
-// fold_count = snapshot.fold_count();
-// log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
-// log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
-// log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
-// log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
-// log::info!("block text: {:?}", snapshot.block_snapshot.text());
-// log::info!("display text: {:?}", snapshot.text());
-
-// // Line boundaries
-// let buffer = &snapshot.buffer_snapshot;
-// for _ in 0..5 {
-// let row = rng.gen_range(0..=buffer.max_point().row);
-// let column = rng.gen_range(0..=buffer.line_len(row));
-// let point = buffer.clip_point(Point::new(row, column), Left);
-
-// let (prev_buffer_bound, prev_display_bound) = snapshot.prev_line_boundary(point);
-// let (next_buffer_bound, next_display_bound) = snapshot.next_line_boundary(point);
-
-// assert!(prev_buffer_bound <= point);
-// assert!(next_buffer_bound >= point);
-// assert_eq!(prev_buffer_bound.column, 0);
-// assert_eq!(prev_display_bound.column(), 0);
-// if next_buffer_bound < buffer.max_point() {
-// assert_eq!(buffer.chars_at(next_buffer_bound).next(), Some('\n'));
-// }
-
-// assert_eq!(
-// prev_display_bound,
-// prev_buffer_bound.to_display_point(&snapshot),
-// "row boundary before {:?}. reported buffer row boundary: {:?}",
-// point,
-// prev_buffer_bound
-// );
-// assert_eq!(
-// next_display_bound,
-// next_buffer_bound.to_display_point(&snapshot),
-// "display row boundary after {:?}. reported buffer row boundary: {:?}",
-// point,
-// next_buffer_bound
-// );
-// assert_eq!(
-// prev_buffer_bound,
-// prev_display_bound.to_point(&snapshot),
-// "row boundary before {:?}. reported display row boundary: {:?}",
-// point,
-// prev_display_bound
-// );
-// assert_eq!(
-// next_buffer_bound,
-// next_display_bound.to_point(&snapshot),
-// "row boundary after {:?}. reported display row boundary: {:?}",
-// point,
-// next_display_bound
-// );
-// }
-
-// // Movement
-// let min_point = snapshot.clip_point(DisplayPoint::new(0, 0), Left);
-// let max_point = snapshot.clip_point(snapshot.max_point(), Right);
-// for _ in 0..5 {
-// let row = rng.gen_range(0..=snapshot.max_point().row());
-// let column = rng.gen_range(0..=snapshot.line_len(row));
-// let point = snapshot.clip_point(DisplayPoint::new(row, column), Left);
-
-// log::info!("Moving from point {:?}", point);
-
-// let moved_right = movement::right(&snapshot, point);
-// log::info!("Right {:?}", moved_right);
-// if point < max_point {
-// assert!(moved_right > point);
-// if point.column() == snapshot.line_len(point.row())
-// || snapshot.soft_wrap_indent(point.row()).is_some()
-// && point.column() == snapshot.line_len(point.row()) - 1
-// {
-// assert!(moved_right.row() > point.row());
-// }
-// } else {
-// assert_eq!(moved_right, point);
-// }
-
-// let moved_left = movement::left(&snapshot, point);
-// log::info!("Left {:?}", moved_left);
-// if point > min_point {
-// assert!(moved_left < point);
-// if point.column() == 0 {
-// assert!(moved_left.row() < point.row());
-// }
-// } else {
-// assert_eq!(moved_left, point);
-// }
-// }
-// }
-// }
-
-// #[gpui::test(retries = 5)]
-// async fn test_soft_wraps(cx: &mut gpui::TestAppContext) {
-// cx.foreground().set_block_on_ticks(usize::MAX..=usize::MAX);
-// cx.update(|cx| {
-// init_test(cx, |_| {});
-// });
-
-// let mut cx = EditorTestContext::new(cx).await;
-// let editor = cx.editor.clone();
-// let window = cx.window.clone();
-
-// cx.update_window(window, |cx| {
-// let text_layout_details =
-// editor.read_with(cx, |editor, cx| editor.text_layout_details(cx));
-
-// let font_cache = cx.font_cache().clone();
-
-// let family_id = font_cache
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = font_cache
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 12.0;
-// let wrap_width = Some(64.);
-
-// let text = "one two three four five\nsix seven eight";
-// let buffer = MultiBuffer::build_simple(text, cx);
-// let map = cx.add_model(|cx| {
-// DisplayMap::new(buffer.clone(), font_id, font_size, wrap_width, 1, 1, cx)
-// });
-
-// let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
-// assert_eq!(
-// snapshot.text_chunks(0).collect::<String>(),
-// "one two \nthree four \nfive\nsix seven \neight"
-// );
-// assert_eq!(
-// snapshot.clip_point(DisplayPoint::new(0, 8), Bias::Left),
-// DisplayPoint::new(0, 7)
-// );
-// assert_eq!(
-// snapshot.clip_point(DisplayPoint::new(0, 8), Bias::Right),
-// DisplayPoint::new(1, 0)
-// );
-// assert_eq!(
-// movement::right(&snapshot, DisplayPoint::new(0, 7)),
-// DisplayPoint::new(1, 0)
-// );
-// assert_eq!(
-// movement::left(&snapshot, DisplayPoint::new(1, 0)),
-// DisplayPoint::new(0, 7)
-// );
-
-// let x = snapshot.x_for_point(DisplayPoint::new(1, 10), &text_layout_details);
-// assert_eq!(
-// movement::up(
-// &snapshot,
-// DisplayPoint::new(1, 10),
-// SelectionGoal::None,
-// false,
-// &text_layout_details,
-// ),
-// (
-// DisplayPoint::new(0, 7),
-// SelectionGoal::HorizontalPosition(x)
-// )
-// );
-// assert_eq!(
-// movement::down(
-// &snapshot,
-// DisplayPoint::new(0, 7),
-// SelectionGoal::HorizontalPosition(x),
-// false,
-// &text_layout_details
-// ),
-// (
-// DisplayPoint::new(1, 10),
-// SelectionGoal::HorizontalPosition(x)
-// )
-// );
-// assert_eq!(
-// movement::down(
-// &snapshot,
-// DisplayPoint::new(1, 10),
-// SelectionGoal::HorizontalPosition(x),
-// false,
-// &text_layout_details
-// ),
-// (
-// DisplayPoint::new(2, 4),
-// SelectionGoal::HorizontalPosition(x)
-// )
-// );
-
-// let ix = snapshot.buffer_snapshot.text().find("seven").unwrap();
-// buffer.update(cx, |buffer, cx| {
-// buffer.edit([(ix..ix, "and ")], None, cx);
-// });
-
-// let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
-// assert_eq!(
-// snapshot.text_chunks(1).collect::<String>(),
-// "three four \nfive\nsix and \nseven eight"
-// );
-
-// // Re-wrap on font size changes
-// map.update(cx, |map, cx| map.set_font_with_size(font_id, font_size + 3., cx));
-
-// let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
-// assert_eq!(
-// snapshot.text_chunks(1).collect::<String>(),
-// "three \nfour five\nsix and \nseven \neight"
-// )
-// });
-// }
-
-// #[gpui::test]
-// fn test_text_chunks(cx: &mut gpui::AppContext) {
-// init_test(cx, |_| {});
-
-// let text = sample_text(6, 6, 'a');
-// let buffer = MultiBuffer::build_simple(&text, cx);
-// let family_id = cx
-// .font_cache()
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = cx
-// .font_cache()
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 14.0;
-// let map =
-// cx.add_model(|cx| DisplayMap::new(buffer.clone(), font_id, font_size, None, 1, 1, cx));
-
-// buffer.update(cx, |buffer, cx| {
-// buffer.edit(
-// vec![
-// (Point::new(1, 0)..Point::new(1, 0), "\t"),
-// (Point::new(1, 1)..Point::new(1, 1), "\t"),
-// (Point::new(2, 1)..Point::new(2, 1), "\t"),
-// ],
-// None,
-// cx,
-// )
-// });
-
-// assert_eq!(
-// map.update(cx, |map, cx| map.snapshot(cx))
-// .text_chunks(1)
-// .collect::<String>()
-// .lines()
-// .next(),
-// Some(" b bbbbb")
-// );
-// assert_eq!(
-// map.update(cx, |map, cx| map.snapshot(cx))
-// .text_chunks(2)
-// .collect::<String>()
-// .lines()
-// .next(),
-// Some("c ccccc")
-// );
-// }
-
-// #[gpui::test]
-// async fn test_chunks(cx: &mut gpui::TestAppContext) {
-// use unindent::Unindent as _;
-
-// let text = r#"
-// fn outer() {}
-
-// mod module {
-// fn inner() {}
-// }"#
-// .unindent();
-
-// let theme = SyntaxTheme::new(vec![
-// ("mod.body".to_string(), Hsla::red().into()),
-// ("fn.name".to_string(), Hsla::blue().into()),
-// ]);
-// let language = Arc::new(
-// Language::new(
-// LanguageConfig {
-// name: "Test".into(),
-// path_suffixes: vec![".test".to_string()],
-// ..Default::default()
-// },
-// Some(tree_sitter_rust::language()),
-// )
-// .with_highlights_query(
-// r#"
-// (mod_item name: (identifier) body: _ @mod.body)
-// (function_item name: (identifier) @fn.name)
-// "#,
-// )
-// .unwrap(),
-// );
-// language.set_theme(&theme);
-
-// cx.update(|cx| init_test(cx, |s| s.defaults.tab_size = Some(2.try_into().unwrap())));
-
-// let buffer = cx
-// .add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
-// buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
-// let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
-
-// let font_cache = cx.font_cache();
-// let family_id = font_cache
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = font_cache
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 14.0;
-
-// let map = cx.add_model(|cx| DisplayMap::new(buffer, font_id, font_size, None, 1, 1, cx));
-// assert_eq!(
-// cx.update(|cx| syntax_chunks(0..5, &map, &theme, cx)),
-// vec![
-// ("fn ".to_string(), None),
-// ("outer".to_string(), Some(Hsla::blue())),
-// ("() {}\n\nmod module ".to_string(), None),
-// ("{\n fn ".to_string(), Some(Hsla::red())),
-// ("inner".to_string(), Some(Hsla::blue())),
-// ("() {}\n}".to_string(), Some(Hsla::red())),
-// ]
-// );
-// assert_eq!(
-// cx.update(|cx| syntax_chunks(3..5, &map, &theme, cx)),
-// vec![
-// (" fn ".to_string(), Some(Hsla::red())),
-// ("inner".to_string(), Some(Hsla::blue())),
-// ("() {}\n}".to_string(), Some(Hsla::red())),
-// ]
-// );
-
-// map.update(cx, |map, cx| {
-// map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], cx)
-// });
-// assert_eq!(
-// cx.update(|cx| syntax_chunks(0..2, &map, &theme, cx)),
-// vec![
-// ("fn ".to_string(), None),
-// ("out".to_string(), Some(Hsla::blue())),
-// ("⋯".to_string(), None),
-// (" fn ".to_string(), Some(Hsla::red())),
-// ("inner".to_string(), Some(Hsla::blue())),
-// ("() {}\n}".to_string(), Some(Hsla::red())),
-// ]
-// );
-// }
-
-// #[gpui::test]
-// async fn test_chunks_with_soft_wrapping(cx: &mut gpui::TestAppContext) {
-// use unindent::Unindent as _;
-
-// cx.foreground().set_block_on_ticks(usize::MAX..=usize::MAX);
-
-// let text = r#"
-// fn outer() {}
-
-// mod module {
-// fn inner() {}
-// }"#
-// .unindent();
-
-// let theme = SyntaxTheme::new(vec![
-// ("mod.body".to_string(), Hsla::red().into()),
-// ("fn.name".to_string(), Hsla::blue().into()),
-// ]);
-// let language = Arc::new(
-// Language::new(
-// LanguageConfig {
-// name: "Test".into(),
-// path_suffixes: vec![".test".to_string()],
-// ..Default::default()
-// },
-// Some(tree_sitter_rust::language()),
-// )
-// .with_highlights_query(
-// r#"
-// (mod_item name: (identifier) body: _ @mod.body)
-// (function_item name: (identifier) @fn.name)
-// "#,
-// )
-// .unwrap(),
-// );
-// language.set_theme(&theme);
-
-// cx.update(|cx| init_test(cx, |_| {}));
-
-// let buffer = cx
-// .add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
-// buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
-// let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
-
-// let font_cache = cx.font_cache();
-
-// let family_id = font_cache
-// .load_family(&["Courier"], &Default::default())
-// .unwrap();
-// let font_id = font_cache
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 16.0;
-
-// let map =
-// cx.add_model(|cx| DisplayMap::new(buffer, font_id, font_size, Some(40.0), 1, 1, cx));
-// assert_eq!(
-// cx.update(|cx| syntax_chunks(0..5, &map, &theme, cx)),
-// [
-// ("fn \n".to_string(), None),
-// ("oute\nr".to_string(), Some(Hsla::blue())),
-// ("() \n{}\n\n".to_string(), None),
-// ]
-// );
-// assert_eq!(
-// cx.update(|cx| syntax_chunks(3..5, &map, &theme, cx)),
-// [("{}\n\n".to_string(), None)]
-// );
-
-// map.update(cx, |map, cx| {
-// map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], cx)
-// });
-// assert_eq!(
-// cx.update(|cx| syntax_chunks(1..4, &map, &theme, cx)),
-// [
-// ("out".to_string(), Some(Hsla::blue())),
-// ("⋯\n".to_string(), None),
-// (" \nfn ".to_string(), Some(Hsla::red())),
-// ("i\n".to_string(), Some(Hsla::blue()))
-// ]
-// );
-// }
-
-// #[gpui::test]
-// async fn test_chunks_with_text_highlights(cx: &mut gpui::TestAppContext) {
-// cx.update(|cx| init_test(cx, |_| {}));
-
-// let theme = SyntaxTheme::new(vec![
-// ("operator".to_string(), Hsla::red().into()),
-// ("string".to_string(), Hsla::green().into()),
-// ]);
-// let language = Arc::new(
-// Language::new(
-// LanguageConfig {
-// name: "Test".into(),
-// path_suffixes: vec![".test".to_string()],
-// ..Default::default()
-// },
-// Some(tree_sitter_rust::language()),
-// )
-// .with_highlights_query(
-// r#"
-// ":" @operator
-// (string_literal) @string
-// "#,
-// )
-// .unwrap(),
-// );
-// language.set_theme(&theme);
-
-// let (text, highlighted_ranges) = marked_text_ranges(r#"constˇ «a»: B = "c «d»""#, false);
-
-// let buffer = cx
-// .add_model(|cx| Buffer::new(0, cx.model_id() as u64, text).with_language(language, cx));
-// buffer.condition(cx, |buf, _| !buf.is_parsing()).await;
-
-// let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
-// let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
-
-// let font_cache = cx.font_cache();
-// let family_id = font_cache
-// .load_family(&["Courier"], &Default::default())
-// .unwrap();
-// let font_id = font_cache
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 16.0;
-// let map = cx.add_model(|cx| DisplayMap::new(buffer, font_id, font_size, None, 1, 1, cx));
-
-// enum MyType {}
-
-// let style = HighlightStyle {
-// color: Some(Hsla::blue()),
-// ..Default::default()
-// };
-
-// map.update(cx, |map, _cx| {
-// map.highlight_text(
-// TypeId::of::<MyType>(),
-// highlighted_ranges
-// .into_iter()
-// .map(|range| {
-// buffer_snapshot.anchor_before(range.start)
-// ..buffer_snapshot.anchor_before(range.end)
-// })
-// .collect(),
-// style,
-// );
-// });
-
-// assert_eq!(
-// cx.update(|cx| chunks(0..10, &map, &theme, cx)),
-// [
-// ("const ".to_string(), None, None),
-// ("a".to_string(), None, Some(Hsla::blue())),
-// (":".to_string(), Some(Hsla::red()), None),
-// (" B = ".to_string(), None, None),
-// ("\"c ".to_string(), Some(Hsla::green()), None),
-// ("d".to_string(), Some(Hsla::green()), Some(Hsla::blue())),
-// ("\"".to_string(), Some(Hsla::green()), None),
-// ]
-// );
-// }
-
-// #[gpui::test]
-// fn test_clip_point(cx: &mut gpui::AppContext) {
-// init_test(cx, |_| {});
-
-// fn assert(text: &str, shift_right: bool, bias: Bias, cx: &mut gpui::AppContext) {
-// let (unmarked_snapshot, mut markers) = marked_display_snapshot(text, cx);
-
-// match bias {
-// Bias::Left => {
-// if shift_right {
-// *markers[1].column_mut() += 1;
-// }
-
-// assert_eq!(unmarked_snapshot.clip_point(markers[1], bias), markers[0])
-// }
-// Bias::Right => {
-// if shift_right {
-// *markers[0].column_mut() += 1;
-// }
-
-// assert_eq!(unmarked_snapshot.clip_point(markers[0], bias), markers[1])
-// }
-// };
-// }
-
-// use Bias::{Left, Right};
-// assert("ˇˇα", false, Left, cx);
-// assert("ˇˇα", true, Left, cx);
-// assert("ˇˇα", false, Right, cx);
-// assert("ˇαˇ", true, Right, cx);
-// assert("ˇˇ✋", false, Left, cx);
-// assert("ˇˇ✋", true, Left, cx);
-// assert("ˇˇ✋", false, Right, cx);
-// assert("ˇ✋ˇ", true, Right, cx);
-// assert("ˇˇ🍐", false, Left, cx);
-// assert("ˇˇ🍐", true, Left, cx);
-// assert("ˇˇ🍐", false, Right, cx);
-// assert("ˇ🍐ˇ", true, Right, cx);
-// assert("ˇˇ\t", false, Left, cx);
-// assert("ˇˇ\t", true, Left, cx);
-// assert("ˇˇ\t", false, Right, cx);
-// assert("ˇ\tˇ", true, Right, cx);
-// assert(" ˇˇ\t", false, Left, cx);
-// assert(" ˇˇ\t", true, Left, cx);
-// assert(" ˇˇ\t", false, Right, cx);
-// assert(" ˇ\tˇ", true, Right, cx);
-// assert(" ˇˇ\t", false, Left, cx);
-// assert(" ˇˇ\t", false, Right, cx);
-// }
-
-// #[gpui::test]
-// fn test_clip_at_line_ends(cx: &mut gpui::AppContext) {
-// init_test(cx, |_| {});
-
-// fn assert(text: &str, cx: &mut gpui::AppContext) {
-// let (mut unmarked_snapshot, markers) = marked_display_snapshot(text, cx);
-// unmarked_snapshot.clip_at_line_ends = true;
-// assert_eq!(
-// unmarked_snapshot.clip_point(markers[1], Bias::Left),
-// markers[0]
-// );
-// }
-
-// assert("ˇˇ", cx);
-// assert("ˇaˇ", cx);
-// assert("aˇbˇ", cx);
-// assert("aˇαˇ", cx);
-// }
-
-// #[gpui::test]
-// fn test_tabs_with_multibyte_chars(cx: &mut gpui::AppContext) {
-// init_test(cx, |_| {});
-
-// let text = "✅\t\tα\nβ\t\n🏀β\t\tγ";
-// let buffer = MultiBuffer::build_simple(text, cx);
-// let font_cache = cx.font_cache();
-// let family_id = font_cache
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = font_cache
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 14.0;
-
-// let map =
-// cx.add_model(|cx| DisplayMap::new(buffer.clone(), font_id, font_size, None, 1, 1, cx));
-// let map = map.update(cx, |map, cx| map.snapshot(cx));
-// assert_eq!(map.text(), "✅ α\nβ \n🏀β γ");
-// assert_eq!(
-// map.text_chunks(0).collect::<String>(),
-// "✅ α\nβ \n🏀β γ"
-// );
-// assert_eq!(map.text_chunks(1).collect::<String>(), "β \n🏀β γ");
-// assert_eq!(map.text_chunks(2).collect::<String>(), "🏀β γ");
-
-// let point = Point::new(0, "✅\t\t".len() as u32);
-// let display_point = DisplayPoint::new(0, "✅ ".len() as u32);
-// assert_eq!(point.to_display_point(&map), display_point);
-// assert_eq!(display_point.to_point(&map), point);
-
-// let point = Point::new(1, "β\t".len() as u32);
-// let display_point = DisplayPoint::new(1, "β ".len() as u32);
-// assert_eq!(point.to_display_point(&map), display_point);
-// assert_eq!(display_point.to_point(&map), point,);
-
-// let point = Point::new(2, "🏀β\t\t".len() as u32);
-// let display_point = DisplayPoint::new(2, "🏀β ".len() as u32);
-// assert_eq!(point.to_display_point(&map), display_point);
-// assert_eq!(display_point.to_point(&map), point,);
-
-// // Display points inside of expanded tabs
-// assert_eq!(
-// DisplayPoint::new(0, "✅ ".len() as u32).to_point(&map),
-// Point::new(0, "✅\t".len() as u32),
-// );
-// assert_eq!(
-// DisplayPoint::new(0, "✅ ".len() as u32).to_point(&map),
-// Point::new(0, "✅".len() as u32),
-// );
-
-// // Clipping display points inside of multi-byte characters
-// assert_eq!(
-// map.clip_point(DisplayPoint::new(0, "✅".len() as u32 - 1), Left),
-// DisplayPoint::new(0, 0)
-// );
-// assert_eq!(
-// map.clip_point(DisplayPoint::new(0, "✅".len() as u32 - 1), Bias::Right),
-// DisplayPoint::new(0, "✅".len() as u32)
-// );
-// }
-
-// #[gpui::test]
-// fn test_max_point(cx: &mut gpui::AppContext) {
-// init_test(cx, |_| {});
-
-// let buffer = MultiBuffer::build_simple("aaa\n\t\tbbb", cx);
-// let font_cache = cx.font_cache();
-// let family_id = font_cache
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = font_cache
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 14.0;
-// let map =
-// cx.add_model(|cx| DisplayMap::new(buffer.clone(), font_id, font_size, None, 1, 1, cx));
-// assert_eq!(
-// map.update(cx, |map, cx| map.snapshot(cx)).max_point(),
-// DisplayPoint::new(1, 11)
-// )
-// }
-
-// fn syntax_chunks<'a>(
-// rows: Range<u32>,
-// map: &Model<DisplayMap>,
-// theme: &'a SyntaxTheme,
-// cx: &mut AppContext,
-// ) -> Vec<(String, Option<Hsla>)> {
-// chunks(rows, map, theme, cx)
-// .into_iter()
-// .map(|(text, color, _)| (text, color))
-// .collect()
-// }
-
-// fn chunks<'a>(
-// rows: Range<u32>,
-// map: &Model<DisplayMap>,
-// theme: &'a SyntaxTheme,
-// cx: &mut AppContext,
-// ) -> Vec<(String, Option<Hsla>, Option<Hsla>)> {
-// let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
-// let mut chunks: Vec<(String, Option<Hsla>, Option<Hsla>)> = Vec::new();
-// for chunk in snapshot.chunks(rows, true, None, None) {
-// let syntax_color = chunk
-// .syntax_highlight_id
-// .and_then(|id| id.style(theme)?.color);
-// let highlight_color = chunk.highlight_style.and_then(|style| style.color);
-// if let Some((last_chunk, last_syntax_color, last_highlight_color)) = chunks.last_mut() {
-// if syntax_color == *last_syntax_color && highlight_color == *last_highlight_color {
-// last_chunk.push_str(chunk.text);
-// continue;
-// }
-// }
-// chunks.push((chunk.text.to_string(), syntax_color, highlight_color));
-// }
-// chunks
-// }
-
-// fn init_test(cx: &mut AppContext, f: impl Fn(&mut AllLanguageSettingsContent)) {
-// cx.foreground().forbid_parking();
-// cx.set_global(SettingsStore::test(cx));
-// language::init(cx);
-// crate::init(cx);
-// Project::init_settings(cx);
-// theme::init((), cx);
-// cx.update_global::<SettingsStore, _, _>(|store, cx| {
-// store.update_user_settings::<AllLanguageSettings>(cx, f);
-// });
-// }
-// }
+#[cfg(test)]
+pub mod tests {
+ use super::*;
+ use crate::{
+ movement,
+ test::{editor_test_context::EditorTestContext, marked_display_snapshot},
+ };
+ use gpui::{div, font, observe, px, AppContext, Context, Element, Hsla};
+ use language::{
+ language_settings::{AllLanguageSettings, AllLanguageSettingsContent},
+ Buffer, Language, LanguageConfig, SelectionGoal,
+ };
+ use project::Project;
+ use rand::{prelude::*, Rng};
+ use settings::SettingsStore;
+ use smol::stream::StreamExt;
+ use std::{env, sync::Arc};
+ use theme::{LoadThemes, SyntaxTheme};
+ use util::test::{marked_text_ranges, sample_text};
+ use Bias::*;
+
+ #[gpui::test(iterations = 100)]
+ async fn test_random_display_map(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
+ cx.background_executor.set_block_on_ticks(0..=50);
+ let operations = env::var("OPERATIONS")
+ .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+ .unwrap_or(10);
+
+ let test_platform = &cx.test_platform;
+ let mut tab_size = rng.gen_range(1..=4);
+ let buffer_start_excerpt_header_height = rng.gen_range(1..=5);
+ let excerpt_header_height = rng.gen_range(1..=5);
+ let font_size = px(14.0);
+ let max_wrap_width = 300.0;
+ let mut wrap_width = if rng.gen_bool(0.1) {
+ None
+ } else {
+ Some(px(rng.gen_range(0.0..=max_wrap_width)))
+ };
+
+ log::info!("tab size: {}", tab_size);
+ log::info!("wrap width: {:?}", wrap_width);
+
+ cx.update(|cx| {
+ init_test(cx, |s| s.defaults.tab_size = NonZeroU32::new(tab_size));
+ });
+
+ let buffer = cx.update(|cx| {
+ if rng.gen() {
+ let len = rng.gen_range(0..10);
+ let text = util::RandomCharIter::new(&mut rng)
+ .take(len)
+ .collect::<String>();
+ MultiBuffer::build_simple(&text, cx)
+ } else {
+ MultiBuffer::build_random(&mut rng, cx)
+ }
+ });
+
+ let map = cx.build_model(|cx| {
+ DisplayMap::new(
+ buffer.clone(),
+ font("Helvetica"),
+ font_size,
+ wrap_width,
+ buffer_start_excerpt_header_height,
+ excerpt_header_height,
+ cx,
+ )
+ });
+ let mut notifications = observe(&map, cx);
+ let mut fold_count = 0;
+ let mut blocks = Vec::new();
+
+ let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
+ log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
+ log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
+ log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
+ log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
+ log::info!("block text: {:?}", snapshot.block_snapshot.text());
+ log::info!("display text: {:?}", snapshot.text());
+
+ for _i in 0..operations {
+ match rng.gen_range(0..100) {
+ 0..=19 => {
+ wrap_width = if rng.gen_bool(0.2) {
+ None
+ } else {
+ Some(px(rng.gen_range(0.0..=max_wrap_width)))
+ };
+ log::info!("setting wrap width to {:?}", wrap_width);
+ map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
+ }
+ 20..=29 => {
+ let mut tab_sizes = vec![1, 2, 3, 4];
+ tab_sizes.remove((tab_size - 1) as usize);
+ tab_size = *tab_sizes.choose(&mut rng).unwrap();
+ log::info!("setting tab size to {:?}", tab_size);
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _>(|store, cx| {
+ store.update_user_settings::<AllLanguageSettings>(cx, |s| {
+ s.defaults.tab_size = NonZeroU32::new(tab_size);
+ });
+ });
+ });
+ }
+ 30..=44 => {
+ map.update(cx, |map, cx| {
+ if rng.gen() || blocks.is_empty() {
+ let buffer = map.snapshot(cx).buffer_snapshot;
+ let block_properties = (0..rng.gen_range(1..=1))
+ .map(|_| {
+ let position =
+ buffer.anchor_after(buffer.clip_offset(
+ rng.gen_range(0..=buffer.len()),
+ Bias::Left,
+ ));
+
+ let disposition = if rng.gen() {
+ BlockDisposition::Above
+ } else {
+ BlockDisposition::Below
+ };
+ let height = rng.gen_range(1..5);
+ log::info!(
+ "inserting block {:?} {:?} with height {}",
+ disposition,
+ position.to_point(&buffer),
+ height
+ );
+ BlockProperties {
+ style: BlockStyle::Fixed,
+ position,
+ height,
+ disposition,
+ render: Arc::new(|_| div().into_any()),
+ }
+ })
+ .collect::<Vec<_>>();
+ blocks.extend(map.insert_blocks(block_properties, cx));
+ } else {
+ blocks.shuffle(&mut rng);
+ let remove_count = rng.gen_range(1..=4.min(blocks.len()));
+ let block_ids_to_remove = (0..remove_count)
+ .map(|_| blocks.remove(rng.gen_range(0..blocks.len())))
+ .collect();
+ log::info!("removing block ids {:?}", block_ids_to_remove);
+ map.remove_blocks(block_ids_to_remove, cx);
+ }
+ });
+ }
+ 45..=79 => {
+ let mut ranges = Vec::new();
+ for _ in 0..rng.gen_range(1..=3) {
+ buffer.read_with(cx, |buffer, cx| {
+ let buffer = buffer.read(cx);
+ let end = buffer.clip_offset(rng.gen_range(0..=buffer.len()), Right);
+ let start = buffer.clip_offset(rng.gen_range(0..=end), Left);
+ ranges.push(start..end);
+ });
+ }
+
+ if rng.gen() && fold_count > 0 {
+ log::info!("unfolding ranges: {:?}", ranges);
+ map.update(cx, |map, cx| {
+ map.unfold(ranges, true, cx);
+ });
+ } else {
+ log::info!("folding ranges: {:?}", ranges);
+ map.update(cx, |map, cx| {
+ map.fold(ranges, cx);
+ });
+ }
+ }
+ _ => {
+ buffer.update(cx, |buffer, cx| buffer.randomly_mutate(&mut rng, 5, cx));
+ }
+ }
+
+ if map.read_with(cx, |map, cx| map.is_rewrapping(cx)) {
+ notifications.next().await.unwrap();
+ }
+
+ let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
+ fold_count = snapshot.fold_count();
+ log::info!("buffer text: {:?}", snapshot.buffer_snapshot.text());
+ log::info!("fold text: {:?}", snapshot.fold_snapshot.text());
+ log::info!("tab text: {:?}", snapshot.tab_snapshot.text());
+ log::info!("wrap text: {:?}", snapshot.wrap_snapshot.text());
+ log::info!("block text: {:?}", snapshot.block_snapshot.text());
+ log::info!("display text: {:?}", snapshot.text());
+
+ // Line boundaries
+ let buffer = &snapshot.buffer_snapshot;
+ for _ in 0..5 {
+ let row = rng.gen_range(0..=buffer.max_point().row);
+ let column = rng.gen_range(0..=buffer.line_len(row));
+ let point = buffer.clip_point(Point::new(row, column), Left);
+
+ let (prev_buffer_bound, prev_display_bound) = snapshot.prev_line_boundary(point);
+ let (next_buffer_bound, next_display_bound) = snapshot.next_line_boundary(point);
+
+ assert!(prev_buffer_bound <= point);
+ assert!(next_buffer_bound >= point);
+ assert_eq!(prev_buffer_bound.column, 0);
+ assert_eq!(prev_display_bound.column(), 0);
+ if next_buffer_bound < buffer.max_point() {
+ assert_eq!(buffer.chars_at(next_buffer_bound).next(), Some('\n'));
+ }
+
+ assert_eq!(
+ prev_display_bound,
+ prev_buffer_bound.to_display_point(&snapshot),
+ "row boundary before {:?}. reported buffer row boundary: {:?}",
+ point,
+ prev_buffer_bound
+ );
+ assert_eq!(
+ next_display_bound,
+ next_buffer_bound.to_display_point(&snapshot),
+ "display row boundary after {:?}. reported buffer row boundary: {:?}",
+ point,
+ next_buffer_bound
+ );
+ assert_eq!(
+ prev_buffer_bound,
+ prev_display_bound.to_point(&snapshot),
+ "row boundary before {:?}. reported display row boundary: {:?}",
+ point,
+ prev_display_bound
+ );
+ assert_eq!(
+ next_buffer_bound,
+ next_display_bound.to_point(&snapshot),
+ "row boundary after {:?}. reported display row boundary: {:?}",
+ point,
+ next_display_bound
+ );
+ }
+
+ // Movement
+ let min_point = snapshot.clip_point(DisplayPoint::new(0, 0), Left);
+ let max_point = snapshot.clip_point(snapshot.max_point(), Right);
+ for _ in 0..5 {
+ let row = rng.gen_range(0..=snapshot.max_point().row());
+ let column = rng.gen_range(0..=snapshot.line_len(row));
+ let point = snapshot.clip_point(DisplayPoint::new(row, column), Left);
+
+ log::info!("Moving from point {:?}", point);
+
+ let moved_right = movement::right(&snapshot, point);
+ log::info!("Right {:?}", moved_right);
+ if point < max_point {
+ assert!(moved_right > point);
+ if point.column() == snapshot.line_len(point.row())
+ || snapshot.soft_wrap_indent(point.row()).is_some()
+ && point.column() == snapshot.line_len(point.row()) - 1
+ {
+ assert!(moved_right.row() > point.row());
+ }
+ } else {
+ assert_eq!(moved_right, point);
+ }
+
+ let moved_left = movement::left(&snapshot, point);
+ log::info!("Left {:?}", moved_left);
+ if point > min_point {
+ assert!(moved_left < point);
+ if point.column() == 0 {
+ assert!(moved_left.row() < point.row());
+ }
+ } else {
+ assert_eq!(moved_left, point);
+ }
+ }
+ }
+ }
+
+ #[gpui::test(retries = 5)]
+ async fn test_soft_wraps(cx: &mut gpui::TestAppContext) {
+ cx.background_executor
+ .set_block_on_ticks(usize::MAX..=usize::MAX);
+ cx.update(|cx| {
+ init_test(cx, |_| {});
+ });
+
+ let mut cx = EditorTestContext::new(cx).await;
+ let editor = cx.editor.clone();
+ let window = cx.window.clone();
+
+ cx.update_window(window, |_, cx| {
+ let text_layout_details =
+ editor.update(cx, |editor, cx| editor.text_layout_details(cx));
+
+ let font_size = px(12.0);
+ let wrap_width = Some(px(64.));
+
+ let text = "one two three four five\nsix seven eight";
+ let buffer = MultiBuffer::build_simple(text, cx);
+ let map = cx.build_model(|cx| {
+ DisplayMap::new(
+ buffer.clone(),
+ font("Helvetica"),
+ font_size,
+ wrap_width,
+ 1,
+ 1,
+ cx,
+ )
+ });
+
+ let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
+ assert_eq!(
+ snapshot.text_chunks(0).collect::<String>(),
+ "one two \nthree four \nfive\nsix seven \neight"
+ );
+ assert_eq!(
+ snapshot.clip_point(DisplayPoint::new(0, 8), Bias::Left),
+ DisplayPoint::new(0, 7)
+ );
+ assert_eq!(
+ snapshot.clip_point(DisplayPoint::new(0, 8), Bias::Right),
+ DisplayPoint::new(1, 0)
+ );
+ assert_eq!(
+ movement::right(&snapshot, DisplayPoint::new(0, 7)),
+ DisplayPoint::new(1, 0)
+ );
+ assert_eq!(
+ movement::left(&snapshot, DisplayPoint::new(1, 0)),
+ DisplayPoint::new(0, 7)
+ );
+
+ let x = snapshot.x_for_display_point(DisplayPoint::new(1, 10), &text_layout_details);
+ assert_eq!(
+ movement::up(
+ &snapshot,
+ DisplayPoint::new(1, 10),
+ SelectionGoal::None,
+ false,
+ &text_layout_details,
+ ),
+ (
+ DisplayPoint::new(0, 7),
+ SelectionGoal::HorizontalPosition(x.0)
+ )
+ );
+ assert_eq!(
+ movement::down(
+ &snapshot,
+ DisplayPoint::new(0, 7),
+ SelectionGoal::HorizontalPosition(x.0),
+ false,
+ &text_layout_details
+ ),
+ (
+ DisplayPoint::new(1, 10),
+ SelectionGoal::HorizontalPosition(x.0)
+ )
+ );
+ assert_eq!(
+ movement::down(
+ &snapshot,
+ DisplayPoint::new(1, 10),
+ SelectionGoal::HorizontalPosition(x.0),
+ false,
+ &text_layout_details
+ ),
+ (
+ DisplayPoint::new(2, 4),
+ SelectionGoal::HorizontalPosition(x.0)
+ )
+ );
+
+ let ix = snapshot.buffer_snapshot.text().find("seven").unwrap();
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(ix..ix, "and ")], None, cx);
+ });
+
+ let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
+ assert_eq!(
+ snapshot.text_chunks(1).collect::<String>(),
+ "three four \nfive\nsix and \nseven eight"
+ );
+
+ // Re-wrap on font size changes
+ map.update(cx, |map, cx| {
+ map.set_font(font("Helvetica"), px(font_size.0 + 3.), cx)
+ });
+
+ let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
+ assert_eq!(
+ snapshot.text_chunks(1).collect::<String>(),
+ "three \nfour five\nsix and \nseven \neight"
+ )
+ });
+ }
+
+ #[gpui::test]
+ fn test_text_chunks(cx: &mut gpui::AppContext) {
+ init_test(cx, |_| {});
+
+ let text = sample_text(6, 6, 'a');
+ let buffer = MultiBuffer::build_simple(&text, cx);
+
+ let font_size = px(14.0);
+ let map = cx.build_model(|cx| {
+ DisplayMap::new(buffer.clone(), font("Helvetica"), font_size, None, 1, 1, cx)
+ });
+
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit(
+ vec![
+ (Point::new(1, 0)..Point::new(1, 0), "\t"),
+ (Point::new(1, 1)..Point::new(1, 1), "\t"),
+ (Point::new(2, 1)..Point::new(2, 1), "\t"),
+ ],
+ None,
+ cx,
+ )
+ });
+
+ assert_eq!(
+ map.update(cx, |map, cx| map.snapshot(cx))
+ .text_chunks(1)
+ .collect::<String>()
+ .lines()
+ .next(),
+ Some(" b bbbbb")
+ );
+ assert_eq!(
+ map.update(cx, |map, cx| map.snapshot(cx))
+ .text_chunks(2)
+ .collect::<String>()
+ .lines()
+ .next(),
+ Some("c ccccc")
+ );
+ }
+
+ #[gpui::test]
+ async fn test_chunks(cx: &mut gpui::TestAppContext) {
+ use unindent::Unindent as _;
+
+ let text = r#"
+ fn outer() {}
+
+ mod module {
+ fn inner() {}
+ }"#
+ .unindent();
+
+ let theme = SyntaxTheme::new_test(vec![
+ ("mod.body", Hsla::red().into()),
+ ("fn.name", Hsla::blue().into()),
+ ]);
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Test".into(),
+ path_suffixes: vec![".test".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )
+ .with_highlights_query(
+ r#"
+ (mod_item name: (identifier) body: _ @mod.body)
+ (function_item name: (identifier) @fn.name)
+ "#,
+ )
+ .unwrap(),
+ );
+ language.set_theme(&theme);
+
+ cx.update(|cx| init_test(cx, |s| s.defaults.tab_size = Some(2.try_into().unwrap())));
+
+ let buffer = cx.build_model(|cx| {
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx)
+ });
+ cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
+ let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
+
+ let font_size = px(14.0);
+
+ let map = cx.build_model(|cx| {
+ DisplayMap::new(buffer, font("Helvetica"), font_size, None, 1, 1, cx)
+ });
+ assert_eq!(
+ cx.update(|cx| syntax_chunks(0..5, &map, &theme, cx)),
+ vec![
+ ("fn ".to_string(), None),
+ ("outer".to_string(), Some(Hsla::blue())),
+ ("() {}\n\nmod module ".to_string(), None),
+ ("{\n fn ".to_string(), Some(Hsla::red())),
+ ("inner".to_string(), Some(Hsla::blue())),
+ ("() {}\n}".to_string(), Some(Hsla::red())),
+ ]
+ );
+ assert_eq!(
+ cx.update(|cx| syntax_chunks(3..5, &map, &theme, cx)),
+ vec![
+ (" fn ".to_string(), Some(Hsla::red())),
+ ("inner".to_string(), Some(Hsla::blue())),
+ ("() {}\n}".to_string(), Some(Hsla::red())),
+ ]
+ );
+
+ map.update(cx, |map, cx| {
+ map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], cx)
+ });
+ assert_eq!(
+ cx.update(|cx| syntax_chunks(0..2, &map, &theme, cx)),
+ vec![
+ ("fn ".to_string(), None),
+ ("out".to_string(), Some(Hsla::blue())),
+ ("⋯".to_string(), None),
+ (" fn ".to_string(), Some(Hsla::red())),
+ ("inner".to_string(), Some(Hsla::blue())),
+ ("() {}\n}".to_string(), Some(Hsla::red())),
+ ]
+ );
+ }
+
+ #[gpui::test]
+ async fn test_chunks_with_soft_wrapping(cx: &mut gpui::TestAppContext) {
+ use unindent::Unindent as _;
+
+ cx.background_executor
+ .set_block_on_ticks(usize::MAX..=usize::MAX);
+
+ let text = r#"
+ fn outer() {}
+
+ mod module {
+ fn inner() {}
+ }"#
+ .unindent();
+
+ let theme = SyntaxTheme::new_test(vec![
+ ("mod.body", Hsla::red().into()),
+ ("fn.name", Hsla::blue().into()),
+ ]);
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Test".into(),
+ path_suffixes: vec![".test".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )
+ .with_highlights_query(
+ r#"
+ (mod_item name: (identifier) body: _ @mod.body)
+ (function_item name: (identifier) @fn.name)
+ "#,
+ )
+ .unwrap(),
+ );
+ language.set_theme(&theme);
+
+ cx.update(|cx| init_test(cx, |_| {}));
+
+ let buffer = cx.build_model(|cx| {
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx)
+ });
+ cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
+ let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
+
+ let font_size = px(16.0);
+
+ let map = cx.build_model(|cx| {
+ DisplayMap::new(buffer, font("Courier"), font_size, Some(px(40.0)), 1, 1, cx)
+ });
+ assert_eq!(
+ cx.update(|cx| syntax_chunks(0..5, &map, &theme, cx)),
+ [
+ ("fn \n".to_string(), None),
+ ("oute\nr".to_string(), Some(Hsla::blue())),
+ ("() \n{}\n\n".to_string(), None),
+ ]
+ );
+ assert_eq!(
+ cx.update(|cx| syntax_chunks(3..5, &map, &theme, cx)),
+ [("{}\n\n".to_string(), None)]
+ );
+
+ map.update(cx, |map, cx| {
+ map.fold(vec![Point::new(0, 6)..Point::new(3, 2)], cx)
+ });
+ assert_eq!(
+ cx.update(|cx| syntax_chunks(1..4, &map, &theme, cx)),
+ [
+ ("out".to_string(), Some(Hsla::blue())),
+ ("⋯\n".to_string(), None),
+ (" \nfn ".to_string(), Some(Hsla::red())),
+ ("i\n".to_string(), Some(Hsla::blue()))
+ ]
+ );
+ }
+
+ #[gpui::test]
+ async fn test_chunks_with_text_highlights(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| init_test(cx, |_| {}));
+
+ let theme = SyntaxTheme::new_test(vec![
+ ("operator", Hsla::red().into()),
+ ("string", Hsla::green().into()),
+ ]);
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Test".into(),
+ path_suffixes: vec![".test".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )
+ .with_highlights_query(
+ r#"
+ ":" @operator
+ (string_literal) @string
+ "#,
+ )
+ .unwrap(),
+ );
+ language.set_theme(&theme);
+
+ let (text, highlighted_ranges) = marked_text_ranges(r#"constˇ «a»: B = "c «d»""#, false);
+
+ let buffer = cx.build_model(|cx| {
+ Buffer::new(0, cx.entity_id().as_u64(), text).with_language(language, cx)
+ });
+ cx.condition(&buffer, |buf, _| !buf.is_parsing()).await;
+
+ let buffer = cx.build_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
+
+ let font_size = px(16.0);
+ let map = cx
+ .build_model(|cx| DisplayMap::new(buffer, font("Courier"), font_size, None, 1, 1, cx));
+
+ enum MyType {}
+
+ let style = HighlightStyle {
+ color: Some(Hsla::blue()),
+ ..Default::default()
+ };
+
+ map.update(cx, |map, _cx| {
+ map.highlight_text(
+ TypeId::of::<MyType>(),
+ highlighted_ranges
+ .into_iter()
+ .map(|range| {
+ buffer_snapshot.anchor_before(range.start)
+ ..buffer_snapshot.anchor_before(range.end)
+ })
+ .collect(),
+ style,
+ );
+ });
+
+ assert_eq!(
+ cx.update(|cx| chunks(0..10, &map, &theme, cx)),
+ [
+ ("const ".to_string(), None, None),
+ ("a".to_string(), None, Some(Hsla::blue())),
+ (":".to_string(), Some(Hsla::red()), None),
+ (" B = ".to_string(), None, None),
+ ("\"c ".to_string(), Some(Hsla::green()), None),
+ ("d".to_string(), Some(Hsla::green()), Some(Hsla::blue())),
+ ("\"".to_string(), Some(Hsla::green()), None),
+ ]
+ );
+ }
+
+ #[gpui::test]
+ fn test_clip_point(cx: &mut gpui::AppContext) {
+ init_test(cx, |_| {});
+
+ fn assert(text: &str, shift_right: bool, bias: Bias, cx: &mut gpui::AppContext) {
+ let (unmarked_snapshot, mut markers) = marked_display_snapshot(text, cx);
+
+ match bias {
+ Bias::Left => {
+ if shift_right {
+ *markers[1].column_mut() += 1;
+ }
+
+ assert_eq!(unmarked_snapshot.clip_point(markers[1], bias), markers[0])
+ }
+ Bias::Right => {
+ if shift_right {
+ *markers[0].column_mut() += 1;
+ }
+
+ assert_eq!(unmarked_snapshot.clip_point(markers[0], bias), markers[1])
+ }
+ };
+ }
+
+ use Bias::{Left, Right};
+ assert("ˇˇα", false, Left, cx);
+ assert("ˇˇα", true, Left, cx);
+ assert("ˇˇα", false, Right, cx);
+ assert("ˇαˇ", true, Right, cx);
+ assert("ˇˇ✋", false, Left, cx);
+ assert("ˇˇ✋", true, Left, cx);
+ assert("ˇˇ✋", false, Right, cx);
+ assert("ˇ✋ˇ", true, Right, cx);
+ assert("ˇˇ🍐", false, Left, cx);
+ assert("ˇˇ🍐", true, Left, cx);
+ assert("ˇˇ🍐", false, Right, cx);
+ assert("ˇ🍐ˇ", true, Right, cx);
+ assert("ˇˇ\t", false, Left, cx);
+ assert("ˇˇ\t", true, Left, cx);
+ assert("ˇˇ\t", false, Right, cx);
+ assert("ˇ\tˇ", true, Right, cx);
+ assert(" ˇˇ\t", false, Left, cx);
+ assert(" ˇˇ\t", true, Left, cx);
+ assert(" ˇˇ\t", false, Right, cx);
+ assert(" ˇ\tˇ", true, Right, cx);
+ assert(" ˇˇ\t", false, Left, cx);
+ assert(" ˇˇ\t", false, Right, cx);
+ }
+
+ #[gpui::test]
+ fn test_clip_at_line_ends(cx: &mut gpui::AppContext) {
+ init_test(cx, |_| {});
+
+ fn assert(text: &str, cx: &mut gpui::AppContext) {
+ let (mut unmarked_snapshot, markers) = marked_display_snapshot(text, cx);
+ unmarked_snapshot.clip_at_line_ends = true;
+ assert_eq!(
+ unmarked_snapshot.clip_point(markers[1], Bias::Left),
+ markers[0]
+ );
+ }
+
+ assert("ˇˇ", cx);
+ assert("ˇaˇ", cx);
+ assert("aˇbˇ", cx);
+ assert("aˇαˇ", cx);
+ }
+
+ #[gpui::test]
+ fn test_tabs_with_multibyte_chars(cx: &mut gpui::AppContext) {
+ init_test(cx, |_| {});
+
+ let text = "✅\t\tα\nβ\t\n🏀β\t\tγ";
+ let buffer = MultiBuffer::build_simple(text, cx);
+ let font_size = px(14.0);
+
+ let map = cx.build_model(|cx| {
+ DisplayMap::new(buffer.clone(), font("Helvetica"), font_size, None, 1, 1, cx)
+ });
+ let map = map.update(cx, |map, cx| map.snapshot(cx));
+ assert_eq!(map.text(), "✅ α\nβ \n🏀β γ");
+ assert_eq!(
+ map.text_chunks(0).collect::<String>(),
+ "✅ α\nβ \n🏀β γ"
+ );
+ assert_eq!(map.text_chunks(1).collect::<String>(), "β \n🏀β γ");
+ assert_eq!(map.text_chunks(2).collect::<String>(), "🏀β γ");
+
+ let point = Point::new(0, "✅\t\t".len() as u32);
+ let display_point = DisplayPoint::new(0, "✅ ".len() as u32);
+ assert_eq!(point.to_display_point(&map), display_point);
+ assert_eq!(display_point.to_point(&map), point);
+
+ let point = Point::new(1, "β\t".len() as u32);
+ let display_point = DisplayPoint::new(1, "β ".len() as u32);
+ assert_eq!(point.to_display_point(&map), display_point);
+ assert_eq!(display_point.to_point(&map), point,);
+
+ let point = Point::new(2, "🏀β\t\t".len() as u32);
+ let display_point = DisplayPoint::new(2, "🏀β ".len() as u32);
+ assert_eq!(point.to_display_point(&map), display_point);
+ assert_eq!(display_point.to_point(&map), point,);
+
+ // Display points inside of expanded tabs
+ assert_eq!(
+ DisplayPoint::new(0, "✅ ".len() as u32).to_point(&map),
+ Point::new(0, "✅\t".len() as u32),
+ );
+ assert_eq!(
+ DisplayPoint::new(0, "✅ ".len() as u32).to_point(&map),
+ Point::new(0, "✅".len() as u32),
+ );
+
+ // Clipping display points inside of multi-byte characters
+ assert_eq!(
+ map.clip_point(DisplayPoint::new(0, "✅".len() as u32 - 1), Left),
+ DisplayPoint::new(0, 0)
+ );
+ assert_eq!(
+ map.clip_point(DisplayPoint::new(0, "✅".len() as u32 - 1), Bias::Right),
+ DisplayPoint::new(0, "✅".len() as u32)
+ );
+ }
+
+ #[gpui::test]
+ fn test_max_point(cx: &mut gpui::AppContext) {
+ init_test(cx, |_| {});
+
+ let buffer = MultiBuffer::build_simple("aaa\n\t\tbbb", cx);
+ let font_size = px(14.0);
+ let map = cx.build_model(|cx| {
+ DisplayMap::new(buffer.clone(), font("Helvetica"), font_size, None, 1, 1, cx)
+ });
+ assert_eq!(
+ map.update(cx, |map, cx| map.snapshot(cx)).max_point(),
+ DisplayPoint::new(1, 11)
+ )
+ }
+
+ fn syntax_chunks<'a>(
+ rows: Range<u32>,
+ map: &Model<DisplayMap>,
+ theme: &'a SyntaxTheme,
+ cx: &mut AppContext,
+ ) -> Vec<(String, Option<Hsla>)> {
+ chunks(rows, map, theme, cx)
+ .into_iter()
+ .map(|(text, color, _)| (text, color))
+ .collect()
+ }
+
+ fn chunks<'a>(
+ rows: Range<u32>,
+ map: &Model<DisplayMap>,
+ theme: &'a SyntaxTheme,
+ cx: &mut AppContext,
+ ) -> Vec<(String, Option<Hsla>, Option<Hsla>)> {
+ let snapshot = map.update(cx, |map, cx| map.snapshot(cx));
+ let mut chunks: Vec<(String, Option<Hsla>, Option<Hsla>)> = Vec::new();
+ for chunk in snapshot.chunks(rows, true, None, None) {
+ let syntax_color = chunk
+ .syntax_highlight_id
+ .and_then(|id| id.style(theme)?.color);
+ let highlight_color = chunk.highlight_style.and_then(|style| style.color);
+ if let Some((last_chunk, last_syntax_color, last_highlight_color)) = chunks.last_mut() {
+ if syntax_color == *last_syntax_color && highlight_color == *last_highlight_color {
+ last_chunk.push_str(chunk.text);
+ continue;
+ }
+ }
+ chunks.push((chunk.text.to_string(), syntax_color, highlight_color));
+ }
+ chunks
+ }
+
+ fn init_test(cx: &mut AppContext, f: impl Fn(&mut AllLanguageSettingsContent)) {
+ let settings = SettingsStore::test(cx);
+ cx.set_global(settings);
+ language::init(cx);
+ crate::init(cx);
+ Project::init_settings(cx);
+ theme::init(LoadThemes::JustBase, cx);
+ cx.update_global::<SettingsStore, _>(|store, cx| {
+ store.update_user_settings::<AllLanguageSettings>(cx, f);
+ });
+ }
+}
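Taken together, this hunk does more than uncomment the old display-map tests; it ports them to the gpui2-era APIs. The visible pattern: cx.add_model becomes cx.build_model, the font_cache/font_id plumbing is replaced by font("Helvetica") plus typed pixel lengths via px(...), Empty::new() render callbacks become div(), SyntaxTheme::new becomes SyntaxTheme::new_test, theme::init((), cx) becomes theme::init(LoadThemes::JustBase, cx), ticks are driven through cx.background_executor rather than cx.foreground(), and update_global::<SettingsStore, _> drops a type parameter. A condensed, illustrative sketch of the new setup, lifted from the hunk above (not a standalone test; DisplayMap, MultiBuffer, font, and px come from the crates under test):

    // Condensed from the hunk above; illustrative of the new setup only.
    let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
    let map = cx.build_model(|cx| {
        DisplayMap::new(
            buffer.clone(),
            font("Helvetica"), // replaces the font_cache/font_id lookups
            px(14.0),          // font sizes and wrap widths are now typed pixel values
            None,              // wrap_width
            1,                 // buffer_start_excerpt_header_height
            1,                 // excerpt_header_height
            cx,
        )
    });
    let snapshot = map.update(cx, |map, cx| map.snapshot(cx));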
@@ -988,680 +988,664 @@ fn offset_for_row(s: &str, target: u32) -> (u32, usize) {
(row, offset)
}
-// #[cfg(test)]
-// mod tests {
-// use super::*;
-// use crate::display_map::inlay_map::InlayMap;
-// use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap};
-// use gpui::Element;
-// use multi_buffer::MultiBuffer;
-// use rand::prelude::*;
-// use settings::SettingsStore;
-// use std::env;
-// use util::RandomCharIter;
-
-// #[gpui::test]
-// fn test_offset_for_row() {
-// assert_eq!(offset_for_row("", 0), (0, 0));
-// assert_eq!(offset_for_row("", 1), (0, 0));
-// assert_eq!(offset_for_row("abcd", 0), (0, 0));
-// assert_eq!(offset_for_row("abcd", 1), (0, 4));
-// assert_eq!(offset_for_row("\n", 0), (0, 0));
-// assert_eq!(offset_for_row("\n", 1), (1, 1));
-// assert_eq!(offset_for_row("abc\ndef\nghi", 0), (0, 0));
-// assert_eq!(offset_for_row("abc\ndef\nghi", 1), (1, 4));
-// assert_eq!(offset_for_row("abc\ndef\nghi", 2), (2, 8));
-// assert_eq!(offset_for_row("abc\ndef\nghi", 3), (2, 11));
-// }
-
-// #[gpui::test]
-// fn test_basic_blocks(cx: &mut gpui::AppContext) {
-// init_test(cx);
-
-// let family_id = cx
-// .font_cache()
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = cx
-// .font_cache()
-// .select_font(family_id, &Default::default())
-// .unwrap();
-
-// let text = "aaa\nbbb\nccc\nddd";
-
-// let buffer = MultiBuffer::build_simple(text, cx);
-// let buffer_snapshot = buffer.read(cx).snapshot(cx);
-// let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
-// let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
-// let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
-// let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap());
-// let (wrap_map, wraps_snapshot) = WrapMap::new(tab_snapshot, font_id, 14.0, None, cx);
-// let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
-
-// let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
-// let block_ids = writer.insert(vec![
-// BlockProperties {
-// style: BlockStyle::Fixed,
-// position: buffer_snapshot.anchor_after(Point::new(1, 0)),
-// height: 1,
-// disposition: BlockDisposition::Above,
-// render: Arc::new(|_| Empty::new().into_any_named("block 1")),
-// },
-// BlockProperties {
-// style: BlockStyle::Fixed,
-// position: buffer_snapshot.anchor_after(Point::new(1, 2)),
-// height: 2,
-// disposition: BlockDisposition::Above,
-// render: Arc::new(|_| Empty::new().into_any_named("block 2")),
-// },
-// BlockProperties {
-// style: BlockStyle::Fixed,
-// position: buffer_snapshot.anchor_after(Point::new(3, 3)),
-// height: 3,
-// disposition: BlockDisposition::Below,
-// render: Arc::new(|_| Empty::new().into_any_named("block 3")),
-// },
-// ]);
-
-// let snapshot = block_map.read(wraps_snapshot, Default::default());
-// assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n");
-
-// let blocks = snapshot
-// .blocks_in_range(0..8)
-// .map(|(start_row, block)| {
-// let block = block.as_custom().unwrap();
-// (start_row..start_row + block.height as u32, block.id)
-// })
-// .collect::<Vec<_>>();
-
-// // When multiple blocks are on the same line, the newer blocks appear first.
-// assert_eq!(
-// blocks,
-// &[
-// (1..2, block_ids[0]),
-// (2..4, block_ids[1]),
-// (7..10, block_ids[2]),
-// ]
-// );
-
-// assert_eq!(
-// snapshot.to_block_point(WrapPoint::new(0, 3)),
-// BlockPoint::new(0, 3)
-// );
-// assert_eq!(
-// snapshot.to_block_point(WrapPoint::new(1, 0)),
-// BlockPoint::new(4, 0)
-// );
-// assert_eq!(
-// snapshot.to_block_point(WrapPoint::new(3, 3)),
-// BlockPoint::new(6, 3)
-// );
-
-// assert_eq!(
-// snapshot.to_wrap_point(BlockPoint::new(0, 3)),
-// WrapPoint::new(0, 3)
-// );
-// assert_eq!(
-// snapshot.to_wrap_point(BlockPoint::new(1, 0)),
-// WrapPoint::new(1, 0)
-// );
-// assert_eq!(
-// snapshot.to_wrap_point(BlockPoint::new(3, 0)),
-// WrapPoint::new(1, 0)
-// );
-// assert_eq!(
-// snapshot.to_wrap_point(BlockPoint::new(7, 0)),
-// WrapPoint::new(3, 3)
-// );
-
-// assert_eq!(
-// snapshot.clip_point(BlockPoint::new(1, 0), Bias::Left),
-// BlockPoint::new(0, 3)
-// );
-// assert_eq!(
-// snapshot.clip_point(BlockPoint::new(1, 0), Bias::Right),
-// BlockPoint::new(4, 0)
-// );
-// assert_eq!(
-// snapshot.clip_point(BlockPoint::new(1, 1), Bias::Left),
-// BlockPoint::new(0, 3)
-// );
-// assert_eq!(
-// snapshot.clip_point(BlockPoint::new(1, 1), Bias::Right),
-// BlockPoint::new(4, 0)
-// );
-// assert_eq!(
-// snapshot.clip_point(BlockPoint::new(4, 0), Bias::Left),
-// BlockPoint::new(4, 0)
-// );
-// assert_eq!(
-// snapshot.clip_point(BlockPoint::new(4, 0), Bias::Right),
-// BlockPoint::new(4, 0)
-// );
-// assert_eq!(
-// snapshot.clip_point(BlockPoint::new(6, 3), Bias::Left),
-// BlockPoint::new(6, 3)
-// );
-// assert_eq!(
-// snapshot.clip_point(BlockPoint::new(6, 3), Bias::Right),
-// BlockPoint::new(6, 3)
-// );
-// assert_eq!(
-// snapshot.clip_point(BlockPoint::new(7, 0), Bias::Left),
-// BlockPoint::new(6, 3)
-// );
-// assert_eq!(
-// snapshot.clip_point(BlockPoint::new(7, 0), Bias::Right),
-// BlockPoint::new(6, 3)
-// );
-
-// assert_eq!(
-// snapshot.buffer_rows(0).collect::<Vec<_>>(),
-// &[
-// Some(0),
-// None,
-// None,
-// None,
-// Some(1),
-// Some(2),
-// Some(3),
-// None,
-// None,
-// None
-// ]
-// );
-
-// // Insert a line break, separating two block decorations into separate lines.
-// let buffer_snapshot = buffer.update(cx, |buffer, cx| {
-// buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "!!!\n")], None, cx);
-// buffer.snapshot(cx)
-// });
-
-// let (inlay_snapshot, inlay_edits) =
-// inlay_map.sync(buffer_snapshot, subscription.consume().into_inner());
-// let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
-// let (tab_snapshot, tab_edits) =
-// tab_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap());
-// let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
-// wrap_map.sync(tab_snapshot, tab_edits, cx)
-// });
-// let snapshot = block_map.read(wraps_snapshot, wrap_edits);
-// assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n");
-// }
-
-// #[gpui::test]
-// fn test_blocks_on_wrapped_lines(cx: &mut gpui::AppContext) {
-// init_test(cx);
-
-// let family_id = cx
-// .font_cache()
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = cx
-// .font_cache()
-// .select_font(family_id, &Default::default())
-// .unwrap();
-
-// let text = "one two three\nfour five six\nseven eight";
-
-// let buffer = MultiBuffer::build_simple(text, cx);
-// let buffer_snapshot = buffer.read(cx).snapshot(cx);
-// let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
-// let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
-// let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
-// let (_, wraps_snapshot) = WrapMap::new(tab_snapshot, font_id, 14.0, Some(60.), cx);
-// let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
-
-// let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
-// writer.insert(vec![
-// BlockProperties {
-// style: BlockStyle::Fixed,
-// position: buffer_snapshot.anchor_after(Point::new(1, 12)),
-// disposition: BlockDisposition::Above,
-// render: Arc::new(|_| Empty::new().into_any_named("block 1")),
-// height: 1,
-// },
-// BlockProperties {
-// style: BlockStyle::Fixed,
-// position: buffer_snapshot.anchor_after(Point::new(1, 1)),
-// disposition: BlockDisposition::Below,
-// render: Arc::new(|_| Empty::new().into_any_named("block 2")),
-// height: 1,
-// },
-// ]);
-
-// // Blocks with an 'above' disposition go above their corresponding buffer line.
-// // Blocks with a 'below' disposition go below their corresponding buffer line.
-// let snapshot = block_map.read(wraps_snapshot, Default::default());
-// assert_eq!(
-// snapshot.text(),
-// "one two \nthree\n\nfour five \nsix\n\nseven \neight"
-// );
-// }
-
-// #[gpui::test(iterations = 100)]
-// fn test_random_blocks(cx: &mut gpui::AppContext, mut rng: StdRng) {
-// init_test(cx);
-
-// let operations = env::var("OPERATIONS")
-// .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
-// .unwrap_or(10);
-
-// let wrap_width = if rng.gen_bool(0.2) {
-// None
-// } else {
-// Some(rng.gen_range(0.0..=100.0))
-// };
-// let tab_size = 1.try_into().unwrap();
-// let family_id = cx
-// .font_cache()
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = cx
-// .font_cache()
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 14.0;
-// let buffer_start_header_height = rng.gen_range(1..=5);
-// let excerpt_header_height = rng.gen_range(1..=5);
-
-// log::info!("Wrap width: {:?}", wrap_width);
-// log::info!("Excerpt Header Height: {:?}", excerpt_header_height);
-
-// let buffer = if rng.gen() {
-// let len = rng.gen_range(0..10);
-// let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
-// log::info!("initial buffer text: {:?}", text);
-// MultiBuffer::build_simple(&text, cx)
-// } else {
-// MultiBuffer::build_random(&mut rng, cx)
-// };
-
-// let mut buffer_snapshot = buffer.read(cx).snapshot(cx);
-// let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
-// let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
-// let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
-// let (wrap_map, wraps_snapshot) =
-// WrapMap::new(tab_snapshot, font_id, font_size, wrap_width, cx);
-// let mut block_map = BlockMap::new(
-// wraps_snapshot,
-// buffer_start_header_height,
-// excerpt_header_height,
-// );
-// let mut custom_blocks = Vec::new();
-
-// for _ in 0..operations {
-// let mut buffer_edits = Vec::new();
-// match rng.gen_range(0..=100) {
-// 0..=19 => {
-// let wrap_width = if rng.gen_bool(0.2) {
-// None
-// } else {
-// Some(rng.gen_range(0.0..=100.0))
-// };
-// log::info!("Setting wrap width to {:?}", wrap_width);
-// wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
-// }
-// 20..=39 => {
-// let block_count = rng.gen_range(1..=5);
-// let block_properties = (0..block_count)
-// .map(|_| {
-// let buffer = buffer.read(cx).read(cx);
-// let position = buffer.anchor_after(
-// buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Left),
-// );
-
-// let disposition = if rng.gen() {
-// BlockDisposition::Above
-// } else {
-// BlockDisposition::Below
-// };
-// let height = rng.gen_range(1..5);
-// log::info!(
-// "inserting block {:?} {:?} with height {}",
-// disposition,
-// position.to_point(&buffer),
-// height
-// );
-// BlockProperties {
-// style: BlockStyle::Fixed,
-// position,
-// height,
-// disposition,
-// render: Arc::new(|_| Empty::new().into_any()),
-// }
-// })
-// .collect::<Vec<_>>();
-
-// let (inlay_snapshot, inlay_edits) =
-// inlay_map.sync(buffer_snapshot.clone(), vec![]);
-// let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
-// let (tab_snapshot, tab_edits) =
-// tab_map.sync(fold_snapshot, fold_edits, tab_size);
-// let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
-// wrap_map.sync(tab_snapshot, tab_edits, cx)
-// });
-// let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
-// let block_ids = block_map.insert(block_properties.clone());
-// for (block_id, props) in block_ids.into_iter().zip(block_properties) {
-// custom_blocks.push((block_id, props));
-// }
-// }
-// 40..=59 if !custom_blocks.is_empty() => {
-// let block_count = rng.gen_range(1..=4.min(custom_blocks.len()));
-// let block_ids_to_remove = (0..block_count)
-// .map(|_| {
-// custom_blocks
-// .remove(rng.gen_range(0..custom_blocks.len()))
-// .0
-// })
-// .collect();
-
-// let (inlay_snapshot, inlay_edits) =
-// inlay_map.sync(buffer_snapshot.clone(), vec![]);
-// let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
-// let (tab_snapshot, tab_edits) =
-// tab_map.sync(fold_snapshot, fold_edits, tab_size);
-// let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
-// wrap_map.sync(tab_snapshot, tab_edits, cx)
-// });
-// let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
-// block_map.remove(block_ids_to_remove);
-// }
-// _ => {
-// buffer.update(cx, |buffer, cx| {
-// let mutation_count = rng.gen_range(1..=5);
-// let subscription = buffer.subscribe();
-// buffer.randomly_mutate(&mut rng, mutation_count, cx);
-// buffer_snapshot = buffer.snapshot(cx);
-// buffer_edits.extend(subscription.consume());
-// log::info!("buffer text: {:?}", buffer_snapshot.text());
-// });
-// }
-// }
-
-// let (inlay_snapshot, inlay_edits) =
-// inlay_map.sync(buffer_snapshot.clone(), buffer_edits);
-// let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
-// let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
-// let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
-// wrap_map.sync(tab_snapshot, tab_edits, cx)
-// });
-// let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits);
-// assert_eq!(
-// blocks_snapshot.transforms.summary().input_rows,
-// wraps_snapshot.max_point().row() + 1
-// );
-// log::info!("blocks text: {:?}", blocks_snapshot.text());
-
-// let mut expected_blocks = Vec::new();
-// expected_blocks.extend(custom_blocks.iter().map(|(id, block)| {
-// let mut position = block.position.to_point(&buffer_snapshot);
-// match block.disposition {
-// BlockDisposition::Above => {
-// position.column = 0;
-// }
-// BlockDisposition::Below => {
-// position.column = buffer_snapshot.line_len(position.row);
-// }
-// };
-// let row = wraps_snapshot.make_wrap_point(position, Bias::Left).row();
-// (
-// row,
-// ExpectedBlock::Custom {
-// disposition: block.disposition,
-// id: *id,
-// height: block.height,
-// },
-// )
-// }));
-// expected_blocks.extend(buffer_snapshot.excerpt_boundaries_in_range(0..).map(
-// |boundary| {
-// let position =
-// wraps_snapshot.make_wrap_point(Point::new(boundary.row, 0), Bias::Left);
-// (
-// position.row(),
-// ExpectedBlock::ExcerptHeader {
-// height: if boundary.starts_new_buffer {
-// buffer_start_header_height
-// } else {
-// excerpt_header_height
-// },
-// starts_new_buffer: boundary.starts_new_buffer,
-// },
-// )
-// },
-// ));
-// expected_blocks.sort_unstable();
-// let mut sorted_blocks_iter = expected_blocks.into_iter().peekable();
-
-// let input_buffer_rows = buffer_snapshot.buffer_rows(0).collect::<Vec<_>>();
-// let mut expected_buffer_rows = Vec::new();
-// let mut expected_text = String::new();
-// let mut expected_block_positions = Vec::new();
-// let input_text = wraps_snapshot.text();
-// for (row, input_line) in input_text.split('\n').enumerate() {
-// let row = row as u32;
-// if row > 0 {
-// expected_text.push('\n');
-// }
-
-// let buffer_row = input_buffer_rows[wraps_snapshot
-// .to_point(WrapPoint::new(row, 0), Bias::Left)
-// .row as usize];
-
-// while let Some((block_row, block)) = sorted_blocks_iter.peek() {
-// if *block_row == row && block.disposition() == BlockDisposition::Above {
-// let (_, block) = sorted_blocks_iter.next().unwrap();
-// let height = block.height() as usize;
-// expected_block_positions
-// .push((expected_text.matches('\n').count() as u32, block));
-// let text = "\n".repeat(height);
-// expected_text.push_str(&text);
-// for _ in 0..height {
-// expected_buffer_rows.push(None);
-// }
-// } else {
-// break;
-// }
-// }
-
-// let soft_wrapped = wraps_snapshot.to_tab_point(WrapPoint::new(row, 0)).column() > 0;
-// expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row });
-// expected_text.push_str(input_line);
-
-// while let Some((block_row, block)) = sorted_blocks_iter.peek() {
-// if *block_row == row && block.disposition() == BlockDisposition::Below {
-// let (_, block) = sorted_blocks_iter.next().unwrap();
-// let height = block.height() as usize;
-// expected_block_positions
-// .push((expected_text.matches('\n').count() as u32 + 1, block));
-// let text = "\n".repeat(height);
-// expected_text.push_str(&text);
-// for _ in 0..height {
-// expected_buffer_rows.push(None);
-// }
-// } else {
-// break;
-// }
-// }
-// }
-
-// let expected_lines = expected_text.split('\n').collect::<Vec<_>>();
-// let expected_row_count = expected_lines.len();
-// for start_row in 0..expected_row_count {
-// let expected_text = expected_lines[start_row..].join("\n");
-// let actual_text = blocks_snapshot
-// .chunks(
-// start_row as u32..blocks_snapshot.max_point().row + 1,
-// false,
-// Highlights::default(),
-// )
-// .map(|chunk| chunk.text)
-// .collect::<String>();
-// assert_eq!(
-// actual_text, expected_text,
-// "incorrect text starting from row {}",
-// start_row
-// );
-// assert_eq!(
-// blocks_snapshot
-// .buffer_rows(start_row as u32)
-// .collect::<Vec<_>>(),
-// &expected_buffer_rows[start_row..]
-// );
-// }
-
-// assert_eq!(
-// blocks_snapshot
-// .blocks_in_range(0..(expected_row_count as u32))
-// .map(|(row, block)| (row, block.clone().into()))
-// .collect::<Vec<_>>(),
-// expected_block_positions
-// );
-
-// let mut expected_longest_rows = Vec::new();
-// let mut longest_line_len = -1_isize;
-// for (row, line) in expected_lines.iter().enumerate() {
-// let row = row as u32;
-
-// assert_eq!(
-// blocks_snapshot.line_len(row),
-// line.len() as u32,
-// "invalid line len for row {}",
-// row
-// );
-
-// let line_char_count = line.chars().count() as isize;
-// match line_char_count.cmp(&longest_line_len) {
-// Ordering::Less => {}
-// Ordering::Equal => expected_longest_rows.push(row),
-// Ordering::Greater => {
-// longest_line_len = line_char_count;
-// expected_longest_rows.clear();
-// expected_longest_rows.push(row);
-// }
-// }
-// }
-
-// let longest_row = blocks_snapshot.longest_row();
-// assert!(
-// expected_longest_rows.contains(&longest_row),
-// "incorrect longest row {}. expected {:?} with length {}",
-// longest_row,
-// expected_longest_rows,
-// longest_line_len,
-// );
-
-// for row in 0..=blocks_snapshot.wrap_snapshot.max_point().row() {
-// let wrap_point = WrapPoint::new(row, 0);
-// let block_point = blocks_snapshot.to_block_point(wrap_point);
-// assert_eq!(blocks_snapshot.to_wrap_point(block_point), wrap_point);
-// }
-
-// let mut block_point = BlockPoint::new(0, 0);
-// for c in expected_text.chars() {
-// let left_point = blocks_snapshot.clip_point(block_point, Bias::Left);
-// let left_buffer_point = blocks_snapshot.to_point(left_point, Bias::Left);
-// assert_eq!(
-// blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(left_point)),
-// left_point
-// );
-// assert_eq!(
-// left_buffer_point,
-// buffer_snapshot.clip_point(left_buffer_point, Bias::Right),
-// "{:?} is not valid in buffer coordinates",
-// left_point
-// );
-
-// let right_point = blocks_snapshot.clip_point(block_point, Bias::Right);
-// let right_buffer_point = blocks_snapshot.to_point(right_point, Bias::Right);
-// assert_eq!(
-// blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(right_point)),
-// right_point
-// );
-// assert_eq!(
-// right_buffer_point,
-// buffer_snapshot.clip_point(right_buffer_point, Bias::Left),
-// "{:?} is not valid in buffer coordinates",
-// right_point
-// );
-
-// if c == '\n' {
-// block_point.0 += Point::new(1, 0);
-// } else {
-// block_point.column += c.len_utf8() as u32;
-// }
-// }
-// }
-
-// #[derive(Debug, Eq, PartialEq, Ord, PartialOrd)]
-// enum ExpectedBlock {
-// ExcerptHeader {
-// height: u8,
-// starts_new_buffer: bool,
-// },
-// Custom {
-// disposition: BlockDisposition,
-// id: BlockId,
-// height: u8,
-// },
-// }
-
-// impl ExpectedBlock {
-// fn height(&self) -> u8 {
-// match self {
-// ExpectedBlock::ExcerptHeader { height, .. } => *height,
-// ExpectedBlock::Custom { height, .. } => *height,
-// }
-// }
-
-// fn disposition(&self) -> BlockDisposition {
-// match self {
-// ExpectedBlock::ExcerptHeader { .. } => BlockDisposition::Above,
-// ExpectedBlock::Custom { disposition, .. } => *disposition,
-// }
-// }
-// }
-
-// impl From<TransformBlock> for ExpectedBlock {
-// fn from(block: TransformBlock) -> Self {
-// match block {
-// TransformBlock::Custom(block) => ExpectedBlock::Custom {
-// id: block.id,
-// disposition: block.disposition,
-// height: block.height,
-// },
-// TransformBlock::ExcerptHeader {
-// height,
-// starts_new_buffer,
-// ..
-// } => ExpectedBlock::ExcerptHeader {
-// height,
-// starts_new_buffer,
-// },
-// }
-// }
-// }
-// }
-
-// fn init_test(cx: &mut gpui::AppContext) {
-// cx.set_global(SettingsStore::test(cx));
-// theme::init(cx);
-// }
-
-// impl TransformBlock {
-// fn as_custom(&self) -> Option<&Block> {
-// match self {
-// TransformBlock::Custom(block) => Some(block),
-// TransformBlock::ExcerptHeader { .. } => None,
-// }
-// }
-// }
-
-// impl BlockSnapshot {
-// fn to_point(&self, point: BlockPoint, bias: Bias) -> Point {
-// self.wrap_snapshot.to_point(self.to_wrap_point(point), bias)
-// }
-// }
-// }
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::display_map::inlay_map::InlayMap;
+ use crate::display_map::{fold_map::FoldMap, tab_map::TabMap, wrap_map::WrapMap};
+ use gpui::{div, font, px, Element, Platform as _};
+ use multi_buffer::MultiBuffer;
+ use rand::prelude::*;
+ use settings::SettingsStore;
+ use std::env;
+ use util::RandomCharIter;
+
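+ // Based on the assertions below: `offset_for_row` advances through the text by
+ // whole rows, returning the number of rows actually traversed and the offset
+ // reached, clamping at the end of the text when the requested row is out of range.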
+ #[gpui::test]
+ fn test_offset_for_row() {
+ assert_eq!(offset_for_row("", 0), (0, 0));
+ assert_eq!(offset_for_row("", 1), (0, 0));
+ assert_eq!(offset_for_row("abcd", 0), (0, 0));
+ assert_eq!(offset_for_row("abcd", 1), (0, 4));
+ assert_eq!(offset_for_row("\n", 0), (0, 0));
+ assert_eq!(offset_for_row("\n", 1), (1, 1));
+ assert_eq!(offset_for_row("abc\ndef\nghi", 0), (0, 0));
+ assert_eq!(offset_for_row("abc\ndef\nghi", 1), (1, 4));
+ assert_eq!(offset_for_row("abc\ndef\nghi", 2), (2, 8));
+ assert_eq!(offset_for_row("abc\ndef\nghi", 3), (2, 11));
+ }
+
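+ // Covers block insertion plus conversion between wrap and block coordinates,
+ // point clipping, and buffer-row mapping on a small unwrapped buffer.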
+ #[gpui::test]
+ fn test_basic_blocks(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| init_test(cx));
+
+ let text = "aaa\nbbb\nccc\nddd";
+
+ let buffer = cx.update(|cx| MultiBuffer::build_simple(text, cx));
+ let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
+ let subscription = buffer.update(cx, |buffer, _| buffer.subscribe());
+ let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
+ let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 1.try_into().unwrap());
+ let (wrap_map, wraps_snapshot) =
+ cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
+ let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
+
+ let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
+ let block_ids = writer.insert(vec![
+ BlockProperties {
+ style: BlockStyle::Fixed,
+ position: buffer_snapshot.anchor_after(Point::new(1, 0)),
+ height: 1,
+ disposition: BlockDisposition::Above,
+ render: Arc::new(|_| div().into_any()),
+ },
+ BlockProperties {
+ style: BlockStyle::Fixed,
+ position: buffer_snapshot.anchor_after(Point::new(1, 2)),
+ height: 2,
+ disposition: BlockDisposition::Above,
+ render: Arc::new(|_| div().into_any()),
+ },
+ BlockProperties {
+ style: BlockStyle::Fixed,
+ position: buffer_snapshot.anchor_after(Point::new(3, 3)),
+ height: 3,
+ disposition: BlockDisposition::Below,
+ render: Arc::new(|_| div().into_any()),
+ },
+ ]);
+
+ let snapshot = block_map.read(wraps_snapshot, Default::default());
+ assert_eq!(snapshot.text(), "aaa\n\n\n\nbbb\nccc\nddd\n\n\n");
+
+ let blocks = snapshot
+ .blocks_in_range(0..8)
+ .map(|(start_row, block)| {
+ let block = block.as_custom().unwrap();
+ (start_row..start_row + block.height as u32, block.id)
+ })
+ .collect::<Vec<_>>();
+
+ // When multiple blocks are on the same line, the newer blocks appear first.
+ assert_eq!(
+ blocks,
+ &[
+ (1..2, block_ids[0]),
+ (2..4, block_ids[1]),
+ (7..10, block_ids[2]),
+ ]
+ );
+
+ assert_eq!(
+ snapshot.to_block_point(WrapPoint::new(0, 3)),
+ BlockPoint::new(0, 3)
+ );
+ assert_eq!(
+ snapshot.to_block_point(WrapPoint::new(1, 0)),
+ BlockPoint::new(4, 0)
+ );
+ assert_eq!(
+ snapshot.to_block_point(WrapPoint::new(3, 3)),
+ BlockPoint::new(6, 3)
+ );
+
+ assert_eq!(
+ snapshot.to_wrap_point(BlockPoint::new(0, 3)),
+ WrapPoint::new(0, 3)
+ );
+ assert_eq!(
+ snapshot.to_wrap_point(BlockPoint::new(1, 0)),
+ WrapPoint::new(1, 0)
+ );
+ assert_eq!(
+ snapshot.to_wrap_point(BlockPoint::new(3, 0)),
+ WrapPoint::new(1, 0)
+ );
+ assert_eq!(
+ snapshot.to_wrap_point(BlockPoint::new(7, 0)),
+ WrapPoint::new(3, 3)
+ );
+
+ assert_eq!(
+ snapshot.clip_point(BlockPoint::new(1, 0), Bias::Left),
+ BlockPoint::new(0, 3)
+ );
+ assert_eq!(
+ snapshot.clip_point(BlockPoint::new(1, 0), Bias::Right),
+ BlockPoint::new(4, 0)
+ );
+ assert_eq!(
+ snapshot.clip_point(BlockPoint::new(1, 1), Bias::Left),
+ BlockPoint::new(0, 3)
+ );
+ assert_eq!(
+ snapshot.clip_point(BlockPoint::new(1, 1), Bias::Right),
+ BlockPoint::new(4, 0)
+ );
+ assert_eq!(
+ snapshot.clip_point(BlockPoint::new(4, 0), Bias::Left),
+ BlockPoint::new(4, 0)
+ );
+ assert_eq!(
+ snapshot.clip_point(BlockPoint::new(4, 0), Bias::Right),
+ BlockPoint::new(4, 0)
+ );
+ assert_eq!(
+ snapshot.clip_point(BlockPoint::new(6, 3), Bias::Left),
+ BlockPoint::new(6, 3)
+ );
+ assert_eq!(
+ snapshot.clip_point(BlockPoint::new(6, 3), Bias::Right),
+ BlockPoint::new(6, 3)
+ );
+ assert_eq!(
+ snapshot.clip_point(BlockPoint::new(7, 0), Bias::Left),
+ BlockPoint::new(6, 3)
+ );
+ assert_eq!(
+ snapshot.clip_point(BlockPoint::new(7, 0), Bias::Right),
+ BlockPoint::new(6, 3)
+ );
+
+ assert_eq!(
+ snapshot.buffer_rows(0).collect::<Vec<_>>(),
+ &[
+ Some(0),
+ None,
+ None,
+ None,
+ Some(1),
+ Some(2),
+ Some(3),
+ None,
+ None,
+ None
+ ]
+ );
+
+ // Insert a line break, separating two block decorations into separate lines.
+ let buffer_snapshot = buffer.update(cx, |buffer, cx| {
+ buffer.edit([(Point::new(1, 1)..Point::new(1, 1), "!!!\n")], None, cx);
+ buffer.snapshot(cx)
+ });
+
+ let (inlay_snapshot, inlay_edits) =
+ inlay_map.sync(buffer_snapshot, subscription.consume().into_inner());
+ let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
+ let (tab_snapshot, tab_edits) =
+ tab_map.sync(fold_snapshot, fold_edits, 4.try_into().unwrap());
+ let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
+ wrap_map.sync(tab_snapshot, tab_edits, cx)
+ });
+ let snapshot = block_map.read(wraps_snapshot, wrap_edits);
+ assert_eq!(snapshot.text(), "aaa\n\nb!!!\n\n\nbb\nccc\nddd\n\n\n");
+ }
+
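+ // Blocks are anchored to buffer positions, so they should stay attached to the
+ // correct logical line even when soft wrapping splits it across display rows.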
+ #[gpui::test]
+ fn test_blocks_on_wrapped_lines(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| init_test(cx));
+
+ let font_id = cx
+ .test_platform
+ .text_system()
+ .font_id(&font("Helvetica"))
+ .unwrap();
+
+ let text = "one two three\nfour five six\nseven eight";
+
+ let buffer = cx.update(|cx| MultiBuffer::build_simple(text, cx));
+ let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
+ let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
+ let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
+ let (_, wraps_snapshot) = cx.update(|cx| {
+ WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), Some(px(60.)), cx)
+ });
+ let mut block_map = BlockMap::new(wraps_snapshot.clone(), 1, 1);
+
+ let mut writer = block_map.write(wraps_snapshot.clone(), Default::default());
+ writer.insert(vec![
+ BlockProperties {
+ style: BlockStyle::Fixed,
+ position: buffer_snapshot.anchor_after(Point::new(1, 12)),
+ disposition: BlockDisposition::Above,
+ render: Arc::new(|_| div().into_any()),
+ height: 1,
+ },
+ BlockProperties {
+ style: BlockStyle::Fixed,
+ position: buffer_snapshot.anchor_after(Point::new(1, 1)),
+ disposition: BlockDisposition::Below,
+ render: Arc::new(|_| div().into_any()),
+ height: 1,
+ },
+ ]);
+
+ // Blocks with an 'above' disposition go above their corresponding buffer line.
+ // Blocks with a 'below' disposition go below their corresponding buffer line.
+ let snapshot = block_map.read(wraps_snapshot, Default::default());
+ assert_eq!(
+ snapshot.text(),
+ "one two \nthree\n\nfour five \nsix\n\nseven \neight"
+ );
+ }
+
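+ // Randomized test: mutate the buffer, wrap width, and block set, then rebuild
+ // the expected text, buffer rows, and block positions independently and compare
+ // them against the BlockMap snapshot.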
+ #[gpui::test(iterations = 100)]
+ fn test_random_blocks(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
+ cx.update(|cx| init_test(cx));
+
+ let operations = env::var("OPERATIONS")
+ .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+ .unwrap_or(10);
+
+ let wrap_width = if rng.gen_bool(0.2) {
+ None
+ } else {
+ Some(px(rng.gen_range(0.0..=100.0)))
+ };
+ let tab_size = 1.try_into().unwrap();
+ let font_size = px(14.0);
+ let buffer_start_header_height = rng.gen_range(1..=5);
+ let excerpt_header_height = rng.gen_range(1..=5);
+
+ log::info!("Wrap width: {:?}", wrap_width);
+ log::info!("Excerpt Header Height: {:?}", excerpt_header_height);
+
+ let buffer = if rng.gen() {
+ let len = rng.gen_range(0..10);
+ let text = RandomCharIter::new(&mut rng).take(len).collect::<String>();
+ log::info!("initial buffer text: {:?}", text);
+ cx.update(|cx| MultiBuffer::build_simple(&text, cx))
+ } else {
+ cx.update(|cx| MultiBuffer::build_random(&mut rng, cx))
+ };
+
+ let mut buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
+ let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot);
+ let (mut tab_map, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
+ let (wrap_map, wraps_snapshot) = cx
+ .update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), font_size, wrap_width, cx));
+ let mut block_map = BlockMap::new(
+ wraps_snapshot,
+ buffer_start_header_height,
+ excerpt_header_height,
+ );
+ let mut custom_blocks = Vec::new();
+
+ for _ in 0..operations {
+ let mut buffer_edits = Vec::new();
+ match rng.gen_range(0..=100) {
+ 0..=19 => {
+ let wrap_width = if rng.gen_bool(0.2) {
+ None
+ } else {
+ Some(px(rng.gen_range(0.0..=100.0)))
+ };
+ log::info!("Setting wrap width to {:?}", wrap_width);
+ wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
+ }
+ 20..=39 => {
+ let block_count = rng.gen_range(1..=5);
+ let block_properties = (0..block_count)
+ .map(|_| {
+ let buffer = cx.update(|cx| buffer.read(cx).read(cx).clone());
+ let position = buffer.anchor_after(
+ buffer.clip_offset(rng.gen_range(0..=buffer.len()), Bias::Left),
+ );
+
+ let disposition = if rng.gen() {
+ BlockDisposition::Above
+ } else {
+ BlockDisposition::Below
+ };
+ let height = rng.gen_range(1..5);
+ log::info!(
+ "inserting block {:?} {:?} with height {}",
+ disposition,
+ position.to_point(&buffer),
+ height
+ );
+ BlockProperties {
+ style: BlockStyle::Fixed,
+ position,
+ height,
+ disposition,
+ render: Arc::new(|_| div().into_any()),
+ }
+ })
+ .collect::<Vec<_>>();
+
+ let (inlay_snapshot, inlay_edits) =
+ inlay_map.sync(buffer_snapshot.clone(), vec![]);
+ let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
+ let (tab_snapshot, tab_edits) =
+ tab_map.sync(fold_snapshot, fold_edits, tab_size);
+ let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
+ wrap_map.sync(tab_snapshot, tab_edits, cx)
+ });
+ let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
+ let block_ids = block_map.insert(block_properties.clone());
+ for (block_id, props) in block_ids.into_iter().zip(block_properties) {
+ custom_blocks.push((block_id, props));
+ }
+ }
+ 40..=59 if !custom_blocks.is_empty() => {
+ let block_count = rng.gen_range(1..=4.min(custom_blocks.len()));
+ let block_ids_to_remove = (0..block_count)
+ .map(|_| {
+ custom_blocks
+ .remove(rng.gen_range(0..custom_blocks.len()))
+ .0
+ })
+ .collect();
+
+ let (inlay_snapshot, inlay_edits) =
+ inlay_map.sync(buffer_snapshot.clone(), vec![]);
+ let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
+ let (tab_snapshot, tab_edits) =
+ tab_map.sync(fold_snapshot, fold_edits, tab_size);
+ let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
+ wrap_map.sync(tab_snapshot, tab_edits, cx)
+ });
+ let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
+ block_map.remove(block_ids_to_remove);
+ }
+ _ => {
+ buffer.update(cx, |buffer, cx| {
+ let mutation_count = rng.gen_range(1..=5);
+ let subscription = buffer.subscribe();
+ buffer.randomly_mutate(&mut rng, mutation_count, cx);
+ buffer_snapshot = buffer.snapshot(cx);
+ buffer_edits.extend(subscription.consume());
+ log::info!("buffer text: {:?}", buffer_snapshot.text());
+ });
+ }
+ }
+
+ let (inlay_snapshot, inlay_edits) =
+ inlay_map.sync(buffer_snapshot.clone(), buffer_edits);
+ let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
+ let (tab_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
+ let (wraps_snapshot, wrap_edits) = wrap_map.update(cx, |wrap_map, cx| {
+ wrap_map.sync(tab_snapshot, tab_edits, cx)
+ });
+ let blocks_snapshot = block_map.read(wraps_snapshot.clone(), wrap_edits);
+ assert_eq!(
+ blocks_snapshot.transforms.summary().input_rows,
+ wraps_snapshot.max_point().row() + 1
+ );
+ log::info!("blocks text: {:?}", blocks_snapshot.text());
+
+ let mut expected_blocks = Vec::new();
+ expected_blocks.extend(custom_blocks.iter().map(|(id, block)| {
+ let mut position = block.position.to_point(&buffer_snapshot);
+ match block.disposition {
+ BlockDisposition::Above => {
+ position.column = 0;
+ }
+ BlockDisposition::Below => {
+ position.column = buffer_snapshot.line_len(position.row);
+ }
+ };
+ let row = wraps_snapshot.make_wrap_point(position, Bias::Left).row();
+ (
+ row,
+ ExpectedBlock::Custom {
+ disposition: block.disposition,
+ id: *id,
+ height: block.height,
+ },
+ )
+ }));
+ expected_blocks.extend(buffer_snapshot.excerpt_boundaries_in_range(0..).map(
+ |boundary| {
+ let position =
+ wraps_snapshot.make_wrap_point(Point::new(boundary.row, 0), Bias::Left);
+ (
+ position.row(),
+ ExpectedBlock::ExcerptHeader {
+ height: if boundary.starts_new_buffer {
+ buffer_start_header_height
+ } else {
+ excerpt_header_height
+ },
+ starts_new_buffer: boundary.starts_new_buffer,
+ },
+ )
+ },
+ ));
+ expected_blocks.sort_unstable();
+ let mut sorted_blocks_iter = expected_blocks.into_iter().peekable();
+
+ let input_buffer_rows = buffer_snapshot.buffer_rows(0).collect::<Vec<_>>();
+ let mut expected_buffer_rows = Vec::new();
+ let mut expected_text = String::new();
+ let mut expected_block_positions = Vec::new();
+ let input_text = wraps_snapshot.text();
+ for (row, input_line) in input_text.split('\n').enumerate() {
+ let row = row as u32;
+ if row > 0 {
+ expected_text.push('\n');
+ }
+
+ let buffer_row = input_buffer_rows[wraps_snapshot
+ .to_point(WrapPoint::new(row, 0), Bias::Left)
+ .row as usize];
+
+ while let Some((block_row, block)) = sorted_blocks_iter.peek() {
+ if *block_row == row && block.disposition() == BlockDisposition::Above {
+ let (_, block) = sorted_blocks_iter.next().unwrap();
+ let height = block.height() as usize;
+ expected_block_positions
+ .push((expected_text.matches('\n').count() as u32, block));
+ let text = "\n".repeat(height);
+ expected_text.push_str(&text);
+ for _ in 0..height {
+ expected_buffer_rows.push(None);
+ }
+ } else {
+ break;
+ }
+ }
+
+ let soft_wrapped = wraps_snapshot.to_tab_point(WrapPoint::new(row, 0)).column() > 0;
+ expected_buffer_rows.push(if soft_wrapped { None } else { buffer_row });
+ expected_text.push_str(input_line);
+
+ while let Some((block_row, block)) = sorted_blocks_iter.peek() {
+ if *block_row == row && block.disposition() == BlockDisposition::Below {
+ let (_, block) = sorted_blocks_iter.next().unwrap();
+ let height = block.height() as usize;
+ expected_block_positions
+ .push((expected_text.matches('\n').count() as u32 + 1, block));
+ let text = "\n".repeat(height);
+ expected_text.push_str(&text);
+ for _ in 0..height {
+ expected_buffer_rows.push(None);
+ }
+ } else {
+ break;
+ }
+ }
+ }
+
+ let expected_lines = expected_text.split('\n').collect::<Vec<_>>();
+ let expected_row_count = expected_lines.len();
+ for start_row in 0..expected_row_count {
+ let expected_text = expected_lines[start_row..].join("\n");
+ let actual_text = blocks_snapshot
+ .chunks(
+ start_row as u32..blocks_snapshot.max_point().row + 1,
+ false,
+ Highlights::default(),
+ )
+ .map(|chunk| chunk.text)
+ .collect::<String>();
+ assert_eq!(
+ actual_text, expected_text,
+ "incorrect text starting from row {}",
+ start_row
+ );
+ assert_eq!(
+ blocks_snapshot
+ .buffer_rows(start_row as u32)
+ .collect::<Vec<_>>(),
+ &expected_buffer_rows[start_row..]
+ );
+ }
+
+ assert_eq!(
+ blocks_snapshot
+ .blocks_in_range(0..(expected_row_count as u32))
+ .map(|(row, block)| (row, block.clone().into()))
+ .collect::<Vec<_>>(),
+ expected_block_positions
+ );
+
+ let mut expected_longest_rows = Vec::new();
+ let mut longest_line_len = -1_isize;
+ for (row, line) in expected_lines.iter().enumerate() {
+ let row = row as u32;
+
+ assert_eq!(
+ blocks_snapshot.line_len(row),
+ line.len() as u32,
+ "invalid line len for row {}",
+ row
+ );
+
+ let line_char_count = line.chars().count() as isize;
+ match line_char_count.cmp(&longest_line_len) {
+ Ordering::Less => {}
+ Ordering::Equal => expected_longest_rows.push(row),
+ Ordering::Greater => {
+ longest_line_len = line_char_count;
+ expected_longest_rows.clear();
+ expected_longest_rows.push(row);
+ }
+ }
+ }
+
+ let longest_row = blocks_snapshot.longest_row();
+ assert!(
+ expected_longest_rows.contains(&longest_row),
+ "incorrect longest row {}. expected {:?} with length {}",
+ longest_row,
+ expected_longest_rows,
+ longest_line_len,
+ );
+
+ for row in 0..=blocks_snapshot.wrap_snapshot.max_point().row() {
+ let wrap_point = WrapPoint::new(row, 0);
+ let block_point = blocks_snapshot.to_block_point(wrap_point);
+ assert_eq!(blocks_snapshot.to_wrap_point(block_point), wrap_point);
+ }
+
+ let mut block_point = BlockPoint::new(0, 0);
+ for c in expected_text.chars() {
+ let left_point = blocks_snapshot.clip_point(block_point, Bias::Left);
+ let left_buffer_point = blocks_snapshot.to_point(left_point, Bias::Left);
+ assert_eq!(
+ blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(left_point)),
+ left_point
+ );
+ assert_eq!(
+ left_buffer_point,
+ buffer_snapshot.clip_point(left_buffer_point, Bias::Right),
+ "{:?} is not valid in buffer coordinates",
+ left_point
+ );
+
+ let right_point = blocks_snapshot.clip_point(block_point, Bias::Right);
+ let right_buffer_point = blocks_snapshot.to_point(right_point, Bias::Right);
+ assert_eq!(
+ blocks_snapshot.to_block_point(blocks_snapshot.to_wrap_point(right_point)),
+ right_point
+ );
+ assert_eq!(
+ right_buffer_point,
+ buffer_snapshot.clip_point(right_buffer_point, Bias::Left),
+ "{:?} is not valid in buffer coordinates",
+ right_point
+ );
+
+ if c == '\n' {
+ block_point.0 += Point::new(1, 0);
+ } else {
+ block_point.column += c.len_utf8() as u32;
+ }
+ }
+ }
+
+ #[derive(Debug, Eq, PartialEq, Ord, PartialOrd)]
+ enum ExpectedBlock {
+ ExcerptHeader {
+ height: u8,
+ starts_new_buffer: bool,
+ },
+ Custom {
+ disposition: BlockDisposition,
+ id: BlockId,
+ height: u8,
+ },
+ }
+
+ impl ExpectedBlock {
+ fn height(&self) -> u8 {
+ match self {
+ ExpectedBlock::ExcerptHeader { height, .. } => *height,
+ ExpectedBlock::Custom { height, .. } => *height,
+ }
+ }
+
+ fn disposition(&self) -> BlockDisposition {
+ match self {
+ ExpectedBlock::ExcerptHeader { .. } => BlockDisposition::Above,
+ ExpectedBlock::Custom { disposition, .. } => *disposition,
+ }
+ }
+ }
+
+ impl From<TransformBlock> for ExpectedBlock {
+ fn from(block: TransformBlock) -> Self {
+ match block {
+ TransformBlock::Custom(block) => ExpectedBlock::Custom {
+ id: block.id,
+ disposition: block.disposition,
+ height: block.height,
+ },
+ TransformBlock::ExcerptHeader {
+ height,
+ starts_new_buffer,
+ ..
+ } => ExpectedBlock::ExcerptHeader {
+ height,
+ starts_new_buffer,
+ },
+ }
+ }
+ }
+ }
+
+ fn init_test(cx: &mut gpui::AppContext) {
+ let settings = SettingsStore::test(cx);
+ cx.set_global(settings);
+ theme::init(theme::LoadThemes::JustBase, cx);
+ }
+
+ impl TransformBlock {
+ fn as_custom(&self) -> Option<&Block> {
+ match self {
+ TransformBlock::Custom(block) => Some(block),
+ TransformBlock::ExcerptHeader { .. } => None,
+ }
+ }
+ }
+
+ impl BlockSnapshot {
+ fn to_point(&self, point: BlockPoint, bias: Bias) -> Point {
+ self.wrap_snapshot.to_point(self.to_wrap_point(point), bias)
+ }
+ }
+}
@@ -741,49 +741,48 @@ impl WrapSnapshot {
}
fn check_invariants(&self) {
- // todo!()
- // #[cfg(test)]
- // {
- // assert_eq!(
- // TabPoint::from(self.transforms.summary().input.lines),
- // self.tab_snapshot.max_point()
- // );
-
- // {
- // let mut transforms = self.transforms.cursor::<()>().peekable();
- // while let Some(transform) = transforms.next() {
- // if let Some(next_transform) = transforms.peek() {
- // assert!(transform.is_isomorphic() != next_transform.is_isomorphic());
- // }
- // }
- // }
-
- // let text = language::Rope::from(self.text().as_str());
- // let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0);
- // let mut expected_buffer_rows = Vec::new();
- // let mut prev_tab_row = 0;
- // for display_row in 0..=self.max_point().row() {
- // let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
- // if tab_point.row() == prev_tab_row && display_row != 0 {
- // expected_buffer_rows.push(None);
- // } else {
- // expected_buffer_rows.push(input_buffer_rows.next().unwrap());
- // }
-
- // prev_tab_row = tab_point.row();
- // assert_eq!(self.line_len(display_row), text.line_len(display_row));
- // }
-
- // for start_display_row in 0..expected_buffer_rows.len() {
- // assert_eq!(
- // self.buffer_rows(start_display_row as u32)
- // .collect::<Vec<_>>(),
- // &expected_buffer_rows[start_display_row..],
- // "invalid buffer_rows({}..)",
- // start_display_row
- // );
- // }
- // }
+ #[cfg(test)]
+ {
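+ // In tests, check that adjacent transforms alternate between isomorphic and
+ // non-isomorphic runs, and that line lengths and buffer rows agree with the
+ // snapshot's text.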
+ assert_eq!(
+ TabPoint::from(self.transforms.summary().input.lines),
+ self.tab_snapshot.max_point()
+ );
+
+ {
+ let mut transforms = self.transforms.cursor::<()>().peekable();
+ while let Some(transform) = transforms.next() {
+ if let Some(next_transform) = transforms.peek() {
+ assert!(transform.is_isomorphic() != next_transform.is_isomorphic());
+ }
+ }
+ }
+
+ let text = language::Rope::from(self.text().as_str());
+ let mut input_buffer_rows = self.tab_snapshot.buffer_rows(0);
+ let mut expected_buffer_rows = Vec::new();
+ let mut prev_tab_row = 0;
+ for display_row in 0..=self.max_point().row() {
+ let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
+ if tab_point.row() == prev_tab_row && display_row != 0 {
+ expected_buffer_rows.push(None);
+ } else {
+ expected_buffer_rows.push(input_buffer_rows.next().unwrap());
+ }
+
+ prev_tab_row = tab_point.row();
+ assert_eq!(self.line_len(display_row), text.line_len(display_row));
+ }
+
+ for start_display_row in 0..expected_buffer_rows.len() {
+ assert_eq!(
+ self.buffer_rows(start_display_row as u32)
+ .collect::<Vec<_>>(),
+ &expected_buffer_rows[start_display_row..],
+ "invalid buffer_rows({}..)",
+ start_display_row
+ );
+ }
+ }
}
}
@@ -1026,337 +1025,334 @@ fn consolidate_wrap_edits(edits: &mut Vec<WrapEdit>) {
}
}
-// #[cfg(test)]
-// mod tests {
-// use super::*;
-// use crate::{
-// display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
-// MultiBuffer,
-// };
-// use gpui::test::observe;
-// use rand::prelude::*;
-// use settings::SettingsStore;
-// use smol::stream::StreamExt;
-// use std::{cmp, env, num::NonZeroU32};
-// use text::Rope;
-
-// #[gpui::test(iterations = 100)]
-// async fn test_random_wraps(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
-// init_test(cx);
-
-// cx.foreground().set_block_on_ticks(0..=50);
-// let operations = env::var("OPERATIONS")
-// .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
-// .unwrap_or(10);
-
-// let font_cache = cx.font_cache().clone();
-// let font_system = cx.platform().fonts();
-// let mut wrap_width = if rng.gen_bool(0.1) {
-// None
-// } else {
-// Some(rng.gen_range(0.0..=1000.0))
-// };
-// let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap();
-// let family_id = font_cache
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = font_cache
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 14.0;
-
-// log::info!("Tab size: {}", tab_size);
-// log::info!("Wrap width: {:?}", wrap_width);
-
-// let buffer = cx.update(|cx| {
-// if rng.gen() {
-// MultiBuffer::build_random(&mut rng, cx)
-// } else {
-// let len = rng.gen_range(0..10);
-// let text = util::RandomCharIter::new(&mut rng)
-// .take(len)
-// .collect::<String>();
-// MultiBuffer::build_simple(&text, cx)
-// }
-// });
-// let mut buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
-// log::info!("Buffer text: {:?}", buffer_snapshot.text());
-// let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
-// log::info!("InlayMap text: {:?}", inlay_snapshot.text());
-// let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone());
-// log::info!("FoldMap text: {:?}", fold_snapshot.text());
-// let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
-// let tabs_snapshot = tab_map.set_max_expansion_column(32);
-// log::info!("TabMap text: {:?}", tabs_snapshot.text());
-
-// let mut line_wrapper = LineWrapper::new(font_id, font_size, font_system);
-// let unwrapped_text = tabs_snapshot.text();
-// let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
-
-// let (wrap_map, _) =
-// cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font_id, font_size, wrap_width, cx));
-// let mut notifications = observe(&wrap_map, cx);
-
-// if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
-// notifications.next().await.unwrap();
-// }
-
-// let (initial_snapshot, _) = wrap_map.update(cx, |map, cx| {
-// assert!(!map.is_rewrapping());
-// map.sync(tabs_snapshot.clone(), Vec::new(), cx)
-// });
-
-// let actual_text = initial_snapshot.text();
-// assert_eq!(
-// actual_text, expected_text,
-// "unwrapped text is: {:?}",
-// unwrapped_text
-// );
-// log::info!("Wrapped text: {:?}", actual_text);
-
-// let mut next_inlay_id = 0;
-// let mut edits = Vec::new();
-// for _i in 0..operations {
-// log::info!("{} ==============================================", _i);
-
-// let mut buffer_edits = Vec::new();
-// match rng.gen_range(0..=100) {
-// 0..=19 => {
-// wrap_width = if rng.gen_bool(0.2) {
-// None
-// } else {
-// Some(rng.gen_range(0.0..=1000.0))
-// };
-// log::info!("Setting wrap width to {:?}", wrap_width);
-// wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
-// }
-// 20..=39 => {
-// for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
-// let (tabs_snapshot, tab_edits) =
-// tab_map.sync(fold_snapshot, fold_edits, tab_size);
-// let (mut snapshot, wrap_edits) =
-// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
-// snapshot.check_invariants();
-// snapshot.verify_chunks(&mut rng);
-// edits.push((snapshot, wrap_edits));
-// }
-// }
-// 40..=59 => {
-// let (inlay_snapshot, inlay_edits) =
-// inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng);
-// let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
-// let (tabs_snapshot, tab_edits) =
-// tab_map.sync(fold_snapshot, fold_edits, tab_size);
-// let (mut snapshot, wrap_edits) =
-// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
-// snapshot.check_invariants();
-// snapshot.verify_chunks(&mut rng);
-// edits.push((snapshot, wrap_edits));
-// }
-// _ => {
-// buffer.update(cx, |buffer, cx| {
-// let subscription = buffer.subscribe();
-// let edit_count = rng.gen_range(1..=5);
-// buffer.randomly_mutate(&mut rng, edit_count, cx);
-// buffer_snapshot = buffer.snapshot(cx);
-// buffer_edits.extend(subscription.consume());
-// });
-// }
-// }
-
-// log::info!("Buffer text: {:?}", buffer_snapshot.text());
-// let (inlay_snapshot, inlay_edits) =
-// inlay_map.sync(buffer_snapshot.clone(), buffer_edits);
-// log::info!("InlayMap text: {:?}", inlay_snapshot.text());
-// let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
-// log::info!("FoldMap text: {:?}", fold_snapshot.text());
-// let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
-// log::info!("TabMap text: {:?}", tabs_snapshot.text());
-
-// let unwrapped_text = tabs_snapshot.text();
-// let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
-// let (mut snapshot, wrap_edits) =
-// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot.clone(), tab_edits, cx));
-// snapshot.check_invariants();
-// snapshot.verify_chunks(&mut rng);
-// edits.push((snapshot, wrap_edits));
-
-// if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
-// log::info!("Waiting for wrapping to finish");
-// while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
-// notifications.next().await.unwrap();
-// }
-// wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
-// }
-
-// if !wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
-// let (mut wrapped_snapshot, wrap_edits) =
-// wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
-// let actual_text = wrapped_snapshot.text();
-// let actual_longest_row = wrapped_snapshot.longest_row();
-// log::info!("Wrapping finished: {:?}", actual_text);
-// wrapped_snapshot.check_invariants();
-// wrapped_snapshot.verify_chunks(&mut rng);
-// edits.push((wrapped_snapshot.clone(), wrap_edits));
-// assert_eq!(
-// actual_text, expected_text,
-// "unwrapped text is: {:?}",
-// unwrapped_text
-// );
-
-// let mut summary = TextSummary::default();
-// for (ix, item) in wrapped_snapshot
-// .transforms
-// .items(&())
-// .into_iter()
-// .enumerate()
-// {
-// summary += &item.summary.output;
-// log::info!("{} summary: {:?}", ix, item.summary.output,);
-// }
-
-// if tab_size.get() == 1
-// || !wrapped_snapshot
-// .tab_snapshot
-// .fold_snapshot
-// .text()
-// .contains('\t')
-// {
-// let mut expected_longest_rows = Vec::new();
-// let mut longest_line_len = -1;
-// for (row, line) in expected_text.split('\n').enumerate() {
-// let line_char_count = line.chars().count() as isize;
-// if line_char_count > longest_line_len {
-// expected_longest_rows.clear();
-// longest_line_len = line_char_count;
-// }
-// if line_char_count >= longest_line_len {
-// expected_longest_rows.push(row as u32);
-// }
-// }
-
-// assert!(
-// expected_longest_rows.contains(&actual_longest_row),
-// "incorrect longest row {}. expected {:?} with length {}",
-// actual_longest_row,
-// expected_longest_rows,
-// longest_line_len,
-// )
-// }
-// }
-// }
-
-// let mut initial_text = Rope::from(initial_snapshot.text().as_str());
-// for (snapshot, patch) in edits {
-// let snapshot_text = Rope::from(snapshot.text().as_str());
-// for edit in &patch {
-// let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0));
-// let old_end = initial_text.point_to_offset(cmp::min(
-// Point::new(edit.new.start + edit.old.len() as u32, 0),
-// initial_text.max_point(),
-// ));
-// let new_start = snapshot_text.point_to_offset(Point::new(edit.new.start, 0));
-// let new_end = snapshot_text.point_to_offset(cmp::min(
-// Point::new(edit.new.end, 0),
-// snapshot_text.max_point(),
-// ));
-// let new_text = snapshot_text
-// .chunks_in_range(new_start..new_end)
-// .collect::<String>();
-
-// initial_text.replace(old_start..old_end, &new_text);
-// }
-// assert_eq!(initial_text.to_string(), snapshot_text.to_string());
-// }
-
-// if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
-// log::info!("Waiting for wrapping to finish");
-// while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
-// notifications.next().await.unwrap();
-// }
-// }
-// wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
-// }
-
-// fn init_test(cx: &mut gpui::TestAppContext) {
-// cx.foreground().forbid_parking();
-// cx.update(|cx| {
-// cx.set_global(SettingsStore::test(cx));
-// theme::init((), cx);
-// });
-// }
-
-// fn wrap_text(
-// unwrapped_text: &str,
-// wrap_width: Option<f32>,
-// line_wrapper: &mut LineWrapper,
-// ) -> String {
-// if let Some(wrap_width) = wrap_width {
-// let mut wrapped_text = String::new();
-// for (row, line) in unwrapped_text.split('\n').enumerate() {
-// if row > 0 {
-// wrapped_text.push('\n')
-// }
-
-// let mut prev_ix = 0;
-// for boundary in line_wrapper.wrap_line(line, wrap_width) {
-// wrapped_text.push_str(&line[prev_ix..boundary.ix]);
-// wrapped_text.push('\n');
-// wrapped_text.push_str(&" ".repeat(boundary.next_indent as usize));
-// prev_ix = boundary.ix;
-// }
-// wrapped_text.push_str(&line[prev_ix..]);
-// }
-// wrapped_text
-// } else {
-// unwrapped_text.to_string()
-// }
-// }
-
-// impl WrapSnapshot {
-// pub fn text(&self) -> String {
-// self.text_chunks(0).collect()
-// }
-
-// pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
-// self.chunks(
-// wrap_row..self.max_point().row() + 1,
-// false,
-// Highlights::default(),
-// )
-// .map(|h| h.text)
-// }
-
-// fn verify_chunks(&mut self, rng: &mut impl Rng) {
-// for _ in 0..5 {
-// let mut end_row = rng.gen_range(0..=self.max_point().row());
-// let start_row = rng.gen_range(0..=end_row);
-// end_row += 1;
-
-// let mut expected_text = self.text_chunks(start_row).collect::<String>();
-// if expected_text.ends_with('\n') {
-// expected_text.push('\n');
-// }
-// let mut expected_text = expected_text
-// .lines()
-// .take((end_row - start_row) as usize)
-// .collect::<Vec<_>>()
-// .join("\n");
-// if end_row <= self.max_point().row() {
-// expected_text.push('\n');
-// }
-
-// let actual_text = self
-// .chunks(start_row..end_row, true, Highlights::default())
-// .map(|c| c.text)
-// .collect::<String>();
-// assert_eq!(
-// expected_text,
-// actual_text,
-// "chunks != highlighted_chunks for rows {:?}",
-// start_row..end_row
-// );
-// }
-// }
-// }
-// }
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{
+ display_map::{fold_map::FoldMap, inlay_map::InlayMap, tab_map::TabMap},
+ MultiBuffer,
+ };
+ use gpui::{font, px, test::observe, Platform};
+ use rand::prelude::*;
+ use settings::SettingsStore;
+ use smol::stream::StreamExt;
+ use std::{cmp, env, num::NonZeroU32};
+ use text::Rope;
+ use theme::LoadThemes;
+
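+ // Randomized test: mutate the buffer, folds, inlays, and wrap width, compare
+ // the WrapMap output against a reference wrapping computed with LineWrapper,
+ // and replay the emitted edit patches to confirm they reproduce the final text.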
+ #[gpui::test(iterations = 100)]
+ async fn test_random_wraps(cx: &mut gpui::TestAppContext, mut rng: StdRng) {
+ // todo!(): this test is flaky.
+ init_test(cx);
+
+ cx.background_executor.set_block_on_ticks(0..=50);
+ let operations = env::var("OPERATIONS")
+ .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
+ .unwrap_or(10);
+
+ let text_system = cx.read(|cx| cx.text_system().clone());
+ let mut wrap_width = if rng.gen_bool(0.1) {
+ None
+ } else {
+ Some(px(rng.gen_range(0.0..=1000.0)))
+ };
+ let tab_size = NonZeroU32::new(rng.gen_range(1..=4)).unwrap();
+ let font = font("Helvetica");
+ let font_id = text_system.font_id(&font).unwrap();
+ let font_size = px(14.0);
+
+ log::info!("Tab size: {}", tab_size);
+ log::info!("Wrap width: {:?}", wrap_width);
+
+ let buffer = cx.update(|cx| {
+ if rng.gen() {
+ MultiBuffer::build_random(&mut rng, cx)
+ } else {
+ let len = rng.gen_range(0..10);
+ let text = util::RandomCharIter::new(&mut rng)
+ .take(len)
+ .collect::<String>();
+ MultiBuffer::build_simple(&text, cx)
+ }
+ });
+ let mut buffer_snapshot = buffer.read_with(cx, |buffer, cx| buffer.snapshot(cx));
+ log::info!("Buffer text: {:?}", buffer_snapshot.text());
+ let (mut inlay_map, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
+ log::info!("InlayMap text: {:?}", inlay_snapshot.text());
+ let (mut fold_map, fold_snapshot) = FoldMap::new(inlay_snapshot.clone());
+ log::info!("FoldMap text: {:?}", fold_snapshot.text());
+ let (mut tab_map, _) = TabMap::new(fold_snapshot.clone(), tab_size);
+ let tabs_snapshot = tab_map.set_max_expansion_column(32);
+ log::info!("TabMap text: {:?}", tabs_snapshot.text());
+
+ let mut line_wrapper = text_system.line_wrapper(font.clone(), font_size).unwrap();
+ let unwrapped_text = tabs_snapshot.text();
+ let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
+
+ let (wrap_map, _) =
+ cx.update(|cx| WrapMap::new(tabs_snapshot.clone(), font, font_size, wrap_width, cx));
+ let mut notifications = observe(&wrap_map, cx);
+
+ if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
+ notifications.next().await.unwrap();
+ }
+
+ let (initial_snapshot, _) = wrap_map.update(cx, |map, cx| {
+ assert!(!map.is_rewrapping());
+ map.sync(tabs_snapshot.clone(), Vec::new(), cx)
+ });
+
+ let actual_text = initial_snapshot.text();
+ assert_eq!(
+ actual_text, expected_text,
+ "unwrapped text is: {:?}",
+ unwrapped_text
+ );
+ log::info!("Wrapped text: {:?}", actual_text);
+
+ let mut next_inlay_id = 0;
+ let mut edits = Vec::new();
+ for _i in 0..operations {
+ log::info!("{} ==============================================", _i);
+
+ let mut buffer_edits = Vec::new();
+ match rng.gen_range(0..=100) {
+ 0..=19 => {
+ wrap_width = if rng.gen_bool(0.2) {
+ None
+ } else {
+ Some(px(rng.gen_range(0.0..=1000.0)))
+ };
+ log::info!("Setting wrap width to {:?}", wrap_width);
+ wrap_map.update(cx, |map, cx| map.set_wrap_width(wrap_width, cx));
+ }
+ 20..=39 => {
+ for (fold_snapshot, fold_edits) in fold_map.randomly_mutate(&mut rng) {
+ let (tabs_snapshot, tab_edits) =
+ tab_map.sync(fold_snapshot, fold_edits, tab_size);
+ let (mut snapshot, wrap_edits) =
+ wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
+ snapshot.check_invariants();
+ snapshot.verify_chunks(&mut rng);
+ edits.push((snapshot, wrap_edits));
+ }
+ }
+ 40..=59 => {
+ let (inlay_snapshot, inlay_edits) =
+ inlay_map.randomly_mutate(&mut next_inlay_id, &mut rng);
+ let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
+ let (tabs_snapshot, tab_edits) =
+ tab_map.sync(fold_snapshot, fold_edits, tab_size);
+ let (mut snapshot, wrap_edits) =
+ wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, tab_edits, cx));
+ snapshot.check_invariants();
+ snapshot.verify_chunks(&mut rng);
+ edits.push((snapshot, wrap_edits));
+ }
+ _ => {
+ buffer.update(cx, |buffer, cx| {
+ let subscription = buffer.subscribe();
+ let edit_count = rng.gen_range(1..=5);
+ buffer.randomly_mutate(&mut rng, edit_count, cx);
+ buffer_snapshot = buffer.snapshot(cx);
+ buffer_edits.extend(subscription.consume());
+ });
+ }
+ }
+
+ log::info!("Buffer text: {:?}", buffer_snapshot.text());
+ let (inlay_snapshot, inlay_edits) =
+ inlay_map.sync(buffer_snapshot.clone(), buffer_edits);
+ log::info!("InlayMap text: {:?}", inlay_snapshot.text());
+ let (fold_snapshot, fold_edits) = fold_map.read(inlay_snapshot, inlay_edits);
+ log::info!("FoldMap text: {:?}", fold_snapshot.text());
+ let (tabs_snapshot, tab_edits) = tab_map.sync(fold_snapshot, fold_edits, tab_size);
+ log::info!("TabMap text: {:?}", tabs_snapshot.text());
+
+ let unwrapped_text = tabs_snapshot.text();
+ let expected_text = wrap_text(&unwrapped_text, wrap_width, &mut line_wrapper);
+ let (mut snapshot, wrap_edits) =
+ wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot.clone(), tab_edits, cx));
+ snapshot.check_invariants();
+ snapshot.verify_chunks(&mut rng);
+ edits.push((snapshot, wrap_edits));
+
+ if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) && rng.gen_bool(0.4) {
+ log::info!("Waiting for wrapping to finish");
+ while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
+ notifications.next().await.unwrap();
+ }
+ wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
+ }
+
+ if !wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
+ let (mut wrapped_snapshot, wrap_edits) =
+ wrap_map.update(cx, |map, cx| map.sync(tabs_snapshot, Vec::new(), cx));
+ let actual_text = wrapped_snapshot.text();
+ let actual_longest_row = wrapped_snapshot.longest_row();
+ log::info!("Wrapping finished: {:?}", actual_text);
+ wrapped_snapshot.check_invariants();
+ wrapped_snapshot.verify_chunks(&mut rng);
+ edits.push((wrapped_snapshot.clone(), wrap_edits));
+ assert_eq!(
+ actual_text, expected_text,
+ "unwrapped text is: {:?}",
+ unwrapped_text
+ );
+
+ let mut summary = TextSummary::default();
+ for (ix, item) in wrapped_snapshot
+ .transforms
+ .items(&())
+ .into_iter()
+ .enumerate()
+ {
+ summary += &item.summary.output;
+ log::info!("{} summary: {:?}", ix, item.summary.output,);
+ }
+
+ if tab_size.get() == 1
+ || !wrapped_snapshot
+ .tab_snapshot
+ .fold_snapshot
+ .text()
+ .contains('\t')
+ {
+ let mut expected_longest_rows = Vec::new();
+ let mut longest_line_len = -1;
+ for (row, line) in expected_text.split('\n').enumerate() {
+ let line_char_count = line.chars().count() as isize;
+ if line_char_count > longest_line_len {
+ expected_longest_rows.clear();
+ longest_line_len = line_char_count;
+ }
+ if line_char_count >= longest_line_len {
+ expected_longest_rows.push(row as u32);
+ }
+ }
+
+ assert!(
+ expected_longest_rows.contains(&actual_longest_row),
+ "incorrect longest row {}. expected {:?} with length {}",
+ actual_longest_row,
+ expected_longest_rows,
+ longest_line_len,
+ )
+ }
+ }
+ }
+
+ let mut initial_text = Rope::from(initial_snapshot.text().as_str());
+ for (snapshot, patch) in edits {
+ let snapshot_text = Rope::from(snapshot.text().as_str());
+ for edit in &patch {
+ let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0));
+ let old_end = initial_text.point_to_offset(cmp::min(
+ Point::new(edit.new.start + edit.old.len() as u32, 0),
+ initial_text.max_point(),
+ ));
+ let new_start = snapshot_text.point_to_offset(Point::new(edit.new.start, 0));
+ let new_end = snapshot_text.point_to_offset(cmp::min(
+ Point::new(edit.new.end, 0),
+ snapshot_text.max_point(),
+ ));
+ let new_text = snapshot_text
+ .chunks_in_range(new_start..new_end)
+ .collect::<String>();
+
+ initial_text.replace(old_start..old_end, &new_text);
+ }
+ assert_eq!(initial_text.to_string(), snapshot_text.to_string());
+ }
+
+ if wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
+ log::info!("Waiting for wrapping to finish");
+ while wrap_map.read_with(cx, |map, _| map.is_rewrapping()) {
+ notifications.next().await.unwrap();
+ }
+ }
+ wrap_map.read_with(cx, |map, _| assert!(map.pending_edits.is_empty()));
+ }
+
+ fn init_test(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| {
+ let settings = SettingsStore::test(cx);
+ cx.set_global(settings);
+ theme::init(LoadThemes::JustBase, cx);
+ });
+ }
+
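+ // Reference implementation of soft wrapping used to compute the expected text.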
+ fn wrap_text(
+ unwrapped_text: &str,
+ wrap_width: Option<Pixels>,
+ line_wrapper: &mut LineWrapper,
+ ) -> String {
+ if let Some(wrap_width) = wrap_width {
+ let mut wrapped_text = String::new();
+ for (row, line) in unwrapped_text.split('\n').enumerate() {
+ if row > 0 {
+ wrapped_text.push('\n')
+ }
+
+ let mut prev_ix = 0;
+ for boundary in line_wrapper.wrap_line(line, wrap_width) {
+ wrapped_text.push_str(&line[prev_ix..boundary.ix]);
+ wrapped_text.push('\n');
+ wrapped_text.push_str(&" ".repeat(boundary.next_indent as usize));
+ prev_ix = boundary.ix;
+ }
+ wrapped_text.push_str(&line[prev_ix..]);
+ }
+ wrapped_text
+ } else {
+ unwrapped_text.to_string()
+ }
+ }
+
+ impl WrapSnapshot {
+ pub fn text(&self) -> String {
+ self.text_chunks(0).collect()
+ }
+
+ pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
+ self.chunks(
+ wrap_row..self.max_point().row() + 1,
+ false,
+ Highlights::default(),
+ )
+ .map(|h| h.text)
+ }
+
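+ // Chunks with and without highlights must yield the same text for any row range.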
+ fn verify_chunks(&mut self, rng: &mut impl Rng) {
+ for _ in 0..5 {
+ let mut end_row = rng.gen_range(0..=self.max_point().row());
+ let start_row = rng.gen_range(0..=end_row);
+ end_row += 1;
+
+ let mut expected_text = self.text_chunks(start_row).collect::<String>();
+ if expected_text.ends_with('\n') {
+ expected_text.push('\n');
+ }
+ let mut expected_text = expected_text
+ .lines()
+ .take((end_row - start_row) as usize)
+ .collect::<Vec<_>>()
+ .join("\n");
+ if end_row <= self.max_point().row() {
+ expected_text.push('\n');
+ }
+
+ let actual_text = self
+ .chunks(start_row..end_row, true, Highlights::default())
+ .map(|c| c.text)
+ .collect::<String>();
+ assert_eq!(
+ expected_text,
+ actual_text,
+ "chunks != highlighted_chunks for rows {:?}",
+ start_row..end_row
+ );
+ }
+ }
+ }
+}
@@ -3486,7 +3486,7 @@ impl Editor {
drop(context_menu);
this.discard_copilot_suggestion(cx);
cx.notify();
- } else if this.completion_tasks.is_empty() {
+ } else if this.completion_tasks.len() <= 1 {
// If there are no more completion tasks and the last menu was
// empty, we should hide it. If it was already hidden, we should
// also show the copilot suggestion when available.
@@ -8240,6 +8240,11 @@ impl Editor {
self.style = Some(style);
}
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn style(&self) -> Option<&EditorStyle> {
+ self.style.as_ref()
+ }
+
pub fn set_wrap_width(&self, width: Option<Pixels>, cx: &mut AppContext) -> bool {
self.display_map
.update(cx, |map, cx| map.set_wrap_width(width, cx))
@@ -12,7 +12,7 @@ use futures::StreamExt;
use gpui::{
div,
serde_json::{self, json},
- Div, TestAppContext, VisualTestContext, WindowBounds, WindowOptions,
+ Div, Flatten, Platform, TestAppContext, VisualTestContext, WindowBounds, WindowOptions,
};
use indoc::indoc;
use language::{
@@ -36,121 +36,120 @@ use workspace::{
NavigationEntry, ViewId,
};
-// todo(finish edit tests)
-// #[gpui::test]
-// fn test_edit_events(cx: &mut TestAppContext) {
-// init_test(cx, |_| {});
-
-// let buffer = cx.build_model(|cx| {
-// let mut buffer = language::Buffer::new(0, cx.entity_id().as_u64(), "123456");
-// buffer.set_group_interval(Duration::from_secs(1));
-// buffer
-// });
-
-// let events = Rc::new(RefCell::new(Vec::new()));
-// let editor1 = cx.add_window({
-// let events = events.clone();
-// |cx| {
-// let view = cx.view().clone();
-// cx.subscribe(&view, move |_, _, event, _| {
-// if matches!(event, Event::Edited | Event::BufferEdited) {
-// events.borrow_mut().push(("editor1", event.clone()));
-// }
-// })
-// .detach();
-// Editor::for_buffer(buffer.clone(), None, cx)
-// }
-// });
-
-// let editor2 = cx.add_window({
-// let events = events.clone();
-// |cx| {
-// cx.subscribe(&cx.view().clone(), move |_, _, event, _| {
-// if matches!(event, Event::Edited | Event::BufferEdited) {
-// events.borrow_mut().push(("editor2", event.clone()));
-// }
-// })
-// .detach();
-// Editor::for_buffer(buffer.clone(), None, cx)
-// }
-// });
-
-// assert_eq!(mem::take(&mut *events.borrow_mut()), []);
-
-// // Mutating editor 1 will emit an `Edited` event only for that editor.
-// editor1.update(cx, |editor, cx| editor.insert("X", cx));
-// assert_eq!(
-// mem::take(&mut *events.borrow_mut()),
-// [
-// ("editor1", Event::Edited),
-// ("editor1", Event::BufferEdited),
-// ("editor2", Event::BufferEdited),
-// ]
-// );
-
-// // Mutating editor 2 will emit an `Edited` event only for that editor.
-// editor2.update(cx, |editor, cx| editor.delete(&Delete, cx));
-// assert_eq!(
-// mem::take(&mut *events.borrow_mut()),
-// [
-// ("editor2", Event::Edited),
-// ("editor1", Event::BufferEdited),
-// ("editor2", Event::BufferEdited),
-// ]
-// );
-
-// // Undoing on editor 1 will emit an `Edited` event only for that editor.
-// editor1.update(cx, |editor, cx| editor.undo(&Undo, cx));
-// assert_eq!(
-// mem::take(&mut *events.borrow_mut()),
-// [
-// ("editor1", Event::Edited),
-// ("editor1", Event::BufferEdited),
-// ("editor2", Event::BufferEdited),
-// ]
-// );
-
-// // Redoing on editor 1 will emit an `Edited` event only for that editor.
-// editor1.update(cx, |editor, cx| editor.redo(&Redo, cx));
-// assert_eq!(
-// mem::take(&mut *events.borrow_mut()),
-// [
-// ("editor1", Event::Edited),
-// ("editor1", Event::BufferEdited),
-// ("editor2", Event::BufferEdited),
-// ]
-// );
-
-// // Undoing on editor 2 will emit an `Edited` event only for that editor.
-// editor2.update(cx, |editor, cx| editor.undo(&Undo, cx));
-// assert_eq!(
-// mem::take(&mut *events.borrow_mut()),
-// [
-// ("editor2", Event::Edited),
-// ("editor1", Event::BufferEdited),
-// ("editor2", Event::BufferEdited),
-// ]
-// );
-
-// // Redoing on editor 2 will emit an `Edited` event only for that editor.
-// editor2.update(cx, |editor, cx| editor.redo(&Redo, cx));
-// assert_eq!(
-// mem::take(&mut *events.borrow_mut()),
-// [
-// ("editor2", Event::Edited),
-// ("editor1", Event::BufferEdited),
-// ("editor2", Event::BufferEdited),
-// ]
-// );
-
-// // No event is emitted when the mutation is a no-op.
-// editor2.update(cx, |editor, cx| {
-// editor.change_selections(None, cx, |s| s.select_ranges([0..0]));
-
-// editor.backspace(&Backspace, cx);
-// });
-// assert_eq!(mem::take(&mut *events.borrow_mut()), []);
-// }
+#[gpui::test]
+fn test_edit_events(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let buffer = cx.build_model(|cx| {
+ let mut buffer = language::Buffer::new(0, cx.entity_id().as_u64(), "123456");
+ buffer.set_group_interval(Duration::from_secs(1));
+ buffer
+ });
+
+ let events = Rc::new(RefCell::new(Vec::new()));
+ let editor1 = cx.add_window({
+ let events = events.clone();
+ |cx| {
+ let view = cx.view().clone();
+ cx.subscribe(&view, move |_, _, event: &EditorEvent, _| {
+ if matches!(event, EditorEvent::Edited | EditorEvent::BufferEdited) {
+ events.borrow_mut().push(("editor1", event.clone()));
+ }
+ })
+ .detach();
+ Editor::for_buffer(buffer.clone(), None, cx)
+ }
+ });
+
+ let editor2 = cx.add_window({
+ let events = events.clone();
+ |cx| {
+ cx.subscribe(&cx.view().clone(), move |_, _, event: &EditorEvent, _| {
+ if matches!(event, EditorEvent::Edited | EditorEvent::BufferEdited) {
+ events.borrow_mut().push(("editor2", event.clone()));
+ }
+ })
+ .detach();
+ Editor::for_buffer(buffer.clone(), None, cx)
+ }
+ });
+
+ assert_eq!(mem::take(&mut *events.borrow_mut()), []);
+
+ // Mutating editor 1 will emit an `Edited` event only for that editor.
+ editor1.update(cx, |editor, cx| editor.insert("X", cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor1", EditorEvent::Edited),
+ ("editor1", EditorEvent::BufferEdited),
+ ("editor2", EditorEvent::BufferEdited),
+ ]
+ );
+
+ // Mutating editor 2 will emit an `Edited` event only for that editor.
+ editor2.update(cx, |editor, cx| editor.delete(&Delete, cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor2", EditorEvent::Edited),
+ ("editor1", EditorEvent::BufferEdited),
+ ("editor2", EditorEvent::BufferEdited),
+ ]
+ );
+
+ // Undoing on editor 1 will emit an `Edited` event only for that editor.
+ editor1.update(cx, |editor, cx| editor.undo(&Undo, cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor1", EditorEvent::Edited),
+ ("editor1", EditorEvent::BufferEdited),
+ ("editor2", EditorEvent::BufferEdited),
+ ]
+ );
+
+ // Redoing on editor 1 will emit an `Edited` event only for that editor.
+ editor1.update(cx, |editor, cx| editor.redo(&Redo, cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor1", EditorEvent::Edited),
+ ("editor1", EditorEvent::BufferEdited),
+ ("editor2", EditorEvent::BufferEdited),
+ ]
+ );
+
+ // Undoing on editor 2 will emit an `Edited` event only for that editor.
+ editor2.update(cx, |editor, cx| editor.undo(&Undo, cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor2", EditorEvent::Edited),
+ ("editor1", EditorEvent::BufferEdited),
+ ("editor2", EditorEvent::BufferEdited),
+ ]
+ );
+
+ // Redoing on editor 2 will emit an `Edited` event only for that editor.
+ editor2.update(cx, |editor, cx| editor.redo(&Redo, cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor2", EditorEvent::Edited),
+ ("editor1", EditorEvent::BufferEdited),
+ ("editor2", EditorEvent::BufferEdited),
+ ]
+ );
+
+ // No event is emitted when the mutation is a no-op.
+ editor2.update(cx, |editor, cx| {
+ editor.change_selections(None, cx, |s| s.select_ranges([0..0]));
+
+ editor.backspace(&Backspace, cx);
+ });
+ assert_eq!(mem::take(&mut *events.borrow_mut()), []);
+}
#[gpui::test]
fn test_undo_redo_with_selection_restoration(cx: &mut TestAppContext) {
@@ -515,123 +514,123 @@ fn test_clone(cx: &mut TestAppContext) {
}
//todo!(editor navigate)
-// #[gpui::test]
-// async fn test_navigation_history(cx: &mut TestAppContext) {
-// init_test(cx, |_| {});
-
-// use workspace::item::Item;
-
-// let fs = FakeFs::new(cx.executor());
-// let project = Project::test(fs, [], cx).await;
-// let workspace = cx.add_window(|cx| Workspace::test_new(project, cx));
-// let pane = workspace
-// .update(cx, |workspace, _| workspace.active_pane().clone())
-// .unwrap();
-
-// workspace.update(cx, |v, cx| {
-// cx.build_view(|cx| {
-// let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx);
-// let mut editor = build_editor(buffer.clone(), cx);
-// let handle = cx.view();
-// editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(&handle)));
-
-// fn pop_history(editor: &mut Editor, cx: &mut WindowContext) -> Option<NavigationEntry> {
-// editor.nav_history.as_mut().unwrap().pop_backward(cx)
-// }
-
-// // Move the cursor a small distance.
-// // Nothing is added to the navigation history.
-// editor.change_selections(None, cx, |s| {
-// s.select_display_ranges([DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)])
-// });
-// editor.change_selections(None, cx, |s| {
-// s.select_display_ranges([DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)])
-// });
-// assert!(pop_history(&mut editor, cx).is_none());
-
-// // Move the cursor a large distance.
-// // The history can jump back to the previous position.
-// editor.change_selections(None, cx, |s| {
-// s.select_display_ranges([DisplayPoint::new(13, 0)..DisplayPoint::new(13, 3)])
-// });
-// let nav_entry = pop_history(&mut editor, cx).unwrap();
-// editor.navigate(nav_entry.data.unwrap(), cx);
-// assert_eq!(nav_entry.item.id(), cx.entity_id());
-// assert_eq!(
-// editor.selections.display_ranges(cx),
-// &[DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)]
-// );
-// assert!(pop_history(&mut editor, cx).is_none());
-
-// // Move the cursor a small distance via the mouse.
-// // Nothing is added to the navigation history.
-// editor.begin_selection(DisplayPoint::new(5, 0), false, 1, cx);
-// editor.end_selection(cx);
-// assert_eq!(
-// editor.selections.display_ranges(cx),
-// &[DisplayPoint::new(5, 0)..DisplayPoint::new(5, 0)]
-// );
-// assert!(pop_history(&mut editor, cx).is_none());
-
-// // Move the cursor a large distance via the mouse.
-// // The history can jump back to the previous position.
-// editor.begin_selection(DisplayPoint::new(15, 0), false, 1, cx);
-// editor.end_selection(cx);
-// assert_eq!(
-// editor.selections.display_ranges(cx),
-// &[DisplayPoint::new(15, 0)..DisplayPoint::new(15, 0)]
-// );
-// let nav_entry = pop_history(&mut editor, cx).unwrap();
-// editor.navigate(nav_entry.data.unwrap(), cx);
-// assert_eq!(nav_entry.item.id(), cx.entity_id());
-// assert_eq!(
-// editor.selections.display_ranges(cx),
-// &[DisplayPoint::new(5, 0)..DisplayPoint::new(5, 0)]
-// );
-// assert!(pop_history(&mut editor, cx).is_none());
-
-// // Set scroll position to check later
-// editor.set_scroll_position(gpui::Point::<f32>::new(5.5, 5.5), cx);
-// let original_scroll_position = editor.scroll_manager.anchor();
-
-// // Jump to the end of the document and adjust scroll
-// editor.move_to_end(&MoveToEnd, cx);
-// editor.set_scroll_position(gpui::Point::<f32>::new(-2.5, -0.5), cx);
-// assert_ne!(editor.scroll_manager.anchor(), original_scroll_position);
-
-// let nav_entry = pop_history(&mut editor, cx).unwrap();
-// editor.navigate(nav_entry.data.unwrap(), cx);
-// assert_eq!(editor.scroll_manager.anchor(), original_scroll_position);
-
-// // Ensure we don't panic when navigation data contains invalid anchors *and* points.
-// let mut invalid_anchor = editor.scroll_manager.anchor().anchor;
-// invalid_anchor.text_anchor.buffer_id = Some(999);
-// let invalid_point = Point::new(9999, 0);
-// editor.navigate(
-// Box::new(NavigationData {
-// cursor_anchor: invalid_anchor,
-// cursor_position: invalid_point,
-// scroll_anchor: ScrollAnchor {
-// anchor: invalid_anchor,
-// offset: Default::default(),
-// },
-// scroll_top_row: invalid_point.row,
-// }),
-// cx,
-// );
-// assert_eq!(
-// editor.selections.display_ranges(cx),
-// &[editor.max_point(cx)..editor.max_point(cx)]
-// );
-// assert_eq!(
-// editor.scroll_position(cx),
-// gpui::Point::new(0., editor.max_point(cx).row() as f32)
-// );
-
-// editor
-// })
-// });
-// }
+#[gpui::test]
+async fn test_navigation_history(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ use workspace::item::Item;
+
+ let fs = FakeFs::new(cx.executor());
+ let project = Project::test(fs, [], cx).await;
+ let workspace = cx.add_window(|cx| Workspace::test_new(project, cx));
+ let pane = workspace
+ .update(cx, |workspace, _| workspace.active_pane().clone())
+ .unwrap();
+
+ workspace.update(cx, |v, cx| {
+ cx.build_view(|cx| {
+ let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx);
+ let mut editor = build_editor(buffer.clone(), cx);
+ let handle = cx.view();
+ editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(&handle)));
+
+ fn pop_history(editor: &mut Editor, cx: &mut WindowContext) -> Option<NavigationEntry> {
+ editor.nav_history.as_mut().unwrap().pop_backward(cx)
+ }
+
+ // Move the cursor a small distance.
+ // Nothing is added to the navigation history.
+ editor.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)])
+ });
+ editor.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)])
+ });
+ assert!(pop_history(&mut editor, cx).is_none());
+
+ // Move the cursor a large distance.
+ // The history can jump back to the previous position.
+ editor.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(13, 0)..DisplayPoint::new(13, 3)])
+ });
+ let nav_entry = pop_history(&mut editor, cx).unwrap();
+ editor.navigate(nav_entry.data.unwrap(), cx);
+ assert_eq!(nav_entry.item.id(), cx.entity_id());
+ assert_eq!(
+ editor.selections.display_ranges(cx),
+ &[DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)]
+ );
+ assert!(pop_history(&mut editor, cx).is_none());
+
+ // Move the cursor a small distance via the mouse.
+ // Nothing is added to the navigation history.
+ editor.begin_selection(DisplayPoint::new(5, 0), false, 1, cx);
+ editor.end_selection(cx);
+ assert_eq!(
+ editor.selections.display_ranges(cx),
+ &[DisplayPoint::new(5, 0)..DisplayPoint::new(5, 0)]
+ );
+ assert!(pop_history(&mut editor, cx).is_none());
+
+ // Move the cursor a large distance via the mouse.
+ // The history can jump back to the previous position.
+ editor.begin_selection(DisplayPoint::new(15, 0), false, 1, cx);
+ editor.end_selection(cx);
+ assert_eq!(
+ editor.selections.display_ranges(cx),
+ &[DisplayPoint::new(15, 0)..DisplayPoint::new(15, 0)]
+ );
+ let nav_entry = pop_history(&mut editor, cx).unwrap();
+ editor.navigate(nav_entry.data.unwrap(), cx);
+ assert_eq!(nav_entry.item.id(), cx.entity_id());
+ assert_eq!(
+ editor.selections.display_ranges(cx),
+ &[DisplayPoint::new(5, 0)..DisplayPoint::new(5, 0)]
+ );
+ assert!(pop_history(&mut editor, cx).is_none());
+
+ // Set scroll position to check later
+ editor.set_scroll_position(gpui::Point::<f32>::new(5.5, 5.5), cx);
+ let original_scroll_position = editor.scroll_manager.anchor();
+
+ // Jump to the end of the document and adjust scroll
+ editor.move_to_end(&MoveToEnd, cx);
+ editor.set_scroll_position(gpui::Point::<f32>::new(-2.5, -0.5), cx);
+ assert_ne!(editor.scroll_manager.anchor(), original_scroll_position);
+
+ let nav_entry = pop_history(&mut editor, cx).unwrap();
+ editor.navigate(nav_entry.data.unwrap(), cx);
+ assert_eq!(editor.scroll_manager.anchor(), original_scroll_position);
+
+ // Ensure we don't panic when navigation data contains invalid anchors *and* points.
+ let mut invalid_anchor = editor.scroll_manager.anchor().anchor;
+ invalid_anchor.text_anchor.buffer_id = Some(999);
+ let invalid_point = Point::new(9999, 0);
+ editor.navigate(
+ Box::new(NavigationData {
+ cursor_anchor: invalid_anchor,
+ cursor_position: invalid_point,
+ scroll_anchor: ScrollAnchor {
+ anchor: invalid_anchor,
+ offset: Default::default(),
+ },
+ scroll_top_row: invalid_point.row,
+ }),
+ cx,
+ );
+ assert_eq!(
+ editor.selections.display_ranges(cx),
+ &[editor.max_point(cx)..editor.max_point(cx)]
+ );
+ assert_eq!(
+ editor.scroll_position(cx),
+ gpui::Point::new(0., editor.max_point(cx).row() as f32)
+ );
+
+ editor
+ })
+ });
+}
#[gpui::test]
fn test_cancel(cx: &mut TestAppContext) {
@@ -959,55 +958,55 @@ fn test_move_cursor_multibyte(cx: &mut TestAppContext) {
}
//todo!(finish editor tests)
-// #[gpui::test]
-// fn test_move_cursor_different_line_lengths(cx: &mut TestAppContext) {
-// init_test(cx, |_| {});
-
-// let view = cx.add_window(|cx| {
-// let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcd\nαβγ\nabcd\nⓐⓑⓒⓓⓔ\n", cx);
-// build_editor(buffer.clone(), cx)
-// });
-// view.update(cx, |view, cx| {
-// view.change_selections(None, cx, |s| {
-// s.select_display_ranges([empty_range(0, "ⓐⓑⓒⓓⓔ".len())]);
-// });
-// view.move_down(&MoveDown, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[empty_range(1, "abcd".len())]
-// );
-
-// view.move_down(&MoveDown, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[empty_range(2, "αβγ".len())]
-// );
-
-// view.move_down(&MoveDown, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[empty_range(3, "abcd".len())]
-// );
-
-// view.move_down(&MoveDown, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]
-// );
-
-// view.move_up(&MoveUp, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[empty_range(3, "abcd".len())]
-// );
-
-// view.move_up(&MoveUp, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[empty_range(2, "αβγ".len())]
-// );
-// });
-// }
+#[gpui::test]
+fn test_move_cursor_different_line_lengths(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let view = cx.add_window(|cx| {
+ let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcd\nαβγ\nabcd\nⓐⓑⓒⓓⓔ\n", cx);
+ build_editor(buffer.clone(), cx)
+ });
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([empty_range(0, "ⓐⓑⓒⓓⓔ".len())]);
+ });
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(1, "abcd".len())]
+ );
+
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(2, "αβγ".len())]
+ );
+
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(3, "abcd".len())]
+ );
+
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]
+ );
+
+ view.move_up(&MoveUp, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(3, "abcd".len())]
+ );
+
+ view.move_up(&MoveUp, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(2, "αβγ".len())]
+ );
+ });
+}
#[gpui::test]
fn test_beginning_end_of_line(cx: &mut TestAppContext) {
@@ -1225,532 +1224,551 @@ fn test_prev_next_word_boundary(cx: &mut TestAppContext) {
}
//todo!(finish editor tests)
-// #[gpui::test]
-// fn test_prev_next_word_bounds_with_soft_wrap(cx: &mut TestAppContext) {
-// init_test(cx, |_| {});
-
-// let view = cx.add_window(|cx| {
-// let buffer = MultiBuffer::build_simple("use one::{\n two::three::four::five\n};", cx);
-// build_editor(buffer, cx)
-// });
-
-// view.update(cx, |view, cx| {
-// view.set_wrap_width(Some(140.0.into()), cx);
-// assert_eq!(
-// view.display_text(cx),
-// "use one::{\n two::three::\n four::five\n};"
-// );
-
-// view.change_selections(None, cx, |s| {
-// s.select_display_ranges([DisplayPoint::new(1, 7)..DisplayPoint::new(1, 7)]);
-// });
-
-// view.move_to_next_word_end(&MoveToNextWordEnd, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[DisplayPoint::new(1, 9)..DisplayPoint::new(1, 9)]
-// );
-
-// view.move_to_next_word_end(&MoveToNextWordEnd, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)]
-// );
-
-// view.move_to_next_word_end(&MoveToNextWordEnd, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)]
-// );
-
-// view.move_to_next_word_end(&MoveToNextWordEnd, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[DisplayPoint::new(2, 8)..DisplayPoint::new(2, 8)]
-// );
-
-// view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)]
-// );
-
-// view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
-// assert_eq!(
-// view.selections.display_ranges(cx),
-// &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)]
-// );
-// });
-// }
-
-//todo!(simulate_resize)
-// #[gpui::test]
-// async fn test_move_start_of_paragraph_end_of_paragraph(cx: &mut gpui::TestAppContext) {
-// init_test(cx, |_| {});
-// let mut cx = EditorTestContext::new(cx).await;
-
-// let line_height = cx.editor(|editor, cx| editor.style(cx).text.line_height(cx.font_cache()));
-// let window = cx.window;
-// window.simulate_resize(gpui::Point::new(100., 4. * line_height), &mut cx);
-
-// cx.set_state(
-// &r#"ˇone
-// two
-
-// three
-// fourˇ
-// five
-
-// six"#
-// .unindent(),
-// );
-
-// cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx));
-// cx.assert_editor_state(
-// &r#"one
-// two
-// ˇ
-// three
-// four
-// five
-// ˇ
-// six"#
-// .unindent(),
-// );
-
-// cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx));
-// cx.assert_editor_state(
-// &r#"one
-// two
-
-// three
-// four
-// five
-// ˇ
-// sixˇ"#
-// .unindent(),
-// );
-
-// cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx));
-// cx.assert_editor_state(
-// &r#"one
-// two
-
-// three
-// four
-// five
-
-// sixˇ"#
-// .unindent(),
-// );
-
-// cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx));
-// cx.assert_editor_state(
-// &r#"one
-// two
-
-// three
-// four
-// five
-// ˇ
-// six"#
-// .unindent(),
-// );
-
-// cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx));
-// cx.assert_editor_state(
-// &r#"one
-// two
-// ˇ
-// three
-// four
-// five
-
-// six"#
-// .unindent(),
-// );
-
-// cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx));
-// cx.assert_editor_state(
-// &r#"ˇone
-// two
-
-// three
-// four
-// five
-
-// six"#
-// .unindent(),
-// );
-// }
-
-// #[gpui::test]
-// async fn test_scroll_page_up_page_down(cx: &mut gpui::TestAppContext) {
-// init_test(cx, |_| {});
-// let mut cx = EditorTestContext::new(cx).await;
-// let line_height = cx.editor(|editor, cx| editor.style(cx).text.line_height(cx.font_cache()));
-// let window = cx.window;
-// window.simulate_resize(Point::new(1000., 4. * line_height + 0.5), &mut cx);
-
-// cx.set_state(
-// &r#"ˇone
-// two
-// three
-// four
-// five
-// six
-// seven
-// eight
-// nine
-// ten
-// "#,
-// );
-
-// cx.update_editor(|editor, cx| {
-// assert_eq!(
-// editor.snapshot(cx).scroll_position(),
-// gpui::Point::new(0., 0.)
-// );
-// editor.scroll_screen(&ScrollAmount::Page(1.), cx);
-// assert_eq!(
-// editor.snapshot(cx).scroll_position(),
-// gpui::Point::new(0., 3.)
-// );
-// editor.scroll_screen(&ScrollAmount::Page(1.), cx);
-// assert_eq!(
-// editor.snapshot(cx).scroll_position(),
-// gpui::Point::new(0., 6.)
-// );
-// editor.scroll_screen(&ScrollAmount::Page(-1.), cx);
-// assert_eq!(
-// editor.snapshot(cx).scroll_position(),
-// gpui::Point::new(0., 3.)
-// );
-
-// editor.scroll_screen(&ScrollAmount::Page(-0.5), cx);
-// assert_eq!(
-// editor.snapshot(cx).scroll_position(),
-// gpui::Point::new(0., 1.)
-// );
-// editor.scroll_screen(&ScrollAmount::Page(0.5), cx);
-// assert_eq!(
-// editor.snapshot(cx).scroll_position(),
-// gpui::Point::new(0., 3.)
-// );
-// });
-// }
-
-// #[gpui::test]
-// async fn test_autoscroll(cx: &mut gpui::TestAppContext) {
-// init_test(cx, |_| {});
-// let mut cx = EditorTestContext::new(cx).await;
-
-// let line_height = cx.update_editor(|editor, cx| {
-// editor.set_vertical_scroll_margin(2, cx);
-// editor.style(cx).text.line_height(cx.font_cache())
-// });
-
-// let window = cx.window;
-// window.simulate_resize(gpui::Point::new(1000., 6.0 * line_height), &mut cx);
-
-// cx.set_state(
-// &r#"ˇone
-// two
-// three
-// four
-// five
-// six
-// seven
-// eight
-// nine
-// ten
-// "#,
-// );
-// cx.update_editor(|editor, cx| {
-// assert_eq!(
-// editor.snapshot(cx).scroll_position(),
-// gpui::Point::new(0., 0.0)
-// );
-// });
-
-// // Add a cursor below the visible area. Since both cursors cannot fit
-// // on screen, the editor autoscrolls to reveal the newest cursor, and
-// // allows the vertical scroll margin below that cursor.
-// cx.update_editor(|editor, cx| {
-// editor.change_selections(Some(Autoscroll::fit()), cx, |selections| {
-// selections.select_ranges([
-// Point::new(0, 0)..Point::new(0, 0),
-// Point::new(6, 0)..Point::new(6, 0),
-// ]);
-// })
-// });
-// cx.update_editor(|editor, cx| {
-// assert_eq!(
-// editor.snapshot(cx).scroll_position(),
-// gpui::Point::new(0., 3.0)
-// );
-// });
-
-// // Move down. The editor cursor scrolls down to track the newest cursor.
-// cx.update_editor(|editor, cx| {
-// editor.move_down(&Default::default(), cx);
-// });
-// cx.update_editor(|editor, cx| {
-// assert_eq!(
-// editor.snapshot(cx).scroll_position(),
-// gpui::Point::new(0., 4.0)
-// );
-// });
-
-// // Add a cursor above the visible area. Since both cursors fit on screen,
-// // the editor scrolls to show both.
-// cx.update_editor(|editor, cx| {
-// editor.change_selections(Some(Autoscroll::fit()), cx, |selections| {
-// selections.select_ranges([
-// Point::new(1, 0)..Point::new(1, 0),
-// Point::new(6, 0)..Point::new(6, 0),
-// ]);
-// })
-// });
-// cx.update_editor(|editor, cx| {
-// assert_eq!(
-// editor.snapshot(cx).scroll_position(),
-// gpui::Point::new(0., 1.0)
-// );
-// });
-// }
-
-// #[gpui::test]
-// async fn test_move_page_up_page_down(cx: &mut gpui::TestAppContext) {
-// init_test(cx, |_| {});
-// let mut cx = EditorTestContext::new(cx).await;
-
-// let line_height = cx.editor(|editor, cx| editor.style(cx).text.line_height(cx.font_cache()));
-// let window = cx.window;
-// window.simulate_resize(gpui::Point::new(100., 4. * line_height), &mut cx);
-
-// cx.set_state(
-// &r#"
-// ˇone
-// two
-// threeˇ
-// four
-// five
-// six
-// seven
-// eight
-// nine
-// ten
-// "#
-// .unindent(),
-// );
-
-// cx.update_editor(|editor, cx| editor.move_page_down(&MovePageDown::default(), cx));
-// cx.assert_editor_state(
-// &r#"
-// one
-// two
-// three
-// ˇfour
-// five
-// sixˇ
-// seven
-// eight
-// nine
-// ten
-// "#
-// .unindent(),
-// );
-
-// cx.update_editor(|editor, cx| editor.move_page_down(&MovePageDown::default(), cx));
-// cx.assert_editor_state(
-// &r#"
-// one
-// two
-// three
-// four
-// five
-// six
-// ˇseven
-// eight
-// nineˇ
-// ten
-// "#
-// .unindent(),
-// );
-
-// cx.update_editor(|editor, cx| editor.move_page_up(&MovePageUp::default(), cx));
-// cx.assert_editor_state(
-// &r#"
-// one
-// two
-// three
-// ˇfour
-// five
-// sixˇ
-// seven
-// eight
-// nine
-// ten
-// "#
-// .unindent(),
-// );
-
-// cx.update_editor(|editor, cx| editor.move_page_up(&MovePageUp::default(), cx));
-// cx.assert_editor_state(
-// &r#"
-// ˇone
-// two
-// threeˇ
-// four
-// five
-// six
-// seven
-// eight
-// nine
-// ten
-// "#
-// .unindent(),
-// );
-
-// // Test select collapsing
-// cx.update_editor(|editor, cx| {
-// editor.move_page_down(&MovePageDown::default(), cx);
-// editor.move_page_down(&MovePageDown::default(), cx);
-// editor.move_page_down(&MovePageDown::default(), cx);
-// });
-// cx.assert_editor_state(
-// &r#"
-// one
-// two
-// three
-// four
-// five
-// six
-// seven
-// eight
-// nine
-// ˇten
-// ˇ"#
-// .unindent(),
-// );
-// }
-
-#[gpui::test]
-async fn test_delete_to_beginning_of_line(cx: &mut gpui::TestAppContext) {
- init_test(cx, |_| {});
- let mut cx = EditorTestContext::new(cx).await;
- cx.set_state("one «two threeˇ» four");
- cx.update_editor(|editor, cx| {
- editor.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx);
- assert_eq!(editor.text(cx), " four");
- });
-}
-
#[gpui::test]
-fn test_delete_to_word_boundary(cx: &mut TestAppContext) {
+fn test_prev_next_word_bounds_with_soft_wrap(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let view = cx.add_window(|cx| {
- let buffer = MultiBuffer::build_simple("one two three four", cx);
- build_editor(buffer.clone(), cx)
+ let buffer = MultiBuffer::build_simple("use one::{\n two::three::four::five\n};", cx);
+ build_editor(buffer, cx)
});
view.update(cx, |view, cx| {
- view.change_selections(None, cx, |s| {
- s.select_display_ranges([
- // an empty selection - the preceding word fragment is deleted
- DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
- // characters selected - they are deleted
- DisplayPoint::new(0, 9)..DisplayPoint::new(0, 12),
- ])
- });
- view.delete_to_previous_word_start(&DeleteToPreviousWordStart, cx);
- assert_eq!(view.buffer.read(cx).read(cx).text(), "e two te four");
- });
+ view.set_wrap_width(Some(140.0.into()), cx);
+ assert_eq!(
+ view.display_text(cx),
+ "use one::{\n two::three::\n four::five\n};"
+ );
- view.update(cx, |view, cx| {
view.change_selections(None, cx, |s| {
- s.select_display_ranges([
- // an empty selection - the following word fragment is deleted
- DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3),
- // characters selected - they are deleted
- DisplayPoint::new(0, 9)..DisplayPoint::new(0, 10),
- ])
+ s.select_display_ranges([DisplayPoint::new(1, 7)..DisplayPoint::new(1, 7)]);
});
- view.delete_to_next_word_end(&DeleteToNextWordEnd, cx);
- assert_eq!(view.buffer.read(cx).read(cx).text(), "e t te our");
- });
-}
-#[gpui::test]
-fn test_newline(cx: &mut TestAppContext) {
- init_test(cx, |_| {});
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(1, 9)..DisplayPoint::new(1, 9)]
+ );
- let view = cx.add_window(|cx| {
- let buffer = MultiBuffer::build_simple("aaaa\n bbbb\n", cx);
- build_editor(buffer.clone(), cx)
- });
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)]
+ );
- view.update(cx, |view, cx| {
- view.change_selections(None, cx, |s| {
- s.select_display_ranges([
- DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
- DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2),
- DisplayPoint::new(1, 6)..DisplayPoint::new(1, 6),
- ])
- });
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)]
+ );
- view.newline(&Newline, cx);
- assert_eq!(view.text(cx), "aa\naa\n \n bb\n bb\n");
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(2, 8)..DisplayPoint::new(2, 8)]
+ );
+
+ view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)]
+ );
+
+ view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)]
+ );
});
}
+//todo!(simulate_resize)
#[gpui::test]
-fn test_newline_with_old_selections(cx: &mut TestAppContext) {
+async fn test_move_start_of_paragraph_end_of_paragraph(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
+ let mut cx = EditorTestContext::new(cx).await;
- let editor = cx.add_window(|cx| {
- let buffer = MultiBuffer::build_simple(
- "
- a
- b(
- X
- )
- c(
- X
- )
- "
- .unindent()
- .as_str(),
- cx,
- );
- let mut editor = build_editor(buffer.clone(), cx);
- editor.change_selections(None, cx, |s| {
- s.select_ranges([
- Point::new(2, 4)..Point::new(2, 5),
- Point::new(5, 4)..Point::new(5, 5),
- ])
- });
+ let line_height = cx.editor(|editor, cx| {
editor
+ .style()
+ .unwrap()
+ .text
+ .line_height_in_pixels(cx.rem_size())
});
+ cx.simulate_window_resize(cx.window, size(px(100.), 4. * line_height));
- editor.update(cx, |editor, cx| {
- // Edit the buffer directly, deleting ranges surrounding the editor's selections
- editor.buffer.update(cx, |buffer, cx| {
- buffer.edit(
- [
- (Point::new(1, 2)..Point::new(3, 0), ""),
- (Point::new(4, 2)..Point::new(6, 0), ""),
- ],
- None,
- cx,
- );
- assert_eq!(
- buffer.read(cx).text(),
- "
- a
- b()
+ cx.set_state(
+ &r#"ˇone
+ two
+
+ three
+ fourˇ
+ five
+
+ six"#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx));
+ cx.assert_editor_state(
+ &r#"one
+ two
+ ˇ
+ three
+ four
+ five
+ ˇ
+ six"#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx));
+ cx.assert_editor_state(
+ &r#"one
+ two
+
+ three
+ four
+ five
+ ˇ
+ sixˇ"#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.move_to_end_of_paragraph(&MoveToEndOfParagraph, cx));
+ cx.assert_editor_state(
+ &r#"one
+ two
+
+ three
+ four
+ five
+
+ sixˇ"#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx));
+ cx.assert_editor_state(
+ &r#"one
+ two
+
+ three
+ four
+ five
+ ˇ
+ six"#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx));
+ cx.assert_editor_state(
+ &r#"one
+ two
+ ˇ
+ three
+ four
+ five
+
+ six"#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.move_to_start_of_paragraph(&MoveToStartOfParagraph, cx));
+ cx.assert_editor_state(
+ &r#"ˇone
+ two
+
+ three
+ four
+ five
+
+ six"#
+ .unindent(),
+ );
+}
+
+#[gpui::test]
+async fn test_scroll_page_up_page_down(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorTestContext::new(cx).await;
+ let line_height = cx.editor(|editor, cx| {
+ editor
+ .style()
+ .unwrap()
+ .text
+ .line_height_in_pixels(cx.rem_size())
+ });
+ let window = cx.window;
+ cx.simulate_window_resize(window, size(px(1000.), 4. * line_height + px(0.5)));
+
+ cx.set_state(
+ &r#"ˇone
+ two
+ three
+ four
+ five
+ six
+ seven
+ eight
+ nine
+ ten
+ "#,
+ );
+
+ cx.update_editor(|editor, cx| {
+ assert_eq!(
+ editor.snapshot(cx).scroll_position(),
+ gpui::Point::new(0., 0.)
+ );
+ editor.scroll_screen(&ScrollAmount::Page(1.), cx);
+ assert_eq!(
+ editor.snapshot(cx).scroll_position(),
+ gpui::Point::new(0., 3.)
+ );
+ editor.scroll_screen(&ScrollAmount::Page(1.), cx);
+ assert_eq!(
+ editor.snapshot(cx).scroll_position(),
+ gpui::Point::new(0., 6.)
+ );
+ editor.scroll_screen(&ScrollAmount::Page(-1.), cx);
+ assert_eq!(
+ editor.snapshot(cx).scroll_position(),
+ gpui::Point::new(0., 3.)
+ );
+
+ editor.scroll_screen(&ScrollAmount::Page(-0.5), cx);
+ assert_eq!(
+ editor.snapshot(cx).scroll_position(),
+ gpui::Point::new(0., 1.)
+ );
+ editor.scroll_screen(&ScrollAmount::Page(0.5), cx);
+ assert_eq!(
+ editor.snapshot(cx).scroll_position(),
+ gpui::Point::new(0., 3.)
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_autoscroll(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let line_height = cx.update_editor(|editor, cx| {
+ editor.set_vertical_scroll_margin(2, cx);
+ editor
+ .style()
+ .unwrap()
+ .text
+ .line_height_in_pixels(cx.rem_size())
+ });
+ let window = cx.window;
+ cx.simulate_window_resize(window, size(px(1000.), 6. * line_height));
+
+ cx.set_state(
+ &r#"ˇone
+ two
+ three
+ four
+ five
+ six
+ seven
+ eight
+ nine
+ ten
+ "#,
+ );
+ cx.update_editor(|editor, cx| {
+ assert_eq!(
+ editor.snapshot(cx).scroll_position(),
+ gpui::Point::new(0., 0.0)
+ );
+ });
+
+ // Add a cursor below the visible area. Since both cursors cannot fit
+ // on screen, the editor autoscrolls to reveal the newest cursor, and
+ // allows the vertical scroll margin below that cursor.
+ cx.update_editor(|editor, cx| {
+ editor.change_selections(Some(Autoscroll::fit()), cx, |selections| {
+ selections.select_ranges([
+ Point::new(0, 0)..Point::new(0, 0),
+ Point::new(6, 0)..Point::new(6, 0),
+ ]);
+ })
+ });
+ cx.update_editor(|editor, cx| {
+ assert_eq!(
+ editor.snapshot(cx).scroll_position(),
+ gpui::Point::new(0., 3.0)
+ );
+ });
+
+ // Move down. The editor cursor scrolls down to track the newest cursor.
+ cx.update_editor(|editor, cx| {
+ editor.move_down(&Default::default(), cx);
+ });
+ cx.update_editor(|editor, cx| {
+ assert_eq!(
+ editor.snapshot(cx).scroll_position(),
+ gpui::Point::new(0., 4.0)
+ );
+ });
+
+ // Add a cursor above the visible area. Since both cursors fit on screen,
+ // the editor scrolls to show both.
+ cx.update_editor(|editor, cx| {
+ editor.change_selections(Some(Autoscroll::fit()), cx, |selections| {
+ selections.select_ranges([
+ Point::new(1, 0)..Point::new(1, 0),
+ Point::new(6, 0)..Point::new(6, 0),
+ ]);
+ })
+ });
+ cx.update_editor(|editor, cx| {
+ assert_eq!(
+ editor.snapshot(cx).scroll_position(),
+ gpui::Point::new(0., 1.0)
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_move_page_up_page_down(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorTestContext::new(cx).await;
+
+ let line_height = cx.editor(|editor, cx| {
+ editor
+ .style()
+ .unwrap()
+ .text
+ .line_height_in_pixels(cx.rem_size())
+ });
+ let window = cx.window;
+ cx.simulate_window_resize(window, size(px(100.), 4. * line_height));
+ cx.set_state(
+ &r#"
+ ˇone
+ two
+ threeˇ
+ four
+ five
+ six
+ seven
+ eight
+ nine
+ ten
+ "#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.move_page_down(&MovePageDown::default(), cx));
+ cx.assert_editor_state(
+ &r#"
+ one
+ two
+ three
+ ˇfour
+ five
+ sixˇ
+ seven
+ eight
+ nine
+ ten
+ "#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.move_page_down(&MovePageDown::default(), cx));
+ cx.assert_editor_state(
+ &r#"
+ one
+ two
+ three
+ four
+ five
+ six
+ ˇseven
+ eight
+ nineˇ
+ ten
+ "#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.move_page_up(&MovePageUp::default(), cx));
+ cx.assert_editor_state(
+ &r#"
+ one
+ two
+ three
+ ˇfour
+ five
+ sixˇ
+ seven
+ eight
+ nine
+ ten
+ "#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.move_page_up(&MovePageUp::default(), cx));
+ cx.assert_editor_state(
+ &r#"
+ ˇone
+ two
+ threeˇ
+ four
+ five
+ six
+ seven
+ eight
+ nine
+ ten
+ "#
+ .unindent(),
+ );
+
+ // Test select collapsing
+ cx.update_editor(|editor, cx| {
+ editor.move_page_down(&MovePageDown::default(), cx);
+ editor.move_page_down(&MovePageDown::default(), cx);
+ editor.move_page_down(&MovePageDown::default(), cx);
+ });
+ cx.assert_editor_state(
+ &r#"
+ one
+ two
+ three
+ four
+ five
+ six
+ seven
+ eight
+ nine
+ ˇten
+ ˇ"#
+ .unindent(),
+ );
+}
+
+#[gpui::test]
+async fn test_delete_to_beginning_of_line(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
+ let mut cx = EditorTestContext::new(cx).await;
+ cx.set_state("one «two threeˇ» four");
+ cx.update_editor(|editor, cx| {
+ editor.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx);
+ assert_eq!(editor.text(cx), " four");
+ });
+}
+
+#[gpui::test]
+fn test_delete_to_word_boundary(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let view = cx.add_window(|cx| {
+ let buffer = MultiBuffer::build_simple("one two three four", cx);
+ build_editor(buffer.clone(), cx)
+ });
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ // an empty selection - the preceding word fragment is deleted
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
+ // characters selected - they are deleted
+ DisplayPoint::new(0, 9)..DisplayPoint::new(0, 12),
+ ])
+ });
+ view.delete_to_previous_word_start(&DeleteToPreviousWordStart, cx);
+ assert_eq!(view.buffer.read(cx).read(cx).text(), "e two te four");
+ });
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ // an empty selection - the following word fragment is deleted
+ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3),
+ // characters selected - they are deleted
+ DisplayPoint::new(0, 9)..DisplayPoint::new(0, 10),
+ ])
+ });
+ view.delete_to_next_word_end(&DeleteToNextWordEnd, cx);
+ assert_eq!(view.buffer.read(cx).read(cx).text(), "e t te our");
+ });
+}
+
+#[gpui::test]
+fn test_newline(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let view = cx.add_window(|cx| {
+ let buffer = MultiBuffer::build_simple("aaaa\n bbbb\n", cx);
+ build_editor(buffer.clone(), cx)
+ });
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
+ DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2),
+ DisplayPoint::new(1, 6)..DisplayPoint::new(1, 6),
+ ])
+ });
+
+ view.newline(&Newline, cx);
+ assert_eq!(view.text(cx), "aa\naa\n \n bb\n bb\n");
+ });
+}
+
+#[gpui::test]
+fn test_newline_with_old_selections(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let editor = cx.add_window(|cx| {
+ let buffer = MultiBuffer::build_simple(
+ "
+ a
+ b(
+ X
+ )
+ c(
+ X
+ )
+ "
+ .unindent()
+ .as_str(),
+ cx,
+ );
+ let mut editor = build_editor(buffer.clone(), cx);
+ editor.change_selections(None, cx, |s| {
+ s.select_ranges([
+ Point::new(2, 4)..Point::new(2, 5),
+ Point::new(5, 4)..Point::new(5, 5),
+ ])
+ });
+ editor
+ });
+
+ editor.update(cx, |editor, cx| {
+ // Edit the buffer directly, deleting ranges surrounding the editor's selections
+ editor.buffer.update(cx, |buffer, cx| {
+ buffer.edit(
+ [
+ (Point::new(1, 2)..Point::new(3, 0), ""),
+ (Point::new(4, 2)..Point::new(6, 0), ""),
+ ],
+ None,
+ cx,
+ );
+ assert_eq!(
+ buffer.read(cx).text(),
+ "
+ a
+ b()
c()
"
.unindent()
@@ -330,7 +330,7 @@ impl EditorElement {
});
}
- fn modifiers_changed(
+ pub(crate) fn modifiers_changed(
editor: &mut Editor,
event: &ModifiersChangedEvent,
cx: &mut ViewContext<Editor>,
@@ -1755,7 +1755,7 @@ impl EditorElement {
let gutter_width;
let gutter_margin;
if snapshot.show_gutter {
- let descent = cx.text_system().descent(font_id, font_size).unwrap();
+ let descent = cx.text_system().descent(font_id, font_size);
let gutter_padding_factor = 3.5;
gutter_padding = (em_width * gutter_padding_factor).round();
@@ -3227,448 +3227,491 @@ fn scale_horizontal_mouse_autoscroll_delta(delta: Pixels) -> f32 {
(delta.pow(1.2) / 300.0).into()
}
-// #[cfg(test)]
-// mod tests {
-// use super::*;
-// use crate::{
-// display_map::{BlockDisposition, BlockProperties},
-// editor_tests::{init_test, update_test_language_settings},
-// Editor, MultiBuffer,
-// };
-// use gpui::TestAppContext;
-// use language::language_settings;
-// use log::info;
-// use std::{num::NonZeroU32, sync::Arc};
-// use util::test::sample_text;
-
-// #[gpui::test]
-// fn test_layout_line_numbers(cx: &mut TestAppContext) {
-// init_test(cx, |_| {});
-// let editor = cx
-// .add_window(|cx| {
-// let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
-// Editor::new(EditorMode::Full, buffer, None, None, cx)
-// })
-// .root(cx);
-// let element = EditorElement::new(editor.read_with(cx, |editor, cx| editor.style(cx)));
-
-// let layouts = editor.update(cx, |editor, cx| {
-// let snapshot = editor.snapshot(cx);
-// element
-// .layout_line_numbers(
-// 0..6,
-// &Default::default(),
-// DisplayPoint::new(0, 0),
-// false,
-// &snapshot,
-// cx,
-// )
-// .0
-// });
-// assert_eq!(layouts.len(), 6);
-
-// let relative_rows = editor.update(cx, |editor, cx| {
-// let snapshot = editor.snapshot(cx);
-// element.calculate_relative_line_numbers(&snapshot, &(0..6), Some(3))
-// });
-// assert_eq!(relative_rows[&0], 3);
-// assert_eq!(relative_rows[&1], 2);
-// assert_eq!(relative_rows[&2], 1);
-// // current line has no relative number
-// assert_eq!(relative_rows[&4], 1);
-// assert_eq!(relative_rows[&5], 2);
-
-// // works if cursor is before screen
-// let relative_rows = editor.update(cx, |editor, cx| {
-// let snapshot = editor.snapshot(cx);
-
-// element.calculate_relative_line_numbers(&snapshot, &(3..6), Some(1))
-// });
-// assert_eq!(relative_rows.len(), 3);
-// assert_eq!(relative_rows[&3], 2);
-// assert_eq!(relative_rows[&4], 3);
-// assert_eq!(relative_rows[&5], 4);
-
-// // works if cursor is after screen
-// let relative_rows = editor.update(cx, |editor, cx| {
-// let snapshot = editor.snapshot(cx);
-
-// element.calculate_relative_line_numbers(&snapshot, &(0..3), Some(6))
-// });
-// assert_eq!(relative_rows.len(), 3);
-// assert_eq!(relative_rows[&0], 5);
-// assert_eq!(relative_rows[&1], 4);
-// assert_eq!(relative_rows[&2], 3);
-// }
-
-// #[gpui::test]
-// async fn test_vim_visual_selections(cx: &mut TestAppContext) {
-// init_test(cx, |_| {});
-
-// let editor = cx
-// .add_window(|cx| {
-// let buffer = MultiBuffer::build_simple(&(sample_text(6, 6, 'a') + "\n"), cx);
-// Editor::new(EditorMode::Full, buffer, None, None, cx)
-// })
-// .root(cx);
-// let mut element = EditorElement::new(editor.read_with(cx, |editor, cx| editor.style(cx)));
-// let (_, state) = editor.update(cx, |editor, cx| {
-// editor.cursor_shape = CursorShape::Block;
-// editor.change_selections(None, cx, |s| {
-// s.select_ranges([
-// Point::new(0, 0)..Point::new(1, 0),
-// Point::new(3, 2)..Point::new(3, 3),
-// Point::new(5, 6)..Point::new(6, 0),
-// ]);
-// });
-// element.layout(
-// SizeConstraint::new(point(500., 500.), point(500., 500.)),
-// editor,
-// cx,
-// )
-// });
-// assert_eq!(state.selections.len(), 1);
-// let local_selections = &state.selections[0].1;
-// assert_eq!(local_selections.len(), 3);
-// // moves cursor back one line
-// assert_eq!(local_selections[0].head, DisplayPoint::new(0, 6));
-// assert_eq!(
-// local_selections[0].range,
-// DisplayPoint::new(0, 0)..DisplayPoint::new(1, 0)
-// );
-
-// // moves cursor back one column
-// assert_eq!(
-// local_selections[1].range,
-// DisplayPoint::new(3, 2)..DisplayPoint::new(3, 3)
-// );
-// assert_eq!(local_selections[1].head, DisplayPoint::new(3, 2));
-
-// // leaves cursor on the max point
-// assert_eq!(
-// local_selections[2].range,
-// DisplayPoint::new(5, 6)..DisplayPoint::new(6, 0)
-// );
-// assert_eq!(local_selections[2].head, DisplayPoint::new(6, 0));
-
-// // active lines does not include 1 (even though the range of the selection does)
-// assert_eq!(
-// state.active_rows.keys().cloned().collect::<Vec<u32>>(),
-// vec![0, 3, 5, 6]
-// );
-
-// // multi-buffer support
-// // in DisplayPoint co-ordinates, this is what we're dealing with:
-// // 0: [[file
-// // 1: header]]
-// // 2: aaaaaa
-// // 3: bbbbbb
-// // 4: cccccc
-// // 5:
-// // 6: ...
-// // 7: ffffff
-// // 8: gggggg
-// // 9: hhhhhh
-// // 10:
-// // 11: [[file
-// // 12: header]]
-// // 13: bbbbbb
-// // 14: cccccc
-// // 15: dddddd
-// let editor = cx
-// .add_window(|cx| {
-// let buffer = MultiBuffer::build_multi(
-// [
-// (
-// &(sample_text(8, 6, 'a') + "\n"),
-// vec![
-// Point::new(0, 0)..Point::new(3, 0),
-// Point::new(4, 0)..Point::new(7, 0),
-// ],
-// ),
-// (
-// &(sample_text(8, 6, 'a') + "\n"),
-// vec![Point::new(1, 0)..Point::new(3, 0)],
-// ),
-// ],
-// cx,
-// );
-// Editor::new(EditorMode::Full, buffer, None, None, cx)
-// })
-// .root(cx);
-// let mut element = EditorElement::new(editor.read_with(cx, |editor, cx| editor.style(cx)));
-// let (_, state) = editor.update(cx, |editor, cx| {
-// editor.cursor_shape = CursorShape::Block;
-// editor.change_selections(None, cx, |s| {
-// s.select_display_ranges([
-// DisplayPoint::new(4, 0)..DisplayPoint::new(7, 0),
-// DisplayPoint::new(10, 0)..DisplayPoint::new(13, 0),
-// ]);
-// });
-// element.layout(
-// SizeConstraint::new(point(500., 500.), point(500., 500.)),
-// editor,
-// cx,
-// )
-// });
-
-// assert_eq!(state.selections.len(), 1);
-// let local_selections = &state.selections[0].1;
-// assert_eq!(local_selections.len(), 2);
-
-// // moves cursor on excerpt boundary back a line
-// // and doesn't allow selection to bleed through
-// assert_eq!(
-// local_selections[0].range,
-// DisplayPoint::new(4, 0)..DisplayPoint::new(6, 0)
-// );
-// assert_eq!(local_selections[0].head, DisplayPoint::new(5, 0));
-
-// // moves cursor on buffer boundary back two lines
-// // and doesn't allow selection to bleed through
-// assert_eq!(
-// local_selections[1].range,
-// DisplayPoint::new(10, 0)..DisplayPoint::new(11, 0)
-// );
-// assert_eq!(local_selections[1].head, DisplayPoint::new(10, 0));
-// }
-
-// #[gpui::test]
-// fn test_layout_with_placeholder_text_and_blocks(cx: &mut TestAppContext) {
-// init_test(cx, |_| {});
-
-// let editor = cx
-// .add_window(|cx| {
-// let buffer = MultiBuffer::build_simple("", cx);
-// Editor::new(EditorMode::Full, buffer, None, None, cx)
-// })
-// .root(cx);
-
-// editor.update(cx, |editor, cx| {
-// editor.set_placeholder_text("hello", cx);
-// editor.insert_blocks(
-// [BlockProperties {
-// style: BlockStyle::Fixed,
-// disposition: BlockDisposition::Above,
-// height: 3,
-// position: Anchor::min(),
-// render: Arc::new(|_| Empty::new().into_any()),
-// }],
-// None,
-// cx,
-// );
-
-// // Blur the editor so that it displays placeholder text.
-// cx.blur();
-// });
-
-// let mut element = EditorElement::new(editor.read_with(cx, |editor, cx| editor.style(cx)));
-// let (size, mut state) = editor.update(cx, |editor, cx| {
-// element.layout(
-// SizeConstraint::new(point(500., 500.), point(500., 500.)),
-// editor,
-// cx,
-// )
-// });
-
-// assert_eq!(state.position_map.line_layouts.len(), 4);
-// assert_eq!(
-// state
-// .line_number_layouts
-// .iter()
-// .map(Option::is_some)
-// .collect::<Vec<_>>(),
-// &[false, false, false, true]
-// );
-
-// // Don't panic.
-// let bounds = Bounds::<Pixels>::new(Default::default(), size);
-// editor.update(cx, |editor, cx| {
-// element.paint(bounds, bounds, &mut state, editor, cx);
-// });
-// }
-
-// #[gpui::test]
-// fn test_all_invisibles_drawing(cx: &mut TestAppContext) {
-// const TAB_SIZE: u32 = 4;
-
-// let input_text = "\t \t|\t| a b";
-// let expected_invisibles = vec![
-// Invisible::Tab {
-// line_start_offset: 0,
-// },
-// Invisible::Whitespace {
-// line_offset: TAB_SIZE as usize,
-// },
-// Invisible::Tab {
-// line_start_offset: TAB_SIZE as usize + 1,
-// },
-// Invisible::Tab {
-// line_start_offset: TAB_SIZE as usize * 2 + 1,
-// },
-// Invisible::Whitespace {
-// line_offset: TAB_SIZE as usize * 3 + 1,
-// },
-// Invisible::Whitespace {
-// line_offset: TAB_SIZE as usize * 3 + 3,
-// },
-// ];
-// assert_eq!(
-// expected_invisibles.len(),
-// input_text
-// .chars()
-// .filter(|initial_char| initial_char.is_whitespace())
-// .count(),
-// "Hardcoded expected invisibles differ from the actual ones in '{input_text}'"
-// );
-
-// init_test(cx, |s| {
-// s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
-// s.defaults.tab_size = NonZeroU32::new(TAB_SIZE);
-// });
-
-// let actual_invisibles =
-// collect_invisibles_from_new_editor(cx, EditorMode::Full, &input_text, 500.0);
-
-// assert_eq!(expected_invisibles, actual_invisibles);
-// }
-
-// #[gpui::test]
-// fn test_invisibles_dont_appear_in_certain_editors(cx: &mut TestAppContext) {
-// init_test(cx, |s| {
-// s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
-// s.defaults.tab_size = NonZeroU32::new(4);
-// });
-
-// for editor_mode_without_invisibles in [
-// EditorMode::SingleLine,
-// EditorMode::AutoHeight { max_lines: 100 },
-// ] {
-// let invisibles = collect_invisibles_from_new_editor(
-// cx,
-// editor_mode_without_invisibles,
-// "\t\t\t| | a b",
-// 500.0,
-// );
-// assert!(invisibles.is_empty(),
-// "For editor mode {editor_mode_without_invisibles:?} no invisibles was expected but got {invisibles:?}");
-// }
-// }
-
-// #[gpui::test]
-// fn test_wrapped_invisibles_drawing(cx: &mut TestAppContext) {
-// let tab_size = 4;
-// let input_text = "a\tbcd ".repeat(9);
-// let repeated_invisibles = [
-// Invisible::Tab {
-// line_start_offset: 1,
-// },
-// Invisible::Whitespace {
-// line_offset: tab_size as usize + 3,
-// },
-// Invisible::Whitespace {
-// line_offset: tab_size as usize + 4,
-// },
-// Invisible::Whitespace {
-// line_offset: tab_size as usize + 5,
-// },
-// ];
-// let expected_invisibles = std::iter::once(repeated_invisibles)
-// .cycle()
-// .take(9)
-// .flatten()
-// .collect::<Vec<_>>();
-// assert_eq!(
-// expected_invisibles.len(),
-// input_text
-// .chars()
-// .filter(|initial_char| initial_char.is_whitespace())
-// .count(),
-// "Hardcoded expected invisibles differ from the actual ones in '{input_text}'"
-// );
-// info!("Expected invisibles: {expected_invisibles:?}");
-
-// init_test(cx, |_| {});
-
-// // Put the same string with repeating whitespace pattern into editors of various size,
-// // take deliberately small steps during resizing, to put all whitespace kinds near the wrap point.
-// let resize_step = 10.0;
-// let mut editor_width = 200.0;
-// while editor_width <= 1000.0 {
-// update_test_language_settings(cx, |s| {
-// s.defaults.tab_size = NonZeroU32::new(tab_size);
-// s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
-// s.defaults.preferred_line_length = Some(editor_width as u32);
-// s.defaults.soft_wrap = Some(language_settings::SoftWrap::PreferredLineLength);
-// });
-
-// let actual_invisibles =
-// collect_invisibles_from_new_editor(cx, EditorMode::Full, &input_text, editor_width);
-
-// // Whatever the editor size is, ensure it has the same invisible kinds in the same order
-// // (no good guarantees about the offsets: wrapping could trigger padding and its tests should check the offsets).
-// let mut i = 0;
-// for (actual_index, actual_invisible) in actual_invisibles.iter().enumerate() {
-// i = actual_index;
-// match expected_invisibles.get(i) {
-// Some(expected_invisible) => match (expected_invisible, actual_invisible) {
-// (Invisible::Whitespace { .. }, Invisible::Whitespace { .. })
-// | (Invisible::Tab { .. }, Invisible::Tab { .. }) => {}
-// _ => {
-// panic!("At index {i}, expected invisible {expected_invisible:?} does not match actual {actual_invisible:?} by kind. Actual invisibles: {actual_invisibles:?}")
-// }
-// },
-// None => panic!("Unexpected extra invisible {actual_invisible:?} at index {i}"),
-// }
-// }
-// let missing_expected_invisibles = &expected_invisibles[i + 1..];
-// assert!(
-// missing_expected_invisibles.is_empty(),
-// "Missing expected invisibles after index {i}: {missing_expected_invisibles:?}"
-// );
-
-// editor_width += resize_step;
-// }
-// }
-
-// fn collect_invisibles_from_new_editor(
-// cx: &mut TestAppContext,
-// editor_mode: EditorMode,
-// input_text: &str,
-// editor_width: f32,
-// ) -> Vec<Invisible> {
-// info!(
-// "Creating editor with mode {editor_mode:?}, width {editor_width} and text '{input_text}'"
-// );
-// let editor = cx
-// .add_window(|cx| {
-// let buffer = MultiBuffer::build_simple(&input_text, cx);
-// Editor::new(editor_mode, buffer, None, None, cx)
-// })
-// .root(cx);
-
-// let mut element = EditorElement::new(editor.read_with(cx, |editor, cx| editor.style(cx)));
-// let (_, layout_state) = editor.update(cx, |editor, cx| {
-// editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx);
-// editor.set_wrap_width(Some(editor_width), cx);
-
-// element.layout(
-// SizeConstraint::new(point(editor_width, 500.), point(editor_width, 500.)),
-// editor,
-// cx,
-// )
-// });
-
-// layout_state
-// .position_map
-// .line_layouts
-// .iter()
-// .map(|line_with_invisibles| &line_with_invisibles.invisibles)
-// .flatten()
-// .cloned()
-// .collect()
-// }
-// }
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{
+ display_map::{BlockDisposition, BlockProperties},
+ editor_tests::{init_test, update_test_language_settings},
+ Editor, MultiBuffer,
+ };
+ use gpui::{EmptyView, TestAppContext};
+ use language::language_settings;
+ use log::info;
+ use std::{num::NonZeroU32, sync::Arc};
+ use util::test::sample_text;
+
+ #[gpui::test]
+ fn test_shape_line_numbers(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+ let window = cx.add_window(|cx| {
+ let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
+ Editor::new(EditorMode::Full, buffer, None, cx)
+ });
+
+ let editor = window.root(cx).unwrap();
+ let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
+ let element = EditorElement::new(&editor, style);
+
+ let layouts = window
+ .update(cx, |editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ element
+ .shape_line_numbers(
+ 0..6,
+ &Default::default(),
+ DisplayPoint::new(0, 0),
+ false,
+ &snapshot,
+ cx,
+ )
+ .0
+ })
+ .unwrap();
+ assert_eq!(layouts.len(), 6);
+
+ let relative_rows = window
+ .update(cx, |editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ element.calculate_relative_line_numbers(&snapshot, &(0..6), Some(3))
+ })
+ .unwrap();
+ assert_eq!(relative_rows[&0], 3);
+ assert_eq!(relative_rows[&1], 2);
+ assert_eq!(relative_rows[&2], 1);
+ // current line has no relative number
+ assert_eq!(relative_rows[&4], 1);
+ assert_eq!(relative_rows[&5], 2);
+
+ // works if cursor is before screen
+ let relative_rows = window
+ .update(cx, |editor, cx| {
+ let snapshot = editor.snapshot(cx);
+
+ element.calculate_relative_line_numbers(&snapshot, &(3..6), Some(1))
+ })
+ .unwrap();
+ assert_eq!(relative_rows.len(), 3);
+ assert_eq!(relative_rows[&3], 2);
+ assert_eq!(relative_rows[&4], 3);
+ assert_eq!(relative_rows[&5], 4);
+
+ // works if cursor is after screen
+ let relative_rows = window
+ .update(cx, |editor, cx| {
+ let snapshot = editor.snapshot(cx);
+
+ element.calculate_relative_line_numbers(&snapshot, &(0..3), Some(6))
+ })
+ .unwrap();
+ assert_eq!(relative_rows.len(), 3);
+ assert_eq!(relative_rows[&0], 5);
+ assert_eq!(relative_rows[&1], 4);
+ assert_eq!(relative_rows[&2], 3);
+ }
+
+ #[gpui::test]
+ async fn test_vim_visual_selections(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let window = cx.add_window(|cx| {
+ let buffer = MultiBuffer::build_simple(&(sample_text(6, 6, 'a') + "\n"), cx);
+ Editor::new(EditorMode::Full, buffer, None, cx)
+ });
+ let editor = window.root(cx).unwrap();
+ let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
+ let mut element = EditorElement::new(&editor, style);
+
+ window
+ .update(cx, |editor, cx| {
+ editor.cursor_shape = CursorShape::Block;
+ editor.change_selections(None, cx, |s| {
+ s.select_ranges([
+ Point::new(0, 0)..Point::new(1, 0),
+ Point::new(3, 2)..Point::new(3, 3),
+ Point::new(5, 6)..Point::new(6, 0),
+ ]);
+ });
+ })
+ .unwrap();
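+        // Lay the editor out in a fixed 500x500px region so the resulting selection geometry can be inspected.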
+ let state = cx
+ .update_window(window.into(), |_, cx| {
+ element.compute_layout(
+ Bounds {
+ origin: point(px(500.), px(500.)),
+ size: size(px(500.), px(500.)),
+ },
+ cx,
+ )
+ })
+ .unwrap();
+
+ assert_eq!(state.selections.len(), 1);
+ let local_selections = &state.selections[0].1;
+ assert_eq!(local_selections.len(), 3);
+ // moves cursor back one line
+ assert_eq!(local_selections[0].head, DisplayPoint::new(0, 6));
+ assert_eq!(
+ local_selections[0].range,
+ DisplayPoint::new(0, 0)..DisplayPoint::new(1, 0)
+ );
+
+ // moves cursor back one column
+ assert_eq!(
+ local_selections[1].range,
+ DisplayPoint::new(3, 2)..DisplayPoint::new(3, 3)
+ );
+ assert_eq!(local_selections[1].head, DisplayPoint::new(3, 2));
+
+ // leaves cursor on the max point
+ assert_eq!(
+ local_selections[2].range,
+ DisplayPoint::new(5, 6)..DisplayPoint::new(6, 0)
+ );
+ assert_eq!(local_selections[2].head, DisplayPoint::new(6, 0));
+
+ // active lines does not include 1 (even though the range of the selection does)
+ assert_eq!(
+ state.active_rows.keys().cloned().collect::<Vec<u32>>(),
+ vec![0, 3, 5, 6]
+ );
+
+ // multi-buffer support
+ // in DisplayPoint co-ordinates, this is what we're dealing with:
+ // 0: [[file
+ // 1: header]]
+ // 2: aaaaaa
+ // 3: bbbbbb
+ // 4: cccccc
+ // 5:
+ // 6: ...
+ // 7: ffffff
+ // 8: gggggg
+ // 9: hhhhhh
+ // 10:
+ // 11: [[file
+ // 12: header]]
+ // 13: bbbbbb
+ // 14: cccccc
+ // 15: dddddd
+ let window = cx.add_window(|cx| {
+ let buffer = MultiBuffer::build_multi(
+ [
+ (
+ &(sample_text(8, 6, 'a') + "\n"),
+ vec![
+ Point::new(0, 0)..Point::new(3, 0),
+ Point::new(4, 0)..Point::new(7, 0),
+ ],
+ ),
+ (
+ &(sample_text(8, 6, 'a') + "\n"),
+ vec![Point::new(1, 0)..Point::new(3, 0)],
+ ),
+ ],
+ cx,
+ );
+ Editor::new(EditorMode::Full, buffer, None, cx)
+ });
+ let editor = window.root(cx).unwrap();
+ let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
+ let mut element = EditorElement::new(&editor, style);
+ let state = window.update(cx, |editor, cx| {
+ editor.cursor_shape = CursorShape::Block;
+ editor.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(4, 0)..DisplayPoint::new(7, 0),
+ DisplayPoint::new(10, 0)..DisplayPoint::new(13, 0),
+ ]);
+ });
+ });
+
+ let state = cx
+ .update_window(window.into(), |_, cx| {
+ element.compute_layout(
+ Bounds {
+ origin: point(px(500.), px(500.)),
+ size: size(px(500.), px(500.)),
+ },
+ cx,
+ )
+ })
+ .unwrap();
+ assert_eq!(state.selections.len(), 1);
+ let local_selections = &state.selections[0].1;
+ assert_eq!(local_selections.len(), 2);
+
+ // moves cursor on excerpt boundary back a line
+ // and doesn't allow selection to bleed through
+ assert_eq!(
+ local_selections[0].range,
+ DisplayPoint::new(4, 0)..DisplayPoint::new(6, 0)
+ );
+ assert_eq!(local_selections[0].head, DisplayPoint::new(5, 0));
+ dbg!("Hi");
+ // moves cursor on buffer boundary back two lines
+ // and doesn't allow selection to bleed through
+ assert_eq!(
+ local_selections[1].range,
+ DisplayPoint::new(10, 0)..DisplayPoint::new(11, 0)
+ );
+ assert_eq!(local_selections[1].head, DisplayPoint::new(10, 0));
+ }
+
+ #[gpui::test]
+ fn test_layout_with_placeholder_text_and_blocks(cx: &mut TestAppContext) {
+ init_test(cx, |_| {});
+
+ let window = cx.add_window(|cx| {
+ let buffer = MultiBuffer::build_simple("", cx);
+ Editor::new(EditorMode::Full, buffer, None, cx)
+ });
+ let editor = window.root(cx).unwrap();
+ let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
+ window
+ .update(cx, |editor, cx| {
+ editor.set_placeholder_text("hello", cx);
+ editor.insert_blocks(
+ [BlockProperties {
+ style: BlockStyle::Fixed,
+ disposition: BlockDisposition::Above,
+ height: 3,
+ position: Anchor::min(),
+ render: Arc::new(|_| div().into_any()),
+ }],
+ None,
+ cx,
+ );
+
+ // Blur the editor so that it displays placeholder text.
+ cx.blur();
+ })
+ .unwrap();
+
+ let mut element = EditorElement::new(&editor, style);
+ let mut state = cx
+ .update_window(window.into(), |_, cx| {
+ element.compute_layout(
+ Bounds {
+ origin: point(px(500.), px(500.)),
+ size: size(px(500.), px(500.)),
+ },
+ cx,
+ )
+ })
+ .unwrap();
+ let size = state.position_map.size;
+
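+        // The 3-row block above the buffer plus the single placeholder row yield 4 laid-out lines,
+        // and only the buffer row carries a line number.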
+ assert_eq!(state.position_map.line_layouts.len(), 4);
+ assert_eq!(
+ state
+ .line_numbers
+ .iter()
+ .map(Option::is_some)
+ .collect::<Vec<_>>(),
+ &[false, false, false, true]
+ );
+
+ // Don't panic.
+ let bounds = Bounds::<Pixels>::new(Default::default(), size);
+ cx.update_window(window.into(), |_, cx| {
+ element.paint(bounds, &mut (), cx);
+ })
+ .unwrap()
+ }
+
+ #[gpui::test]
+ fn test_all_invisibles_drawing(cx: &mut TestAppContext) {
+ const TAB_SIZE: u32 = 4;
+
+ let input_text = "\t \t|\t| a b";
+ let expected_invisibles = vec![
+ Invisible::Tab {
+ line_start_offset: 0,
+ },
+ Invisible::Whitespace {
+ line_offset: TAB_SIZE as usize,
+ },
+ Invisible::Tab {
+ line_start_offset: TAB_SIZE as usize + 1,
+ },
+ Invisible::Tab {
+ line_start_offset: TAB_SIZE as usize * 2 + 1,
+ },
+ Invisible::Whitespace {
+ line_offset: TAB_SIZE as usize * 3 + 1,
+ },
+ Invisible::Whitespace {
+ line_offset: TAB_SIZE as usize * 3 + 3,
+ },
+ ];
+ assert_eq!(
+ expected_invisibles.len(),
+ input_text
+ .chars()
+ .filter(|initial_char| initial_char.is_whitespace())
+ .count(),
+ "Hardcoded expected invisibles differ from the actual ones in '{input_text}'"
+ );
+
+ init_test(cx, |s| {
+ s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
+ s.defaults.tab_size = NonZeroU32::new(TAB_SIZE);
+ });
+
+ let actual_invisibles =
+ collect_invisibles_from_new_editor(cx, EditorMode::Full, &input_text, px(500.0));
+
+ assert_eq!(expected_invisibles, actual_invisibles);
+ }
+
+ #[gpui::test]
+ fn test_invisibles_dont_appear_in_certain_editors(cx: &mut TestAppContext) {
+ init_test(cx, |s| {
+ s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
+ s.defaults.tab_size = NonZeroU32::new(4);
+ });
+
+ for editor_mode_without_invisibles in [
+ EditorMode::SingleLine,
+ EditorMode::AutoHeight { max_lines: 100 },
+ ] {
+ let invisibles = collect_invisibles_from_new_editor(
+ cx,
+ editor_mode_without_invisibles,
+ "\t\t\t| | a b",
+ px(500.0),
+ );
+ assert!(invisibles.is_empty(),
+ "For editor mode {editor_mode_without_invisibles:?} no invisibles was expected but got {invisibles:?}");
+ }
+ }
+
+ #[gpui::test]
+ fn test_wrapped_invisibles_drawing(cx: &mut TestAppContext) {
+ let tab_size = 4;
+ let input_text = "a\tbcd ".repeat(9);
+ let repeated_invisibles = [
+ Invisible::Tab {
+ line_start_offset: 1,
+ },
+ Invisible::Whitespace {
+ line_offset: tab_size as usize + 3,
+ },
+ Invisible::Whitespace {
+ line_offset: tab_size as usize + 4,
+ },
+ Invisible::Whitespace {
+ line_offset: tab_size as usize + 5,
+ },
+ ];
+ let expected_invisibles = std::iter::once(repeated_invisibles)
+ .cycle()
+ .take(9)
+ .flatten()
+ .collect::<Vec<_>>();
+ assert_eq!(
+ expected_invisibles.len(),
+ input_text
+ .chars()
+ .filter(|initial_char| initial_char.is_whitespace())
+ .count(),
+ "Hardcoded expected invisibles differ from the actual ones in '{input_text}'"
+ );
+ info!("Expected invisibles: {expected_invisibles:?}");
+
+ init_test(cx, |_| {});
+
+        // Put the same string with a repeating whitespace pattern into editors of various sizes,
+        // taking deliberately small steps during resizing to put all whitespace kinds near the wrap point.
+ let resize_step = 10.0;
+ let mut editor_width = 200.0;
+ while editor_width <= 1000.0 {
+ update_test_language_settings(cx, |s| {
+ s.defaults.tab_size = NonZeroU32::new(tab_size);
+ s.defaults.show_whitespaces = Some(ShowWhitespaceSetting::All);
+ s.defaults.preferred_line_length = Some(editor_width as u32);
+ s.defaults.soft_wrap = Some(language_settings::SoftWrap::PreferredLineLength);
+ });
+
+ let actual_invisibles = collect_invisibles_from_new_editor(
+ cx,
+ EditorMode::Full,
+ &input_text,
+ px(editor_width),
+ );
+
+ // Whatever the editor size is, ensure it has the same invisible kinds in the same order
+        // (no strong guarantees about the offsets: wrapping could trigger padding, and the wrapping tests should check the offsets).
+ let mut i = 0;
+ for (actual_index, actual_invisible) in actual_invisibles.iter().enumerate() {
+ i = actual_index;
+ match expected_invisibles.get(i) {
+ Some(expected_invisible) => match (expected_invisible, actual_invisible) {
+ (Invisible::Whitespace { .. }, Invisible::Whitespace { .. })
+ | (Invisible::Tab { .. }, Invisible::Tab { .. }) => {}
+ _ => {
+ panic!("At index {i}, expected invisible {expected_invisible:?} does not match actual {actual_invisible:?} by kind. Actual invisibles: {actual_invisibles:?}")
+ }
+ },
+ None => panic!("Unexpected extra invisible {actual_invisible:?} at index {i}"),
+ }
+ }
+ let missing_expected_invisibles = &expected_invisibles[i + 1..];
+ assert!(
+ missing_expected_invisibles.is_empty(),
+ "Missing expected invisibles after index {i}: {missing_expected_invisibles:?}"
+ );
+
+ editor_width += resize_step;
+ }
+ }
+
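+    // Creates an editor with the given mode, text, and wrap width, lays it out once,
+    // and returns every invisible (tab/whitespace marker) collected from the rendered lines.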
+ fn collect_invisibles_from_new_editor(
+ cx: &mut TestAppContext,
+ editor_mode: EditorMode,
+ input_text: &str,
+ editor_width: Pixels,
+ ) -> Vec<Invisible> {
+ info!(
+ "Creating editor with mode {editor_mode:?}, width {}px and text '{input_text}'",
+ editor_width.0
+ );
+ let window = cx.add_window(|cx| {
+ let buffer = MultiBuffer::build_simple(&input_text, cx);
+ Editor::new(editor_mode, buffer, None, cx)
+ });
+ let editor = window.root(cx).unwrap();
+ let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
+ let mut element = EditorElement::new(&editor, style);
+ window
+ .update(cx, |editor, cx| {
+ editor.set_soft_wrap_mode(language_settings::SoftWrap::EditorWidth, cx);
+ editor.set_wrap_width(Some(editor_width), cx);
+ })
+ .unwrap();
+ let layout_state = cx
+ .update_window(window.into(), |_, cx| {
+ element.compute_layout(
+ Bounds {
+ origin: point(px(500.), px(500.)),
+ size: size(px(500.), px(500.)),
+ },
+ cx,
+ )
+ })
+ .unwrap();
+
+ layout_state
+ .position_map
+ .line_layouts
+ .iter()
+ .map(|line_with_invisibles| &line_with_invisibles.invisibles)
+ .flatten()
+ .cloned()
+ .collect()
+ }
+}
pub fn register_action<T: Action>(
view: &View<Editor>,
@@ -3714,7 +3757,7 @@ fn compute_auto_height_layout(
let gutter_width;
let gutter_margin;
if snapshot.show_gutter {
- let descent = cx.text_system().descent(font_id, font_size).unwrap();
+ let descent = cx.text_system().descent(font_id, font_size);
let gutter_padding_factor = 3.5;
gutter_padding = (em_width * gutter_padding_factor).round();
gutter_width = max_line_number_width + gutter_padding * 2.0;
@@ -88,195 +88,195 @@ pub fn diff_hunk_to_display(hunk: DiffHunk<u32>, snapshot: &DisplaySnapshot) ->
}
}
-// #[cfg(any(test, feature = "test_support"))]
-// mod tests {
-// // use crate::editor_tests::init_test;
-// use crate::Point;
-// use gpui::TestAppContext;
-// use multi_buffer::{ExcerptRange, MultiBuffer};
-// use project::{FakeFs, Project};
-// use unindent::Unindent;
-// #[gpui::test]
-// async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
-// use git::diff::DiffHunkStatus;
-// init_test(cx, |_| {});
+#[cfg(test)]
+mod tests {
+ use crate::editor_tests::init_test;
+ use crate::Point;
+ use gpui::{Context, TestAppContext};
+ use multi_buffer::{ExcerptRange, MultiBuffer};
+ use project::{FakeFs, Project};
+ use unindent::Unindent;
+ #[gpui::test]
+ async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
+ use git::diff::DiffHunkStatus;
+ init_test(cx, |_| {});
-// let fs = FakeFs::new(cx.background());
-// let project = Project::test(fs, [], cx).await;
+ let fs = FakeFs::new(cx.background_executor.clone());
+ let project = Project::test(fs, [], cx).await;
-// // buffer has two modified hunks with two rows each
-// let buffer_1 = project
-// .update(cx, |project, cx| {
-// project.create_buffer(
-// "
-// 1.zero
-// 1.ONE
-// 1.TWO
-// 1.three
-// 1.FOUR
-// 1.FIVE
-// 1.six
-// "
-// .unindent()
-// .as_str(),
-// None,
-// cx,
-// )
-// })
-// .unwrap();
-// buffer_1.update(cx, |buffer, cx| {
-// buffer.set_diff_base(
-// Some(
-// "
-// 1.zero
-// 1.one
-// 1.two
-// 1.three
-// 1.four
-// 1.five
-// 1.six
-// "
-// .unindent(),
-// ),
-// cx,
-// );
-// });
+ // buffer has two modified hunks with two rows each
+ let buffer_1 = project
+ .update(cx, |project, cx| {
+ project.create_buffer(
+ "
+ 1.zero
+ 1.ONE
+ 1.TWO
+ 1.three
+ 1.FOUR
+ 1.FIVE
+ 1.six
+ "
+ .unindent()
+ .as_str(),
+ None,
+ cx,
+ )
+ })
+ .unwrap();
+ buffer_1.update(cx, |buffer, cx| {
+ buffer.set_diff_base(
+ Some(
+ "
+ 1.zero
+ 1.one
+ 1.two
+ 1.three
+ 1.four
+ 1.five
+ 1.six
+ "
+ .unindent(),
+ ),
+ cx,
+ );
+ });
-// // buffer has a deletion hunk and an insertion hunk
-// let buffer_2 = project
-// .update(cx, |project, cx| {
-// project.create_buffer(
-// "
-// 2.zero
-// 2.one
-// 2.two
-// 2.three
-// 2.four
-// 2.five
-// 2.six
-// "
-// .unindent()
-// .as_str(),
-// None,
-// cx,
-// )
-// })
-// .unwrap();
-// buffer_2.update(cx, |buffer, cx| {
-// buffer.set_diff_base(
-// Some(
-// "
-// 2.zero
-// 2.one
-// 2.one-and-a-half
-// 2.two
-// 2.three
-// 2.four
-// 2.six
-// "
-// .unindent(),
-// ),
-// cx,
-// );
-// });
+ // buffer has a deletion hunk and an insertion hunk
+ let buffer_2 = project
+ .update(cx, |project, cx| {
+ project.create_buffer(
+ "
+ 2.zero
+ 2.one
+ 2.two
+ 2.three
+ 2.four
+ 2.five
+ 2.six
+ "
+ .unindent()
+ .as_str(),
+ None,
+ cx,
+ )
+ })
+ .unwrap();
+ buffer_2.update(cx, |buffer, cx| {
+ buffer.set_diff_base(
+ Some(
+ "
+ 2.zero
+ 2.one
+ 2.one-and-a-half
+ 2.two
+ 2.three
+ 2.four
+ 2.six
+ "
+ .unindent(),
+ ),
+ cx,
+ );
+ });
-// cx.foreground().run_until_parked();
+ cx.background_executor.run_until_parked();
-// let multibuffer = cx.add_model(|cx| {
-// let mut multibuffer = MultiBuffer::new(0);
-// multibuffer.push_excerpts(
-// buffer_1.clone(),
-// [
-// // excerpt ends in the middle of a modified hunk
-// ExcerptRange {
-// context: Point::new(0, 0)..Point::new(1, 5),
-// primary: Default::default(),
-// },
-// // excerpt begins in the middle of a modified hunk
-// ExcerptRange {
-// context: Point::new(5, 0)..Point::new(6, 5),
-// primary: Default::default(),
-// },
-// ],
-// cx,
-// );
-// multibuffer.push_excerpts(
-// buffer_2.clone(),
-// [
-// // excerpt ends at a deletion
-// ExcerptRange {
-// context: Point::new(0, 0)..Point::new(1, 5),
-// primary: Default::default(),
-// },
-// // excerpt starts at a deletion
-// ExcerptRange {
-// context: Point::new(2, 0)..Point::new(2, 5),
-// primary: Default::default(),
-// },
-// // excerpt fully contains a deletion hunk
-// ExcerptRange {
-// context: Point::new(1, 0)..Point::new(2, 5),
-// primary: Default::default(),
-// },
-// // excerpt fully contains an insertion hunk
-// ExcerptRange {
-// context: Point::new(4, 0)..Point::new(6, 5),
-// primary: Default::default(),
-// },
-// ],
-// cx,
-// );
-// multibuffer
-// });
+ let multibuffer = cx.build_model(|cx| {
+ let mut multibuffer = MultiBuffer::new(0);
+ multibuffer.push_excerpts(
+ buffer_1.clone(),
+ [
+ // excerpt ends in the middle of a modified hunk
+ ExcerptRange {
+ context: Point::new(0, 0)..Point::new(1, 5),
+ primary: Default::default(),
+ },
+ // excerpt begins in the middle of a modified hunk
+ ExcerptRange {
+ context: Point::new(5, 0)..Point::new(6, 5),
+ primary: Default::default(),
+ },
+ ],
+ cx,
+ );
+ multibuffer.push_excerpts(
+ buffer_2.clone(),
+ [
+ // excerpt ends at a deletion
+ ExcerptRange {
+ context: Point::new(0, 0)..Point::new(1, 5),
+ primary: Default::default(),
+ },
+ // excerpt starts at a deletion
+ ExcerptRange {
+ context: Point::new(2, 0)..Point::new(2, 5),
+ primary: Default::default(),
+ },
+ // excerpt fully contains a deletion hunk
+ ExcerptRange {
+ context: Point::new(1, 0)..Point::new(2, 5),
+ primary: Default::default(),
+ },
+ // excerpt fully contains an insertion hunk
+ ExcerptRange {
+ context: Point::new(4, 0)..Point::new(6, 5),
+ primary: Default::default(),
+ },
+ ],
+ cx,
+ );
+ multibuffer
+ });
-// let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
+ let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
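+        // The multibuffer text should be the concatenation of all six excerpt ranges pushed above.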
-// assert_eq!(
-// snapshot.text(),
-// "
-// 1.zero
-// 1.ONE
-// 1.FIVE
-// 1.six
-// 2.zero
-// 2.one
-// 2.two
-// 2.one
-// 2.two
-// 2.four
-// 2.five
-// 2.six"
-// .unindent()
-// );
+ assert_eq!(
+ snapshot.text(),
+ "
+ 1.zero
+ 1.ONE
+ 1.FIVE
+ 1.six
+ 2.zero
+ 2.one
+ 2.two
+ 2.one
+ 2.two
+ 2.four
+ 2.five
+ 2.six"
+ .unindent()
+ );
-// let expected = [
-// (DiffHunkStatus::Modified, 1..2),
-// (DiffHunkStatus::Modified, 2..3),
-// //TODO: Define better when and where removed hunks show up at range extremities
-// (DiffHunkStatus::Removed, 6..6),
-// (DiffHunkStatus::Removed, 8..8),
-// (DiffHunkStatus::Added, 10..11),
-// ];
+ let expected = [
+ (DiffHunkStatus::Modified, 1..2),
+ (DiffHunkStatus::Modified, 2..3),
+ //TODO: Define better when and where removed hunks show up at range extremities
+ (DiffHunkStatus::Removed, 6..6),
+ (DiffHunkStatus::Removed, 8..8),
+ (DiffHunkStatus::Added, 10..11),
+ ];
-// assert_eq!(
-// snapshot
-// .git_diff_hunks_in_range(0..12)
-// .map(|hunk| (hunk.status(), hunk.buffer_range))
-// .collect::<Vec<_>>(),
-// &expected,
-// );
+ assert_eq!(
+ snapshot
+ .git_diff_hunks_in_range(0..12)
+ .map(|hunk| (hunk.status(), hunk.buffer_range))
+ .collect::<Vec<_>>(),
+ &expected,
+ );
-// assert_eq!(
-// snapshot
-// .git_diff_hunks_in_range_rev(0..12)
-// .map(|hunk| (hunk.status(), hunk.buffer_range))
-// .collect::<Vec<_>>(),
-// expected
-// .iter()
-// .rev()
-// .cloned()
-// .collect::<Vec<_>>()
-// .as_slice(),
-// );
-// }
-// }
+ assert_eq!(
+ snapshot
+ .git_diff_hunks_in_range_rev(0..12)
+ .map(|hunk| (hunk.status(), hunk.buffer_range))
+ .collect::<Vec<_>>(),
+ expected
+ .iter()
+ .rev()
+ .cloned()
+ .collect::<Vec<_>>()
+ .as_slice(),
+ );
+ }
+}
@@ -5,7 +5,7 @@ use crate::{Editor, RangeToAnchorExt};
enum MatchingBracketHighlight {}
pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewContext<Editor>) {
- // editor.clear_background_highlights::<MatchingBracketHighlight>(cx);
+ editor.clear_background_highlights::<MatchingBracketHighlight>(cx);
let newest_selection = editor.selections.newest::<usize>(cx);
// Don't highlight brackets if the selection isn't empty
@@ -30,109 +30,109 @@ pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewCon
}
}
-// #[cfg(test)]
-// mod tests {
-// use super::*;
-// use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
-// use indoc::indoc;
-// use language::{BracketPair, BracketPairConfig, Language, LanguageConfig};
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
+ use indoc::indoc;
+ use language::{BracketPair, BracketPairConfig, Language, LanguageConfig};
-// #[gpui::test]
-// async fn test_matching_bracket_highlights(cx: &mut gpui::TestAppContext) {
-// init_test(cx, |_| {});
+ #[gpui::test]
+ async fn test_matching_bracket_highlights(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
-// let mut cx = EditorLspTestContext::new(
-// Language::new(
-// LanguageConfig {
-// name: "Rust".into(),
-// path_suffixes: vec!["rs".to_string()],
-// brackets: BracketPairConfig {
-// pairs: vec![
-// BracketPair {
-// start: "{".to_string(),
-// end: "}".to_string(),
-// close: false,
-// newline: true,
-// },
-// BracketPair {
-// start: "(".to_string(),
-// end: ")".to_string(),
-// close: false,
-// newline: true,
-// },
-// ],
-// ..Default::default()
-// },
-// ..Default::default()
-// },
-// Some(tree_sitter_rust::language()),
-// )
-// .with_brackets_query(indoc! {r#"
-// ("{" @open "}" @close)
-// ("(" @open ")" @close)
-// "#})
-// .unwrap(),
-// Default::default(),
-// cx,
-// )
-// .await;
+ let mut cx = EditorLspTestContext::new(
+ Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ brackets: BracketPairConfig {
+ pairs: vec![
+ BracketPair {
+ start: "{".to_string(),
+ end: "}".to_string(),
+ close: false,
+ newline: true,
+ },
+ BracketPair {
+ start: "(".to_string(),
+ end: ")".to_string(),
+ close: false,
+ newline: true,
+ },
+ ],
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )
+ .with_brackets_query(indoc! {r#"
+ ("{" @open "}" @close)
+ ("(" @open ")" @close)
+ "#})
+ .unwrap(),
+ Default::default(),
+ cx,
+ )
+ .await;
-// // positioning cursor inside bracket highlights both
-// cx.set_state(indoc! {r#"
-// pub fn test("Test ˇargument") {
-// another_test(1, 2, 3);
-// }
-// "#});
-// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
-// pub fn test«(»"Test argument"«)» {
-// another_test(1, 2, 3);
-// }
-// "#});
+ // positioning cursor inside bracket highlights both
+ cx.set_state(indoc! {r#"
+ pub fn test("Test ˇargument") {
+ another_test(1, 2, 3);
+ }
+ "#});
+ cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ pub fn test«(»"Test argument"«)» {
+ another_test(1, 2, 3);
+ }
+ "#});
-// cx.set_state(indoc! {r#"
-// pub fn test("Test argument") {
-// another_test(1, ˇ2, 3);
-// }
-// "#});
-// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
-// pub fn test("Test argument") {
-// another_test«(»1, 2, 3«)»;
-// }
-// "#});
+ cx.set_state(indoc! {r#"
+ pub fn test("Test argument") {
+ another_test(1, ˇ2, 3);
+ }
+ "#});
+ cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ pub fn test("Test argument") {
+ another_test«(»1, 2, 3«)»;
+ }
+ "#});
-// cx.set_state(indoc! {r#"
-// pub fn test("Test argument") {
-// anotherˇ_test(1, 2, 3);
-// }
-// "#});
-// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
-// pub fn test("Test argument") «{»
-// another_test(1, 2, 3);
-// «}»
-// "#});
+ cx.set_state(indoc! {r#"
+ pub fn test("Test argument") {
+ anotherˇ_test(1, 2, 3);
+ }
+ "#});
+ cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ pub fn test("Test argument") «{»
+ another_test(1, 2, 3);
+ «}»
+ "#});
-// // positioning outside of brackets removes highlight
-// cx.set_state(indoc! {r#"
-// pub fˇn test("Test argument") {
-// another_test(1, 2, 3);
-// }
-// "#});
-// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
-// pub fn test("Test argument") {
-// another_test(1, 2, 3);
-// }
-// "#});
+ // positioning outside of brackets removes highlight
+ cx.set_state(indoc! {r#"
+ pub fˇn test("Test argument") {
+ another_test(1, 2, 3);
+ }
+ "#});
+ cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ pub fn test("Test argument") {
+ another_test(1, 2, 3);
+ }
+ "#});
-// // non empty selection dismisses highlight
-// cx.set_state(indoc! {r#"
-// pub fn test("Te«st argˇ»ument") {
-// another_test(1, 2, 3);
-// }
-// "#});
-// cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
-// pub fn test("Test argument") {
-// another_test(1, 2, 3);
-// }
-// "#});
-// }
-// }
+        // non-empty selection dismisses highlight
+ cx.set_state(indoc! {r#"
+ pub fn test("Te«st argˇ»ument") {
+ another_test(1, 2, 3);
+ }
+ "#});
+ cx.assert_editor_background_highlights::<MatchingBracketHighlight>(indoc! {r#"
+ pub fn test("Test argument") {
+ another_test(1, 2, 3);
+ }
+ "#});
+ }
+}
@@ -2432,13 +2432,13 @@ pub mod tests {
let language = Arc::new(language);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
- "/a",
- json!({
- "main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::<Vec<_>>().join("")),
- "other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::<Vec<_>>().join("")),
- }),
- )
- .await;
+ "/a",
+ json!({
+ "main.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|i| format!("let i = {i};\n")).collect::<Vec<_>>().join("")),
+ "other.rs": format!("fn main() {{\n{}\n}}", (0..501).map(|j| format!("let j = {j};\n")).collect::<Vec<_>>().join("")),
+ }),
+ )
+ .await;
let project = Project::test(fs, ["/a".as_ref()], cx).await;
project.update(cx, |project, _| {
project.languages().add(Arc::clone(&language))
@@ -2598,24 +2598,22 @@ pub mod tests {
cx.executor().run_until_parked();
editor.update(cx, |editor, cx| {
- let expected_hints = vec![
- "main hint #0".to_string(),
- "main hint #1".to_string(),
- "main hint #2".to_string(),
- "main hint #3".to_string(),
- // todo!() there used to be no these hints, but new gpui2 presumably scrolls a bit farther
- // (or renders less?) note that tests below pass
- "main hint #4".to_string(),
- "main hint #5".to_string(),
- ];
- assert_eq!(
- expected_hints,
- cached_hint_labels(editor),
- "When scroll is at the edge of a multibuffer, its visible excerpts only should be queried for inlay hints"
- );
- assert_eq!(expected_hints, visible_hint_labels(editor, cx));
- assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(), "Every visible excerpt hints should bump the verison");
- });
+ let expected_hints = vec![
+ "main hint #0".to_string(),
+ "main hint #1".to_string(),
+ "main hint #2".to_string(),
+ "main hint #3".to_string(),
+ "main hint #4".to_string(),
+ "main hint #5".to_string(),
+ ];
+ assert_eq!(
+ expected_hints,
+ cached_hint_labels(editor),
+ "When scroll is at the edge of a multibuffer, its visible excerpts only should be queried for inlay hints"
+ );
+ assert_eq!(expected_hints, visible_hint_labels(editor, cx));
+            assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(), "Every visible excerpt's hints should bump the version");
+ });
editor.update(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::Next), cx, |s| {
@@ -2630,23 +2628,23 @@ pub mod tests {
});
cx.executor().run_until_parked();
editor.update(cx, |editor, cx| {
- let expected_hints = vec![
- "main hint #0".to_string(),
- "main hint #1".to_string(),
- "main hint #2".to_string(),
- "main hint #3".to_string(),
- "main hint #4".to_string(),
- "main hint #5".to_string(),
- "other hint #0".to_string(),
- "other hint #1".to_string(),
- "other hint #2".to_string(),
- ];
- assert_eq!(expected_hints, cached_hint_labels(editor),
- "With more scrolls of the multibuffer, more hints should be added into the cache and nothing invalidated without edits");
- assert_eq!(expected_hints, visible_hint_labels(editor, cx));
- assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(),
- "Due to every excerpt having one hint, we update cache per new excerpt scrolled");
- });
+ let expected_hints = vec![
+ "main hint #0".to_string(),
+ "main hint #1".to_string(),
+ "main hint #2".to_string(),
+ "main hint #3".to_string(),
+ "main hint #4".to_string(),
+ "main hint #5".to_string(),
+ "other hint #0".to_string(),
+ "other hint #1".to_string(),
+ "other hint #2".to_string(),
+ ];
+ assert_eq!(expected_hints, cached_hint_labels(editor),
+ "With more scrolls of the multibuffer, more hints should be added into the cache and nothing invalidated without edits");
+ assert_eq!(expected_hints, visible_hint_labels(editor, cx));
+ assert_eq!(editor.inlay_hint_cache().version, expected_hints.len(),
+ "Due to every excerpt having one hint, we update cache per new excerpt scrolled");
+ });
editor.update(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::Next), cx, |s| {
@@ -2658,26 +2656,26 @@ pub mod tests {
));
cx.executor().run_until_parked();
let last_scroll_update_version = editor.update(cx, |editor, cx| {
- let expected_hints = vec![
- "main hint #0".to_string(),
- "main hint #1".to_string(),
- "main hint #2".to_string(),
- "main hint #3".to_string(),
- "main hint #4".to_string(),
- "main hint #5".to_string(),
- "other hint #0".to_string(),
- "other hint #1".to_string(),
- "other hint #2".to_string(),
- "other hint #3".to_string(),
- "other hint #4".to_string(),
- "other hint #5".to_string(),
- ];
- assert_eq!(expected_hints, cached_hint_labels(editor),
- "After multibuffer was scrolled to the end, all hints for all excerpts should be fetched");
- assert_eq!(expected_hints, visible_hint_labels(editor, cx));
- assert_eq!(editor.inlay_hint_cache().version, expected_hints.len());
- expected_hints.len()
- }).unwrap();
+ let expected_hints = vec![
+ "main hint #0".to_string(),
+ "main hint #1".to_string(),
+ "main hint #2".to_string(),
+ "main hint #3".to_string(),
+ "main hint #4".to_string(),
+ "main hint #5".to_string(),
+ "other hint #0".to_string(),
+ "other hint #1".to_string(),
+ "other hint #2".to_string(),
+ "other hint #3".to_string(),
+ "other hint #4".to_string(),
+ "other hint #5".to_string(),
+ ];
+ assert_eq!(expected_hints, cached_hint_labels(editor),
+ "After multibuffer was scrolled to the end, all hints for all excerpts should be fetched");
+ assert_eq!(expected_hints, visible_hint_labels(editor, cx));
+ assert_eq!(editor.inlay_hint_cache().version, expected_hints.len());
+ expected_hints.len()
+ }).unwrap();
editor.update(cx, |editor, cx| {
editor.change_selections(Some(Autoscroll::Next), cx, |s| {
@@ -2686,30 +2684,31 @@ pub mod tests {
});
cx.executor().run_until_parked();
editor.update(cx, |editor, cx| {
- let expected_hints = vec![
- "main hint #0".to_string(),
- "main hint #1".to_string(),
- "main hint #2".to_string(),
- "main hint #3".to_string(),
- "main hint #4".to_string(),
- "main hint #5".to_string(),
- "other hint #0".to_string(),
- "other hint #1".to_string(),
- "other hint #2".to_string(),
- "other hint #3".to_string(),
- "other hint #4".to_string(),
- "other hint #5".to_string(),
- ];
- assert_eq!(expected_hints, cached_hint_labels(editor),
- "After multibuffer was scrolled to the end, further scrolls up should not bring more hints");
- assert_eq!(expected_hints, visible_hint_labels(editor, cx));
- assert_eq!(editor.inlay_hint_cache().version, last_scroll_update_version, "No updates should happen during scrolling already scolled buffer");
- });
+ let expected_hints = vec![
+ "main hint #0".to_string(),
+ "main hint #1".to_string(),
+ "main hint #2".to_string(),
+ "main hint #3".to_string(),
+ "main hint #4".to_string(),
+ "main hint #5".to_string(),
+ "other hint #0".to_string(),
+ "other hint #1".to_string(),
+ "other hint #2".to_string(),
+ "other hint #3".to_string(),
+ "other hint #4".to_string(),
+ "other hint #5".to_string(),
+ ];
+ assert_eq!(expected_hints, cached_hint_labels(editor),
+ "After multibuffer was scrolled to the end, further scrolls up should not bring more hints");
+ assert_eq!(expected_hints, visible_hint_labels(editor, cx));
+            assert_eq!(editor.inlay_hint_cache().version, last_scroll_update_version, "No updates should happen while scrolling an already scrolled buffer");
+ });
editor_edited.store(true, Ordering::Release);
editor.update(cx, |editor, cx| {
editor.change_selections(None, cx, |s| {
- s.select_ranges([Point::new(56, 0)..Point::new(56, 0)])
+                // TODO: if this gets set to the hint boundary (e.g. 56), we sometimes get an extra cache version bump. Why?
+ s.select_ranges([Point::new(57, 0)..Point::new(57, 0)])
});
editor.handle_input("++++more text++++", cx);
});
@@ -2729,15 +2728,15 @@ pub mod tests {
expected_hints,
cached_hint_labels(editor),
"After multibuffer edit, editor gets scolled back to the last selection; \
-all hints should be invalidated and requeried for all of its visible excerpts"
+ all hints should be invalidated and requeried for all of its visible excerpts"
);
assert_eq!(expected_hints, visible_hint_labels(editor, cx));
let current_cache_version = editor.inlay_hint_cache().version;
- let minimum_expected_version = last_scroll_update_version + expected_hints.len();
- assert!(
- current_cache_version == minimum_expected_version || current_cache_version == minimum_expected_version + 1,
- "Due to every excerpt having one hint, cache should update per new excerpt received + 1 potential sporadic update"
+ assert_eq!(
+ current_cache_version,
+ last_scroll_update_version + expected_hints.len(),
+ "We should have updated cache N times == N of new hints arrived (separately from each excerpt)"
);
});
}
@@ -608,671 +608,672 @@ fn go_to_fetched_definition_of_kind(
}
}
-// #[cfg(test)]
-// mod tests {
-// use super::*;
-// use crate::{
-// display_map::ToDisplayPoint,
-// editor_tests::init_test,
-// inlay_hint_cache::tests::{cached_hint_labels, visible_hint_labels},
-// test::editor_lsp_test_context::EditorLspTestContext,
-// };
-// use futures::StreamExt;
-// use gpui::{
-// platform::{self, Modifiers, ModifiersChangedEvent},
-// View,
-// };
-// use indoc::indoc;
-// use language::language_settings::InlayHintSettings;
-// use lsp::request::{GotoDefinition, GotoTypeDefinition};
-// use util::assert_set_eq;
-
-// #[gpui::test]
-// async fn test_link_go_to_type_definition(cx: &mut gpui::TestAppContext) {
-// init_test(cx, |_| {});
-
-// let mut cx = EditorLspTestContext::new_rust(
-// lsp::ServerCapabilities {
-// hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
-// type_definition_provider: Some(lsp::TypeDefinitionProviderCapability::Simple(true)),
-// ..Default::default()
-// },
-// cx,
-// )
-// .await;
-
-// cx.set_state(indoc! {"
-// struct A;
-// let vˇariable = A;
-// "});
-
-// // Basic hold cmd+shift, expect highlight in region if response contains type definition
-// let hover_point = cx.display_point(indoc! {"
-// struct A;
-// let vˇariable = A;
-// "});
-// let symbol_range = cx.lsp_range(indoc! {"
-// struct A;
-// let «variable» = A;
-// "});
-// let target_range = cx.lsp_range(indoc! {"
-// struct «A»;
-// let variable = A;
-// "});
-
-// let mut requests =
-// cx.handle_request::<GotoTypeDefinition, _, _>(move |url, _, _| async move {
-// Ok(Some(lsp::GotoTypeDefinitionResponse::Link(vec![
-// lsp::LocationLink {
-// origin_selection_range: Some(symbol_range),
-// target_uri: url.clone(),
-// target_range,
-// target_selection_range: target_range,
-// },
-// ])))
-// });
-
-// // Press cmd+shift to trigger highlight
-// cx.update_editor(|editor, cx| {
-// update_go_to_definition_link(
-// editor,
-// Some(GoToDefinitionTrigger::Text(hover_point)),
-// true,
-// true,
-// cx,
-// );
-// });
-// requests.next().await;
-// cx.foreground().run_until_parked();
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// struct A;
-// let «variable» = A;
-// "});
-
-// // Unpress shift causes highlight to go away (normal goto-definition is not valid here)
-// cx.update_editor(|editor, cx| {
-// editor.modifiers_changed(
-// &platform::ModifiersChangedEvent {
-// modifiers: Modifiers {
-// cmd: true,
-// ..Default::default()
-// },
-// ..Default::default()
-// },
-// cx,
-// );
-// });
-// // Assert no link highlights
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// struct A;
-// let variable = A;
-// "});
-
-// // Cmd+shift click without existing definition requests and jumps
-// let hover_point = cx.display_point(indoc! {"
-// struct A;
-// let vˇariable = A;
-// "});
-// let target_range = cx.lsp_range(indoc! {"
-// struct «A»;
-// let variable = A;
-// "});
-
-// let mut requests =
-// cx.handle_request::<GotoTypeDefinition, _, _>(move |url, _, _| async move {
-// Ok(Some(lsp::GotoTypeDefinitionResponse::Link(vec![
-// lsp::LocationLink {
-// origin_selection_range: None,
-// target_uri: url,
-// target_range,
-// target_selection_range: target_range,
-// },
-// ])))
-// });
-
-// cx.update_editor(|editor, cx| {
-// go_to_fetched_type_definition(editor, PointForPosition::valid(hover_point), false, cx);
-// });
-// requests.next().await;
-// cx.foreground().run_until_parked();
-
-// cx.assert_editor_state(indoc! {"
-// struct «Aˇ»;
-// let variable = A;
-// "});
-// }
-
-// #[gpui::test]
-// async fn test_link_go_to_definition(cx: &mut gpui::TestAppContext) {
-// init_test(cx, |_| {});
-
-// let mut cx = EditorLspTestContext::new_rust(
-// lsp::ServerCapabilities {
-// hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
-// ..Default::default()
-// },
-// cx,
-// )
-// .await;
-
-// cx.set_state(indoc! {"
-// fn ˇtest() { do_work(); }
-// fn do_work() { test(); }
-// "});
-
-// // Basic hold cmd, expect highlight in region if response contains definition
-// let hover_point = cx.display_point(indoc! {"
-// fn test() { do_wˇork(); }
-// fn do_work() { test(); }
-// "});
-// let symbol_range = cx.lsp_range(indoc! {"
-// fn test() { «do_work»(); }
-// fn do_work() { test(); }
-// "});
-// let target_range = cx.lsp_range(indoc! {"
-// fn test() { do_work(); }
-// fn «do_work»() { test(); }
-// "});
-
-// let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
-// Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
-// lsp::LocationLink {
-// origin_selection_range: Some(symbol_range),
-// target_uri: url.clone(),
-// target_range,
-// target_selection_range: target_range,
-// },
-// ])))
-// });
-
-// cx.update_editor(|editor, cx| {
-// update_go_to_definition_link(
-// editor,
-// Some(GoToDefinitionTrigger::Text(hover_point)),
-// true,
-// false,
-// cx,
-// );
-// });
-// requests.next().await;
-// cx.foreground().run_until_parked();
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { «do_work»(); }
-// fn do_work() { test(); }
-// "});
-
-// // Unpress cmd causes highlight to go away
-// cx.update_editor(|editor, cx| {
-// editor.modifiers_changed(&Default::default(), cx);
-// });
-
-// // Assert no link highlights
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { test(); }
-// "});
-
-// // Response without source range still highlights word
-// cx.update_editor(|editor, _| editor.link_go_to_definition_state.last_trigger_point = None);
-// let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
-// Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
-// lsp::LocationLink {
-// // No origin range
-// origin_selection_range: None,
-// target_uri: url.clone(),
-// target_range,
-// target_selection_range: target_range,
-// },
-// ])))
-// });
-// cx.update_editor(|editor, cx| {
-// update_go_to_definition_link(
-// editor,
-// Some(GoToDefinitionTrigger::Text(hover_point)),
-// true,
-// false,
-// cx,
-// );
-// });
-// requests.next().await;
-// cx.foreground().run_until_parked();
-
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { «do_work»(); }
-// fn do_work() { test(); }
-// "});
-
-// // Moving mouse to location with no response dismisses highlight
-// let hover_point = cx.display_point(indoc! {"
-// fˇn test() { do_work(); }
-// fn do_work() { test(); }
-// "});
-// let mut requests = cx
-// .lsp
-// .handle_request::<GotoDefinition, _, _>(move |_, _| async move {
-// // No definitions returned
-// Ok(Some(lsp::GotoDefinitionResponse::Link(vec![])))
-// });
-// cx.update_editor(|editor, cx| {
-// update_go_to_definition_link(
-// editor,
-// Some(GoToDefinitionTrigger::Text(hover_point)),
-// true,
-// false,
-// cx,
-// );
-// });
-// requests.next().await;
-// cx.foreground().run_until_parked();
-
-// // Assert no link highlights
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { test(); }
-// "});
-
-// // Move mouse without cmd and then pressing cmd triggers highlight
-// let hover_point = cx.display_point(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { teˇst(); }
-// "});
-// cx.update_editor(|editor, cx| {
-// update_go_to_definition_link(
-// editor,
-// Some(GoToDefinitionTrigger::Text(hover_point)),
-// false,
-// false,
-// cx,
-// );
-// });
-// cx.foreground().run_until_parked();
-
-// // Assert no link highlights
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { test(); }
-// "});
-
-// let symbol_range = cx.lsp_range(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { «test»(); }
-// "});
-// let target_range = cx.lsp_range(indoc! {"
-// fn «test»() { do_work(); }
-// fn do_work() { test(); }
-// "});
-
-// let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
-// Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
-// lsp::LocationLink {
-// origin_selection_range: Some(symbol_range),
-// target_uri: url,
-// target_range,
-// target_selection_range: target_range,
-// },
-// ])))
-// });
-// cx.update_editor(|editor, cx| {
-// editor.modifiers_changed(
-// &ModifiersChangedEvent {
-// modifiers: Modifiers {
-// cmd: true,
-// ..Default::default()
-// },
-// },
-// cx,
-// );
-// });
-// requests.next().await;
-// cx.foreground().run_until_parked();
-
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { «test»(); }
-// "});
-
-// // Deactivating the window dismisses the highlight
-// cx.update_workspace(|workspace, cx| {
-// workspace.on_window_activation_changed(false, cx);
-// });
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { test(); }
-// "});
-
-// // Moving the mouse restores the highlights.
-// cx.update_editor(|editor, cx| {
-// update_go_to_definition_link(
-// editor,
-// Some(GoToDefinitionTrigger::Text(hover_point)),
-// true,
-// false,
-// cx,
-// );
-// });
-// cx.foreground().run_until_parked();
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { «test»(); }
-// "});
-
-// // Moving again within the same symbol range doesn't re-request
-// let hover_point = cx.display_point(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { tesˇt(); }
-// "});
-// cx.update_editor(|editor, cx| {
-// update_go_to_definition_link(
-// editor,
-// Some(GoToDefinitionTrigger::Text(hover_point)),
-// true,
-// false,
-// cx,
-// );
-// });
-// cx.foreground().run_until_parked();
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { «test»(); }
-// "});
-
-// // Cmd click with existing definition doesn't re-request and dismisses highlight
-// cx.update_editor(|editor, cx| {
-// go_to_fetched_definition(editor, PointForPosition::valid(hover_point), false, cx);
-// });
-// // Assert selection moved to to definition
-// cx.lsp
-// .handle_request::<GotoDefinition, _, _>(move |_, _| async move {
-// // Empty definition response to make sure we aren't hitting the lsp and using
-// // the cached location instead
-// Ok(Some(lsp::GotoDefinitionResponse::Link(vec![])))
-// });
-// cx.foreground().run_until_parked();
-// cx.assert_editor_state(indoc! {"
-// fn «testˇ»() { do_work(); }
-// fn do_work() { test(); }
-// "});
-
-// // Assert no link highlights after jump
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { test(); }
-// "});
-
-// // Cmd click without existing definition requests and jumps
-// let hover_point = cx.display_point(indoc! {"
-// fn test() { do_wˇork(); }
-// fn do_work() { test(); }
-// "});
-// let target_range = cx.lsp_range(indoc! {"
-// fn test() { do_work(); }
-// fn «do_work»() { test(); }
-// "});
-
-// let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
-// Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
-// lsp::LocationLink {
-// origin_selection_range: None,
-// target_uri: url,
-// target_range,
-// target_selection_range: target_range,
-// },
-// ])))
-// });
-// cx.update_editor(|editor, cx| {
-// go_to_fetched_definition(editor, PointForPosition::valid(hover_point), false, cx);
-// });
-// requests.next().await;
-// cx.foreground().run_until_parked();
-// cx.assert_editor_state(indoc! {"
-// fn test() { do_work(); }
-// fn «do_workˇ»() { test(); }
-// "});
-
-// // 1. We have a pending selection, mouse point is over a symbol that we have a response for, hitting cmd and nothing happens
-// // 2. Selection is completed, hovering
-// let hover_point = cx.display_point(indoc! {"
-// fn test() { do_wˇork(); }
-// fn do_work() { test(); }
-// "});
-// let target_range = cx.lsp_range(indoc! {"
-// fn test() { do_work(); }
-// fn «do_work»() { test(); }
-// "});
-// let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
-// Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
-// lsp::LocationLink {
-// origin_selection_range: None,
-// target_uri: url,
-// target_range,
-// target_selection_range: target_range,
-// },
-// ])))
-// });
-
-// // create a pending selection
-// let selection_range = cx.ranges(indoc! {"
-// fn «test() { do_w»ork(); }
-// fn do_work() { test(); }
-// "})[0]
-// .clone();
-// cx.update_editor(|editor, cx| {
-// let snapshot = editor.buffer().read(cx).snapshot(cx);
-// let anchor_range = snapshot.anchor_before(selection_range.start)
-// ..snapshot.anchor_after(selection_range.end);
-// editor.change_selections(Some(crate::Autoscroll::fit()), cx, |s| {
-// s.set_pending_anchor_range(anchor_range, crate::SelectMode::Character)
-// });
-// });
-// cx.update_editor(|editor, cx| {
-// update_go_to_definition_link(
-// editor,
-// Some(GoToDefinitionTrigger::Text(hover_point)),
-// true,
-// false,
-// cx,
-// );
-// });
-// cx.foreground().run_until_parked();
-// assert!(requests.try_next().is_err());
-// cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
-// fn test() { do_work(); }
-// fn do_work() { test(); }
-// "});
-// cx.foreground().run_until_parked();
-// }
-
-// #[gpui::test]
-// async fn test_link_go_to_inlay(cx: &mut gpui::TestAppContext) {
-// init_test(cx, |settings| {
-// settings.defaults.inlay_hints = Some(InlayHintSettings {
-// enabled: true,
-// show_type_hints: true,
-// show_parameter_hints: true,
-// show_other_hints: true,
-// })
-// });
-
-// let mut cx = EditorLspTestContext::new_rust(
-// lsp::ServerCapabilities {
-// inlay_hint_provider: Some(lsp::OneOf::Left(true)),
-// ..Default::default()
-// },
-// cx,
-// )
-// .await;
-// cx.set_state(indoc! {"
-// struct TestStruct;
-
-// fn main() {
-// let variableˇ = TestStruct;
-// }
-// "});
-// let hint_start_offset = cx.ranges(indoc! {"
-// struct TestStruct;
-
-// fn main() {
-// let variableˇ = TestStruct;
-// }
-// "})[0]
-// .start;
-// let hint_position = cx.to_lsp(hint_start_offset);
-// let target_range = cx.lsp_range(indoc! {"
-// struct «TestStruct»;
-
-// fn main() {
-// let variable = TestStruct;
-// }
-// "});
-
-// let expected_uri = cx.buffer_lsp_url.clone();
-// let hint_label = ": TestStruct";
-// cx.lsp
-// .handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
-// let expected_uri = expected_uri.clone();
-// async move {
-// assert_eq!(params.text_document.uri, expected_uri);
-// Ok(Some(vec![lsp::InlayHint {
-// position: hint_position,
-// label: lsp::InlayHintLabel::LabelParts(vec![lsp::InlayHintLabelPart {
-// value: hint_label.to_string(),
-// location: Some(lsp::Location {
-// uri: params.text_document.uri,
-// range: target_range,
-// }),
-// ..Default::default()
-// }]),
-// kind: Some(lsp::InlayHintKind::TYPE),
-// text_edits: None,
-// tooltip: None,
-// padding_left: Some(false),
-// padding_right: Some(false),
-// data: None,
-// }]))
-// }
-// })
-// .next()
-// .await;
-// cx.foreground().run_until_parked();
-// cx.update_editor(|editor, cx| {
-// let expected_layers = vec![hint_label.to_string()];
-// assert_eq!(expected_layers, cached_hint_labels(editor));
-// assert_eq!(expected_layers, visible_hint_labels(editor, cx));
-// });
-
-// let inlay_range = cx
-// .ranges(indoc! {"
-// struct TestStruct;
-
-// fn main() {
-// let variable« »= TestStruct;
-// }
-// "})
-// .get(0)
-// .cloned()
-// .unwrap();
-// let hint_hover_position = cx.update_editor(|editor, cx| {
-// let snapshot = editor.snapshot(cx);
-// let previous_valid = inlay_range.start.to_display_point(&snapshot);
-// let next_valid = inlay_range.end.to_display_point(&snapshot);
-// assert_eq!(previous_valid.row(), next_valid.row());
-// assert!(previous_valid.column() < next_valid.column());
-// let exact_unclipped = DisplayPoint::new(
-// previous_valid.row(),
-// previous_valid.column() + (hint_label.len() / 2) as u32,
-// );
-// PointForPosition {
-// previous_valid,
-// next_valid,
-// exact_unclipped,
-// column_overshoot_after_line_end: 0,
-// }
-// });
-// // Press cmd to trigger highlight
-// cx.update_editor(|editor, cx| {
-// update_inlay_link_and_hover_points(
-// &editor.snapshot(cx),
-// hint_hover_position,
-// editor,
-// true,
-// false,
-// cx,
-// );
-// });
-// cx.foreground().run_until_parked();
-// cx.update_editor(|editor, cx| {
-// let snapshot = editor.snapshot(cx);
-// let actual_highlights = snapshot
-// .inlay_highlights::<LinkGoToDefinitionState>()
-// .into_iter()
-// .flat_map(|highlights| highlights.values().map(|(_, highlight)| highlight))
-// .collect::<Vec<_>>();
-
-// let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
-// let expected_highlight = InlayHighlight {
-// inlay: InlayId::Hint(0),
-// inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
-// range: 0..hint_label.len(),
-// };
-// assert_set_eq!(actual_highlights, vec![&expected_highlight]);
-// });
-
-// // Unpress cmd causes highlight to go away
-// cx.update_editor(|editor, cx| {
-// editor.modifiers_changed(
-// &platform::ModifiersChangedEvent {
-// modifiers: Modifiers {
-// cmd: false,
-// ..Default::default()
-// },
-// ..Default::default()
-// },
-// cx,
-// );
-// });
-// // Assert no link highlights
-// cx.update_editor(|editor, cx| {
-// let snapshot = editor.snapshot(cx);
-// let actual_ranges = snapshot
-// .text_highlight_ranges::<LinkGoToDefinitionState>()
-// .map(|ranges| ranges.as_ref().clone().1)
-// .unwrap_or_default();
-
-// assert!(actual_ranges.is_empty(), "When no cmd is pressed, should have no hint label selected, but got: {actual_ranges:?}");
-// });
-
-// // Cmd+click without existing definition requests and jumps
-// cx.update_editor(|editor, cx| {
-// editor.modifiers_changed(
-// &platform::ModifiersChangedEvent {
-// modifiers: Modifiers {
-// cmd: true,
-// ..Default::default()
-// },
-// ..Default::default()
-// },
-// cx,
-// );
-// update_inlay_link_and_hover_points(
-// &editor.snapshot(cx),
-// hint_hover_position,
-// editor,
-// true,
-// false,
-// cx,
-// );
-// });
-// cx.foreground().run_until_parked();
-// cx.update_editor(|editor, cx| {
-// go_to_fetched_type_definition(editor, hint_hover_position, false, cx);
-// });
-// cx.foreground().run_until_parked();
-// cx.assert_editor_state(indoc! {"
-// struct «TestStructˇ»;
-
-// fn main() {
-// let variable = TestStruct;
-// }
-// "});
-// }
-// }
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{
+ display_map::ToDisplayPoint,
+ editor_tests::init_test,
+ inlay_hint_cache::tests::{cached_hint_labels, visible_hint_labels},
+ test::editor_lsp_test_context::EditorLspTestContext,
+ };
+ use futures::StreamExt;
+ use gpui::{Modifiers, ModifiersChangedEvent, View};
+ use indoc::indoc;
+ use language::language_settings::InlayHintSettings;
+ use lsp::request::{GotoDefinition, GotoTypeDefinition};
+ use util::assert_set_eq;
+
+ #[gpui::test]
+ async fn test_link_go_to_type_definition(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
+ type_definition_provider: Some(lsp::TypeDefinitionProviderCapability::Simple(true)),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ cx.set_state(indoc! {"
+ struct A;
+ let vˇariable = A;
+ "});
+
+ // Basic hold cmd+shift, expect highlight in region if response contains type definition
+ let hover_point = cx.display_point(indoc! {"
+ struct A;
+ let vˇariable = A;
+ "});
+ let symbol_range = cx.lsp_range(indoc! {"
+ struct A;
+ let «variable» = A;
+ "});
+ let target_range = cx.lsp_range(indoc! {"
+ struct «A»;
+ let variable = A;
+ "});
+
+ let mut requests =
+ cx.handle_request::<GotoTypeDefinition, _, _>(move |url, _, _| async move {
+ Ok(Some(lsp::GotoTypeDefinitionResponse::Link(vec![
+ lsp::LocationLink {
+ origin_selection_range: Some(symbol_range),
+ target_uri: url.clone(),
+ target_range,
+ target_selection_range: target_range,
+ },
+ ])))
+ });
+
+ // Press cmd+shift to trigger highlight
+ cx.update_editor(|editor, cx| {
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ true,
+ cx,
+ );
+ });
+ requests.next().await;
+ cx.background_executor.run_until_parked();
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ struct A;
+ let «variable» = A;
+ "});
+
+ // Unpress shift causes highlight to go away (normal goto-definition is not valid here)
+ cx.update_editor(|editor, cx| {
+ crate::element::EditorElement::modifiers_changed(
+ editor,
+ &ModifiersChangedEvent {
+ modifiers: Modifiers {
+ command: true,
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ cx,
+ );
+ });
+ // Assert no link highlights
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ struct A;
+ let variable = A;
+ "});
+
+ // Cmd+shift click without existing definition requests and jumps
+ let hover_point = cx.display_point(indoc! {"
+ struct A;
+ let vˇariable = A;
+ "});
+ let target_range = cx.lsp_range(indoc! {"
+ struct «A»;
+ let variable = A;
+ "});
+
+ let mut requests =
+ cx.handle_request::<GotoTypeDefinition, _, _>(move |url, _, _| async move {
+ Ok(Some(lsp::GotoTypeDefinitionResponse::Link(vec![
+ lsp::LocationLink {
+ origin_selection_range: None,
+ target_uri: url,
+ target_range,
+ target_selection_range: target_range,
+ },
+ ])))
+ });
+
+ cx.update_editor(|editor, cx| {
+ go_to_fetched_type_definition(editor, PointForPosition::valid(hover_point), false, cx);
+ });
+ requests.next().await;
+ cx.background_executor.run_until_parked();
+
+ cx.assert_editor_state(indoc! {"
+ struct «Aˇ»;
+ let variable = A;
+ "});
+ }
+
+ #[gpui::test]
+ async fn test_link_go_to_definition(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ cx.set_state(indoc! {"
+ fn ˇtest() { do_work(); }
+ fn do_work() { test(); }
+ "});
+
+ // Basic hold cmd, expect highlight in region if response contains definition
+ let hover_point = cx.display_point(indoc! {"
+ fn test() { do_wˇork(); }
+ fn do_work() { test(); }
+ "});
+ let symbol_range = cx.lsp_range(indoc! {"
+ fn test() { «do_work»(); }
+ fn do_work() { test(); }
+ "});
+ let target_range = cx.lsp_range(indoc! {"
+ fn test() { do_work(); }
+ fn «do_work»() { test(); }
+ "});
+
+ let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
+ Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
+ lsp::LocationLink {
+ origin_selection_range: Some(symbol_range),
+ target_uri: url.clone(),
+ target_range,
+ target_selection_range: target_range,
+ },
+ ])))
+ });
+
+ cx.update_editor(|editor, cx| {
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
+ });
+ requests.next().await;
+ cx.background_executor.run_until_parked();
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { «do_work»(); }
+ fn do_work() { test(); }
+ "});
+
+ // Unpress cmd causes highlight to go away
+ cx.update_editor(|editor, cx| {
+ crate::element::EditorElement::modifiers_changed(editor, &Default::default(), cx);
+ });
+
+ // Assert no link highlights
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { test(); }
+ "});
+
+ // Response without source range still highlights word
+ cx.update_editor(|editor, _| editor.link_go_to_definition_state.last_trigger_point = None);
+ let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
+ Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
+ lsp::LocationLink {
+ // No origin range
+ origin_selection_range: None,
+ target_uri: url.clone(),
+ target_range,
+ target_selection_range: target_range,
+ },
+ ])))
+ });
+ cx.update_editor(|editor, cx| {
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
+ });
+ requests.next().await;
+ cx.background_executor.run_until_parked();
+
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { «do_work»(); }
+ fn do_work() { test(); }
+ "});
+
+ // Moving mouse to location with no response dismisses highlight
+ let hover_point = cx.display_point(indoc! {"
+ fˇn test() { do_work(); }
+ fn do_work() { test(); }
+ "});
+ let mut requests = cx
+ .lsp
+ .handle_request::<GotoDefinition, _, _>(move |_, _| async move {
+ // No definitions returned
+ Ok(Some(lsp::GotoDefinitionResponse::Link(vec![])))
+ });
+ cx.update_editor(|editor, cx| {
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
+ });
+ requests.next().await;
+ cx.background_executor.run_until_parked();
+
+ // Assert no link highlights
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { test(); }
+ "});
+
+ // Moving the mouse without cmd held, then pressing cmd, triggers the highlight
+ let hover_point = cx.display_point(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { teˇst(); }
+ "});
+ cx.update_editor(|editor, cx| {
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ false,
+ false,
+ cx,
+ );
+ });
+ cx.background_executor.run_until_parked();
+
+ // Assert no link highlights
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { test(); }
+ "});
+
+ let symbol_range = cx.lsp_range(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { «test»(); }
+ "});
+ let target_range = cx.lsp_range(indoc! {"
+ fn «test»() { do_work(); }
+ fn do_work() { test(); }
+ "});
+
+ let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
+ Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
+ lsp::LocationLink {
+ origin_selection_range: Some(symbol_range),
+ target_uri: url,
+ target_range,
+ target_selection_range: target_range,
+ },
+ ])))
+ });
+ cx.update_editor(|editor, cx| {
+ crate::element::EditorElement::modifiers_changed(
+ editor,
+ &ModifiersChangedEvent {
+ modifiers: Modifiers {
+ command: true,
+ ..Default::default()
+ },
+ },
+ cx,
+ );
+ });
+ requests.next().await;
+ cx.background_executor.run_until_parked();
+
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { «test»(); }
+ "});
+
+ // Deactivating the window dismisses the highlight
+ cx.update_workspace(|workspace, cx| {
+ workspace.on_window_activation_changed(cx);
+ });
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { test(); }
+ "});
+
+ // Moving the mouse restores the highlights.
+ cx.update_editor(|editor, cx| {
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
+ });
+ cx.background_executor.run_until_parked();
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { «test»(); }
+ "});
+
+ // Moving again within the same symbol range doesn't re-request
+ let hover_point = cx.display_point(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { tesˇt(); }
+ "});
+ cx.update_editor(|editor, cx| {
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
+ });
+ cx.background_executor.run_until_parked();
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { «test»(); }
+ "});
+
+ // A cmd click when the definition is already fetched doesn't re-request and dismisses the highlight
+ cx.update_editor(|editor, cx| {
+ go_to_fetched_definition(editor, PointForPosition::valid(hover_point), false, cx);
+ });
+ // Assert selection moved to the definition
+ cx.lsp
+ .handle_request::<GotoDefinition, _, _>(move |_, _| async move {
+ // Empty definition response to make sure we aren't hitting the LSP and are using
+ // the cached location instead
+ Ok(Some(lsp::GotoDefinitionResponse::Link(vec![])))
+ });
+ cx.background_executor.run_until_parked();
+ cx.assert_editor_state(indoc! {"
+ fn «testˇ»() { do_work(); }
+ fn do_work() { test(); }
+ "});
+
+ // Assert no link highlights after jump
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { test(); }
+ "});
+
+ // A cmd click with no definition fetched yet sends a request and jumps
+ let hover_point = cx.display_point(indoc! {"
+ fn test() { do_wˇork(); }
+ fn do_work() { test(); }
+ "});
+ let target_range = cx.lsp_range(indoc! {"
+ fn test() { do_work(); }
+ fn «do_work»() { test(); }
+ "});
+
+ let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
+ Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
+ lsp::LocationLink {
+ origin_selection_range: None,
+ target_uri: url,
+ target_range,
+ target_selection_range: target_range,
+ },
+ ])))
+ });
+ cx.update_editor(|editor, cx| {
+ go_to_fetched_definition(editor, PointForPosition::valid(hover_point), false, cx);
+ });
+ requests.next().await;
+ cx.background_executor.run_until_parked();
+ cx.assert_editor_state(indoc! {"
+ fn test() { do_work(); }
+ fn «do_workˇ»() { test(); }
+ "});
+
+ // 1. With a pending selection and the mouse over a symbol we have a response for, pressing cmd does nothing
+ // 2. Selection is completed, hovering
+ let hover_point = cx.display_point(indoc! {"
+ fn test() { do_wˇork(); }
+ fn do_work() { test(); }
+ "});
+ let target_range = cx.lsp_range(indoc! {"
+ fn test() { do_work(); }
+ fn «do_work»() { test(); }
+ "});
+ let mut requests = cx.handle_request::<GotoDefinition, _, _>(move |url, _, _| async move {
+ Ok(Some(lsp::GotoDefinitionResponse::Link(vec![
+ lsp::LocationLink {
+ origin_selection_range: None,
+ target_uri: url,
+ target_range,
+ target_selection_range: target_range,
+ },
+ ])))
+ });
+
+ // create a pending selection
+ let selection_range = cx.ranges(indoc! {"
+ fn «test() { do_w»ork(); }
+ fn do_work() { test(); }
+ "})[0]
+ .clone();
+ cx.update_editor(|editor, cx| {
+ let snapshot = editor.buffer().read(cx).snapshot(cx);
+ let anchor_range = snapshot.anchor_before(selection_range.start)
+ ..snapshot.anchor_after(selection_range.end);
+ editor.change_selections(Some(crate::Autoscroll::fit()), cx, |s| {
+ s.set_pending_anchor_range(anchor_range, crate::SelectMode::Character)
+ });
+ });
+ cx.update_editor(|editor, cx| {
+ update_go_to_definition_link(
+ editor,
+ Some(GoToDefinitionTrigger::Text(hover_point)),
+ true,
+ false,
+ cx,
+ );
+ });
+ cx.background_executor.run_until_parked();
+ assert!(requests.try_next().is_err());
+ cx.assert_editor_text_highlights::<LinkGoToDefinitionState>(indoc! {"
+ fn test() { do_work(); }
+ fn do_work() { test(); }
+ "});
+ cx.background_executor.run_until_parked();
+ }
+
+ #[gpui::test]
+ async fn test_link_go_to_inlay(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |settings| {
+ settings.defaults.inlay_hints = Some(InlayHintSettings {
+ enabled: true,
+ show_type_hints: true,
+ show_parameter_hints: true,
+ show_other_hints: true,
+ })
+ });
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ inlay_hint_provider: Some(lsp::OneOf::Left(true)),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+ cx.set_state(indoc! {"
+ struct TestStruct;
+
+ fn main() {
+ let variableˇ = TestStruct;
+ }
+ "});
+ let hint_start_offset = cx.ranges(indoc! {"
+ struct TestStruct;
+
+ fn main() {
+ let variableˇ = TestStruct;
+ }
+ "})[0]
+ .start;
+ let hint_position = cx.to_lsp(hint_start_offset);
+ let target_range = cx.lsp_range(indoc! {"
+ struct «TestStruct»;
+
+ fn main() {
+ let variable = TestStruct;
+ }
+ "});
+
+ let expected_uri = cx.buffer_lsp_url.clone();
+ let hint_label = ": TestStruct";
+ cx.lsp
+ .handle_request::<lsp::request::InlayHintRequest, _, _>(move |params, _| {
+ let expected_uri = expected_uri.clone();
+ async move {
+ assert_eq!(params.text_document.uri, expected_uri);
+ Ok(Some(vec![lsp::InlayHint {
+ position: hint_position,
+ label: lsp::InlayHintLabel::LabelParts(vec![lsp::InlayHintLabelPart {
+ value: hint_label.to_string(),
+ location: Some(lsp::Location {
+ uri: params.text_document.uri,
+ range: target_range,
+ }),
+ ..Default::default()
+ }]),
+ kind: Some(lsp::InlayHintKind::TYPE),
+ text_edits: None,
+ tooltip: None,
+ padding_left: Some(false),
+ padding_right: Some(false),
+ data: None,
+ }]))
+ }
+ })
+ .next()
+ .await;
+ cx.background_executor.run_until_parked();
+ cx.update_editor(|editor, cx| {
+ let expected_layers = vec![hint_label.to_string()];
+ assert_eq!(expected_layers, cached_hint_labels(editor));
+ assert_eq!(expected_layers, visible_hint_labels(editor, cx));
+ });
+
+ let inlay_range = cx
+ .ranges(indoc! {"
+ struct TestStruct;
+
+ fn main() {
+ let variable« »= TestStruct;
+ }
+ "})
+ .get(0)
+ .cloned()
+ .unwrap();
+ let hint_hover_position = cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let previous_valid = inlay_range.start.to_display_point(&snapshot);
+ let next_valid = inlay_range.end.to_display_point(&snapshot);
+ assert_eq!(previous_valid.row(), next_valid.row());
+ assert!(previous_valid.column() < next_valid.column());
+ let exact_unclipped = DisplayPoint::new(
+ previous_valid.row(),
+ previous_valid.column() + (hint_label.len() / 2) as u32,
+ );
+ PointForPosition {
+ previous_valid,
+ next_valid,
+ exact_unclipped,
+ column_overshoot_after_line_end: 0,
+ }
+ });
+ // Press cmd to trigger highlight
+ cx.update_editor(|editor, cx| {
+ update_inlay_link_and_hover_points(
+ &editor.snapshot(cx),
+ hint_hover_position,
+ editor,
+ true,
+ false,
+ cx,
+ );
+ });
+ cx.background_executor.run_until_parked();
+ cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let actual_highlights = snapshot
+ .inlay_highlights::<LinkGoToDefinitionState>()
+ .into_iter()
+ .flat_map(|highlights| highlights.values().map(|(_, highlight)| highlight))
+ .collect::<Vec<_>>();
+
+ let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
+ let expected_highlight = InlayHighlight {
+ inlay: InlayId::Hint(0),
+ inlay_position: buffer_snapshot.anchor_at(inlay_range.start, Bias::Right),
+ range: 0..hint_label.len(),
+ };
+ assert_set_eq!(actual_highlights, vec![&expected_highlight]);
+ });
+
+ // Releasing cmd makes the highlight go away
+ cx.update_editor(|editor, cx| {
+ crate::element::EditorElement::modifiers_changed(
+ editor,
+ &ModifiersChangedEvent {
+ modifiers: Modifiers {
+ command: false,
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ cx,
+ );
+ });
+ // Assert no link highlights
+ cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let actual_ranges = snapshot
+ .text_highlight_ranges::<LinkGoToDefinitionState>()
+ .map(|ranges| ranges.as_ref().clone().1)
+ .unwrap_or_default();
+
+ assert!(actual_ranges.is_empty(), "When no cmd is pressed, should have no hint label selected, but got: {actual_ranges:?}");
+ });
+
+ // A cmd click with no definition fetched yet sends a request and jumps
+ cx.update_editor(|editor, cx| {
+ crate::element::EditorElement::modifiers_changed(
+ editor,
+ &ModifiersChangedEvent {
+ modifiers: Modifiers {
+ command: true,
+ ..Default::default()
+ },
+ ..Default::default()
+ },
+ cx,
+ );
+ update_inlay_link_and_hover_points(
+ &editor.snapshot(cx),
+ hint_hover_position,
+ editor,
+ true,
+ false,
+ cx,
+ );
+ });
+ cx.background_executor.run_until_parked();
+ cx.update_editor(|editor, cx| {
+ go_to_fetched_type_definition(editor, hint_hover_position, false, cx);
+ });
+ cx.background_executor.run_until_parked();
+ cx.assert_editor_state(indoc! {"
+ struct «TestStructˇ»;
+
+ fn main() {
+ let variable = TestStruct;
+ }
+ "});
+ }
+}
@@ -37,19 +37,18 @@ pub fn deploy_context_menu(
});
let context_menu = ui::ContextMenu::build(cx, |menu, cx| {
- menu.action("Rename Symbol", Box::new(Rename), cx)
- .action("Go to Definition", Box::new(GoToDefinition), cx)
- .action("Go to Type Definition", Box::new(GoToTypeDefinition), cx)
- .action("Find All References", Box::new(FindAllReferences), cx)
+ menu.action("Rename Symbol", Box::new(Rename))
+ .action("Go to Definition", Box::new(GoToDefinition))
+ .action("Go to Type Definition", Box::new(GoToTypeDefinition))
+ .action("Find All References", Box::new(FindAllReferences))
.action(
"Code Actions",
Box::new(ToggleCodeActions {
deployed_from_indicator: false,
}),
- cx,
)
.separator()
- .action("Reveal in Finder", Box::new(RevealInFinder), cx)
+ .action("Reveal in Finder", Box::new(RevealInFinder))
});
let context_menu_focus = context_menu.focus_handle(cx);
cx.focus(&context_menu_focus);
@@ -69,42 +68,43 @@ pub fn deploy_context_menu(
cx.notify();
}
-// #[cfg(test)]
-// mod tests {
-// use super::*;
-// use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
-// use indoc::indoc;
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{editor_tests::init_test, test::editor_lsp_test_context::EditorLspTestContext};
+ use indoc::indoc;
-// #[gpui::test]
-// async fn test_mouse_context_menu(cx: &mut gpui::TestAppContext) {
-// init_test(cx, |_| {});
+ #[gpui::test]
+ async fn test_mouse_context_menu(cx: &mut gpui::TestAppContext) {
+ init_test(cx, |_| {});
-// let mut cx = EditorLspTestContext::new_rust(
-// lsp::ServerCapabilities {
-// hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
-// ..Default::default()
-// },
-// cx,
-// )
-// .await;
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
-// cx.set_state(indoc! {"
-// fn teˇst() {
-// do_work();
-// }
-// "});
-// let point = cx.display_point(indoc! {"
-// fn test() {
-// do_wˇork();
-// }
-// "});
-// cx.update_editor(|editor, cx| deploy_context_menu(editor, Default::default(), point, cx));
+ cx.set_state(indoc! {"
+ fn teˇst() {
+ do_work();
+ }
+ "});
+ let point = cx.display_point(indoc! {"
+ fn test() {
+ do_wˇork();
+ }
+ "});
+ cx.editor(|editor, app| assert!(editor.mouse_context_menu.is_none()));
+ cx.update_editor(|editor, cx| deploy_context_menu(editor, Default::default(), point, cx));
-// cx.assert_editor_state(indoc! {"
-// fn test() {
-// do_wˇork();
-// }
-// "});
-// cx.editor(|editor, app| assert!(editor.mouse_context_menu.read(app).visible()));
-// }
-// }
+ cx.assert_editor_state(indoc! {"
+ fn test() {
+ do_wˇork();
+ }
+ "});
+ cx.editor(|editor, app| assert!(editor.mouse_context_menu.is_some()));
+ }
+}
@@ -452,483 +452,475 @@ pub fn split_display_range_by_lines(
result
}
-// #[cfg(test)]
-// mod tests {
-// use super::*;
-// use crate::{
-// display_map::Inlay,
-// test::{},
-// Buffer, DisplayMap, ExcerptRange, InlayId, MultiBuffer,
-// };
-// use project::Project;
-// use settings::SettingsStore;
-// use util::post_inc;
-
-// #[gpui::test]
-// fn test_previous_word_start(cx: &mut gpui::AppContext) {
-// init_test(cx);
-
-// fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
-// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
-// assert_eq!(
-// previous_word_start(&snapshot, display_points[1]),
-// display_points[0]
-// );
-// }
-
-// assert("\nˇ ˇlorem", cx);
-// assert("ˇ\nˇ lorem", cx);
-// assert(" ˇloremˇ", cx);
-// assert("ˇ ˇlorem", cx);
-// assert(" ˇlorˇem", cx);
-// assert("\nlorem\nˇ ˇipsum", cx);
-// assert("\n\nˇ\nˇ", cx);
-// assert(" ˇlorem ˇipsum", cx);
-// assert("loremˇ-ˇipsum", cx);
-// assert("loremˇ-#$@ˇipsum", cx);
-// assert("ˇlorem_ˇipsum", cx);
-// assert(" ˇdefγˇ", cx);
-// assert(" ˇbcΔˇ", cx);
-// assert(" abˇ——ˇcd", cx);
-// }
-
-// #[gpui::test]
-// fn test_previous_subword_start(cx: &mut gpui::AppContext) {
-// init_test(cx);
-
-// fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
-// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
-// assert_eq!(
-// previous_subword_start(&snapshot, display_points[1]),
-// display_points[0]
-// );
-// }
-
-// // Subword boundaries are respected
-// assert("lorem_ˇipˇsum", cx);
-// assert("lorem_ˇipsumˇ", cx);
-// assert("ˇlorem_ˇipsum", cx);
-// assert("lorem_ˇipsum_ˇdolor", cx);
-// assert("loremˇIpˇsum", cx);
-// assert("loremˇIpsumˇ", cx);
-
-// // Word boundaries are still respected
-// assert("\nˇ ˇlorem", cx);
-// assert(" ˇloremˇ", cx);
-// assert(" ˇlorˇem", cx);
-// assert("\nlorem\nˇ ˇipsum", cx);
-// assert("\n\nˇ\nˇ", cx);
-// assert(" ˇlorem ˇipsum", cx);
-// assert("loremˇ-ˇipsum", cx);
-// assert("loremˇ-#$@ˇipsum", cx);
-// assert(" ˇdefγˇ", cx);
-// assert(" bcˇΔˇ", cx);
-// assert(" ˇbcδˇ", cx);
-// assert(" abˇ——ˇcd", cx);
-// }
-
-// #[gpui::test]
-// fn test_find_preceding_boundary(cx: &mut gpui::AppContext) {
-// init_test(cx);
-
-// fn assert(
-// marked_text: &str,
-// cx: &mut gpui::AppContext,
-// is_boundary: impl FnMut(char, char) -> bool,
-// ) {
-// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
-// assert_eq!(
-// find_preceding_boundary(
-// &snapshot,
-// display_points[1],
-// FindRange::MultiLine,
-// is_boundary
-// ),
-// display_points[0]
-// );
-// }
-
-// assert("abcˇdef\ngh\nijˇk", cx, |left, right| {
-// left == 'c' && right == 'd'
-// });
-// assert("abcdef\nˇgh\nijˇk", cx, |left, right| {
-// left == '\n' && right == 'g'
-// });
-// let mut line_count = 0;
-// assert("abcdef\nˇgh\nijˇk", cx, |left, _| {
-// if left == '\n' {
-// line_count += 1;
-// line_count == 2
-// } else {
-// false
-// }
-// });
-// }
-
-// #[gpui::test]
-// fn test_find_preceding_boundary_with_inlays(cx: &mut gpui::AppContext) {
-// init_test(cx);
-
-// let input_text = "abcdefghijklmnopqrstuvwxys";
-// let family_id = cx
-// .font_cache()
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = cx
-// .font_cache()
-// .select_font(family_id, &Default::default())
-// .unwrap();
-// let font_size = 14.0;
-// let buffer = MultiBuffer::build_simple(input_text, cx);
-// let buffer_snapshot = buffer.read(cx).snapshot(cx);
-// let display_map =
-// cx.add_model(|cx| DisplayMap::new(buffer, font_id, font_size, None, 1, 1, cx));
-
-// // add all kinds of inlays between two word boundaries: we should be able to cross them all, when looking for another boundary
-// let mut id = 0;
-// let inlays = (0..buffer_snapshot.len())
-// .map(|offset| {
-// [
-// Inlay {
-// id: InlayId::Suggestion(post_inc(&mut id)),
-// position: buffer_snapshot.anchor_at(offset, Bias::Left),
-// text: format!("test").into(),
-// },
-// Inlay {
-// id: InlayId::Suggestion(post_inc(&mut id)),
-// position: buffer_snapshot.anchor_at(offset, Bias::Right),
-// text: format!("test").into(),
-// },
-// Inlay {
-// id: InlayId::Hint(post_inc(&mut id)),
-// position: buffer_snapshot.anchor_at(offset, Bias::Left),
-// text: format!("test").into(),
-// },
-// Inlay {
-// id: InlayId::Hint(post_inc(&mut id)),
-// position: buffer_snapshot.anchor_at(offset, Bias::Right),
-// text: format!("test").into(),
-// },
-// ]
-// })
-// .flatten()
-// .collect();
-// let snapshot = display_map.update(cx, |map, cx| {
-// map.splice_inlays(Vec::new(), inlays, cx);
-// map.snapshot(cx)
-// });
-
-// assert_eq!(
-// find_preceding_boundary(
-// &snapshot,
-// buffer_snapshot.len().to_display_point(&snapshot),
-// FindRange::MultiLine,
-// |left, _| left == 'e',
-// ),
-// snapshot
-// .buffer_snapshot
-// .offset_to_point(5)
-// .to_display_point(&snapshot),
-// "Should not stop at inlays when looking for boundaries"
-// );
-// }
-
-// #[gpui::test]
-// fn test_next_word_end(cx: &mut gpui::AppContext) {
-// init_test(cx);
-
-// fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
-// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
-// assert_eq!(
-// next_word_end(&snapshot, display_points[0]),
-// display_points[1]
-// );
-// }
-
-// assert("\nˇ loremˇ", cx);
-// assert(" ˇloremˇ", cx);
-// assert(" lorˇemˇ", cx);
-// assert(" loremˇ ˇ\nipsum\n", cx);
-// assert("\nˇ\nˇ\n\n", cx);
-// assert("loremˇ ipsumˇ ", cx);
-// assert("loremˇ-ˇipsum", cx);
-// assert("loremˇ#$@-ˇipsum", cx);
-// assert("loremˇ_ipsumˇ", cx);
-// assert(" ˇbcΔˇ", cx);
-// assert(" abˇ——ˇcd", cx);
-// }
-
-// #[gpui::test]
-// fn test_next_subword_end(cx: &mut gpui::AppContext) {
-// init_test(cx);
-
-// fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
-// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
-// assert_eq!(
-// next_subword_end(&snapshot, display_points[0]),
-// display_points[1]
-// );
-// }
-
-// // Subword boundaries are respected
-// assert("loˇremˇ_ipsum", cx);
-// assert("ˇloremˇ_ipsum", cx);
-// assert("loremˇ_ipsumˇ", cx);
-// assert("loremˇ_ipsumˇ_dolor", cx);
-// assert("loˇremˇIpsum", cx);
-// assert("loremˇIpsumˇDolor", cx);
-
-// // Word boundaries are still respected
-// assert("\nˇ loremˇ", cx);
-// assert(" ˇloremˇ", cx);
-// assert(" lorˇemˇ", cx);
-// assert(" loremˇ ˇ\nipsum\n", cx);
-// assert("\nˇ\nˇ\n\n", cx);
-// assert("loremˇ ipsumˇ ", cx);
-// assert("loremˇ-ˇipsum", cx);
-// assert("loremˇ#$@-ˇipsum", cx);
-// assert("loremˇ_ipsumˇ", cx);
-// assert(" ˇbcˇΔ", cx);
-// assert(" abˇ——ˇcd", cx);
-// }
-
-// #[gpui::test]
-// fn test_find_boundary(cx: &mut gpui::AppContext) {
-// init_test(cx);
-
-// fn assert(
-// marked_text: &str,
-// cx: &mut gpui::AppContext,
-// is_boundary: impl FnMut(char, char) -> bool,
-// ) {
-// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
-// assert_eq!(
-// find_boundary(
-// &snapshot,
-// display_points[0],
-// FindRange::MultiLine,
-// is_boundary
-// ),
-// display_points[1]
-// );
-// }
-
-// assert("abcˇdef\ngh\nijˇk", cx, |left, right| {
-// left == 'j' && right == 'k'
-// });
-// assert("abˇcdef\ngh\nˇijk", cx, |left, right| {
-// left == '\n' && right == 'i'
-// });
-// let mut line_count = 0;
-// assert("abcˇdef\ngh\nˇijk", cx, |left, _| {
-// if left == '\n' {
-// line_count += 1;
-// line_count == 2
-// } else {
-// false
-// }
-// });
-// }
-
-// #[gpui::test]
-// fn test_surrounding_word(cx: &mut gpui::AppContext) {
-// init_test(cx);
-
-// fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
-// let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
-// assert_eq!(
-// surrounding_word(&snapshot, display_points[1]),
-// display_points[0]..display_points[2],
-// "{}",
-// marked_text.to_string()
-// );
-// }
-
-// assert("ˇˇloremˇ ipsum", cx);
-// assert("ˇloˇremˇ ipsum", cx);
-// assert("ˇloremˇˇ ipsum", cx);
-// assert("loremˇ ˇ ˇipsum", cx);
-// assert("lorem\nˇˇˇ\nipsum", cx);
-// assert("lorem\nˇˇipsumˇ", cx);
-// assert("loremˇ,ˇˇ ipsum", cx);
-// assert("ˇloremˇˇ, ipsum", cx);
-// }
-
-// #[gpui::test]
-// async fn test_move_up_and_down_with_excerpts(cx: &mut gpui::TestAppContext) {
-// cx.update(|cx| {
-// init_test(cx);
-// });
-
-// let mut cx = EditorTestContext::new(cx).await;
-// let editor = cx.editor.clone();
-// let window = cx.window.clone();
-// cx.update_window(window, |cx| {
-// let text_layout_details =
-// editor.read_with(cx, |editor, cx| editor.text_layout_details(cx));
-
-// let family_id = cx
-// .font_cache()
-// .load_family(&["Helvetica"], &Default::default())
-// .unwrap();
-// let font_id = cx
-// .font_cache()
-// .select_font(family_id, &Default::default())
-// .unwrap();
-
-// let buffer =
-// cx.add_model(|cx| Buffer::new(0, cx.model_id() as u64, "abc\ndefg\nhijkl\nmn"));
-// let multibuffer = cx.add_model(|cx| {
-// let mut multibuffer = MultiBuffer::new(0);
-// multibuffer.push_excerpts(
-// buffer.clone(),
-// [
-// ExcerptRange {
-// context: Point::new(0, 0)..Point::new(1, 4),
-// primary: None,
-// },
-// ExcerptRange {
-// context: Point::new(2, 0)..Point::new(3, 2),
-// primary: None,
-// },
-// ],
-// cx,
-// );
-// multibuffer
-// });
-// let display_map =
-// cx.add_model(|cx| DisplayMap::new(multibuffer, font_id, 14.0, None, 2, 2, cx));
-// let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
-
-// assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\nhijkl\nmn");
-
-// let col_2_x = snapshot.x_for_point(DisplayPoint::new(2, 2), &text_layout_details);
-
-// // Can't move up into the first excerpt's header
-// assert_eq!(
-// up(
-// &snapshot,
-// DisplayPoint::new(2, 2),
-// SelectionGoal::HorizontalPosition(col_2_x),
-// false,
-// &text_layout_details
-// ),
-// (
-// DisplayPoint::new(2, 0),
-// SelectionGoal::HorizontalPosition(0.0)
-// ),
-// );
-// assert_eq!(
-// up(
-// &snapshot,
-// DisplayPoint::new(2, 0),
-// SelectionGoal::None,
-// false,
-// &text_layout_details
-// ),
-// (
-// DisplayPoint::new(2, 0),
-// SelectionGoal::HorizontalPosition(0.0)
-// ),
-// );
-
-// let col_4_x = snapshot.x_for_point(DisplayPoint::new(3, 4), &text_layout_details);
-
-// // Move up and down within first excerpt
-// assert_eq!(
-// up(
-// &snapshot,
-// DisplayPoint::new(3, 4),
-// SelectionGoal::HorizontalPosition(col_4_x),
-// false,
-// &text_layout_details
-// ),
-// (
-// DisplayPoint::new(2, 3),
-// SelectionGoal::HorizontalPosition(col_4_x)
-// ),
-// );
-// assert_eq!(
-// down(
-// &snapshot,
-// DisplayPoint::new(2, 3),
-// SelectionGoal::HorizontalPosition(col_4_x),
-// false,
-// &text_layout_details
-// ),
-// (
-// DisplayPoint::new(3, 4),
-// SelectionGoal::HorizontalPosition(col_4_x)
-// ),
-// );
-
-// let col_5_x = snapshot.x_for_point(DisplayPoint::new(6, 5), &text_layout_details);
-
-// // Move up and down across second excerpt's header
-// assert_eq!(
-// up(
-// &snapshot,
-// DisplayPoint::new(6, 5),
-// SelectionGoal::HorizontalPosition(col_5_x),
-// false,
-// &text_layout_details
-// ),
-// (
-// DisplayPoint::new(3, 4),
-// SelectionGoal::HorizontalPosition(col_5_x)
-// ),
-// );
-// assert_eq!(
-// down(
-// &snapshot,
-// DisplayPoint::new(3, 4),
-// SelectionGoal::HorizontalPosition(col_5_x),
-// false,
-// &text_layout_details
-// ),
-// (
-// DisplayPoint::new(6, 5),
-// SelectionGoal::HorizontalPosition(col_5_x)
-// ),
-// );
-
-// let max_point_x = snapshot.x_for_point(DisplayPoint::new(7, 2), &text_layout_details);
-
-// // Can't move down off the end
-// assert_eq!(
-// down(
-// &snapshot,
-// DisplayPoint::new(7, 0),
-// SelectionGoal::HorizontalPosition(0.0),
-// false,
-// &text_layout_details
-// ),
-// (
-// DisplayPoint::new(7, 2),
-// SelectionGoal::HorizontalPosition(max_point_x)
-// ),
-// );
-// assert_eq!(
-// down(
-// &snapshot,
-// DisplayPoint::new(7, 2),
-// SelectionGoal::HorizontalPosition(max_point_x),
-// false,
-// &text_layout_details
-// ),
-// (
-// DisplayPoint::new(7, 2),
-// SelectionGoal::HorizontalPosition(max_point_x)
-// ),
-// );
-// });
-// }
-
-// fn init_test(cx: &mut gpui::AppContext) {
-// cx.set_global(SettingsStore::test(cx));
-// theme::init(cx);
-// language::init(cx);
-// crate::init(cx);
-// Project::init_settings(cx);
-// }
-// }
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::{
+ display_map::Inlay,
+ test::{editor_test_context::EditorTestContext, marked_display_snapshot},
+ Buffer, DisplayMap, ExcerptRange, InlayId, MultiBuffer,
+ };
+ use gpui::{font, Context as _};
+ use project::Project;
+ use settings::SettingsStore;
+ use util::post_inc;
+
+ #[gpui::test]
+ fn test_previous_word_start(cx: &mut gpui::AppContext) {
+ init_test(cx);
+
+ fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
+ let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
+ assert_eq!(
+ previous_word_start(&snapshot, display_points[1]),
+ display_points[0]
+ );
+ }
+
+ assert("\nˇ ˇlorem", cx);
+ assert("ˇ\nˇ lorem", cx);
+ assert(" ˇloremˇ", cx);
+ assert("ˇ ˇlorem", cx);
+ assert(" ˇlorˇem", cx);
+ assert("\nlorem\nˇ ˇipsum", cx);
+ assert("\n\nˇ\nˇ", cx);
+ assert(" ˇlorem ˇipsum", cx);
+ assert("loremˇ-ˇipsum", cx);
+ assert("loremˇ-#$@ˇipsum", cx);
+ assert("ˇlorem_ˇipsum", cx);
+ assert(" ˇdefγˇ", cx);
+ assert(" ˇbcΔˇ", cx);
+ assert(" abˇ——ˇcd", cx);
+ }
+
+ #[gpui::test]
+ fn test_previous_subword_start(cx: &mut gpui::AppContext) {
+ init_test(cx);
+
+ fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
+ let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
+ assert_eq!(
+ previous_subword_start(&snapshot, display_points[1]),
+ display_points[0]
+ );
+ }
+
+ // Subword boundaries are respected
+ assert("lorem_ˇipˇsum", cx);
+ assert("lorem_ˇipsumˇ", cx);
+ assert("ˇlorem_ˇipsum", cx);
+ assert("lorem_ˇipsum_ˇdolor", cx);
+ assert("loremˇIpˇsum", cx);
+ assert("loremˇIpsumˇ", cx);
+
+ // Word boundaries are still respected
+ assert("\nˇ ˇlorem", cx);
+ assert(" ˇloremˇ", cx);
+ assert(" ˇlorˇem", cx);
+ assert("\nlorem\nˇ ˇipsum", cx);
+ assert("\n\nˇ\nˇ", cx);
+ assert(" ˇlorem ˇipsum", cx);
+ assert("loremˇ-ˇipsum", cx);
+ assert("loremˇ-#$@ˇipsum", cx);
+ assert(" ˇdefγˇ", cx);
+ assert(" bcˇΔˇ", cx);
+ assert(" ˇbcδˇ", cx);
+ assert(" abˇ——ˇcd", cx);
+ }
+
+ #[gpui::test]
+ fn test_find_preceding_boundary(cx: &mut gpui::AppContext) {
+ init_test(cx);
+
+ fn assert(
+ marked_text: &str,
+ cx: &mut gpui::AppContext,
+ is_boundary: impl FnMut(char, char) -> bool,
+ ) {
+ let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
+ assert_eq!(
+ find_preceding_boundary(
+ &snapshot,
+ display_points[1],
+ FindRange::MultiLine,
+ is_boundary
+ ),
+ display_points[0]
+ );
+ }
+
+ assert("abcˇdef\ngh\nijˇk", cx, |left, right| {
+ left == 'c' && right == 'd'
+ });
+ assert("abcdef\nˇgh\nijˇk", cx, |left, right| {
+ left == '\n' && right == 'g'
+ });
+ let mut line_count = 0;
+ assert("abcdef\nˇgh\nijˇk", cx, |left, _| {
+ if left == '\n' {
+ line_count += 1;
+ line_count == 2
+ } else {
+ false
+ }
+ });
+ }
+
+ #[gpui::test]
+ fn test_find_preceding_boundary_with_inlays(cx: &mut gpui::AppContext) {
+ init_test(cx);
+
+ let input_text = "abcdefghijklmnopqrstuvwxys";
+ let font = font("Helvetica");
+ let font_size = px(14.0);
+ let buffer = MultiBuffer::build_simple(input_text, cx);
+ let buffer_snapshot = buffer.read(cx).snapshot(cx);
+ let display_map =
+ cx.build_model(|cx| DisplayMap::new(buffer, font, font_size, None, 1, 1, cx));
+
+ // Add all kinds of inlays between two word boundaries: we should be able to cross them all when looking for another boundary
+ let mut id = 0;
+ let inlays = (0..buffer_snapshot.len())
+ .map(|offset| {
+ [
+ Inlay {
+ id: InlayId::Suggestion(post_inc(&mut id)),
+ position: buffer_snapshot.anchor_at(offset, Bias::Left),
+ text: format!("test").into(),
+ },
+ Inlay {
+ id: InlayId::Suggestion(post_inc(&mut id)),
+ position: buffer_snapshot.anchor_at(offset, Bias::Right),
+ text: format!("test").into(),
+ },
+ Inlay {
+ id: InlayId::Hint(post_inc(&mut id)),
+ position: buffer_snapshot.anchor_at(offset, Bias::Left),
+ text: format!("test").into(),
+ },
+ Inlay {
+ id: InlayId::Hint(post_inc(&mut id)),
+ position: buffer_snapshot.anchor_at(offset, Bias::Right),
+ text: format!("test").into(),
+ },
+ ]
+ })
+ .flatten()
+ .collect();
+ let snapshot = display_map.update(cx, |map, cx| {
+ map.splice_inlays(Vec::new(), inlays, cx);
+ map.snapshot(cx)
+ });
+
+ assert_eq!(
+ find_preceding_boundary(
+ &snapshot,
+ buffer_snapshot.len().to_display_point(&snapshot),
+ FindRange::MultiLine,
+ |left, _| left == 'e',
+ ),
+ snapshot
+ .buffer_snapshot
+ .offset_to_point(5)
+ .to_display_point(&snapshot),
+ "Should not stop at inlays when looking for boundaries"
+ );
+ }
+
+ #[gpui::test]
+ fn test_next_word_end(cx: &mut gpui::AppContext) {
+ init_test(cx);
+
+ fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
+ let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
+ assert_eq!(
+ next_word_end(&snapshot, display_points[0]),
+ display_points[1]
+ );
+ }
+
+ assert("\nˇ loremˇ", cx);
+ assert(" ˇloremˇ", cx);
+ assert(" lorˇemˇ", cx);
+ assert(" loremˇ ˇ\nipsum\n", cx);
+ assert("\nˇ\nˇ\n\n", cx);
+ assert("loremˇ ipsumˇ ", cx);
+ assert("loremˇ-ˇipsum", cx);
+ assert("loremˇ#$@-ˇipsum", cx);
+ assert("loremˇ_ipsumˇ", cx);
+ assert(" ˇbcΔˇ", cx);
+ assert(" abˇ——ˇcd", cx);
+ }
+
+ #[gpui::test]
+ fn test_next_subword_end(cx: &mut gpui::AppContext) {
+ init_test(cx);
+
+ fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
+ let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
+ assert_eq!(
+ next_subword_end(&snapshot, display_points[0]),
+ display_points[1]
+ );
+ }
+
+ // Subword boundaries are respected
+ assert("loˇremˇ_ipsum", cx);
+ assert("ˇloremˇ_ipsum", cx);
+ assert("loremˇ_ipsumˇ", cx);
+ assert("loremˇ_ipsumˇ_dolor", cx);
+ assert("loˇremˇIpsum", cx);
+ assert("loremˇIpsumˇDolor", cx);
+
+ // Word boundaries are still respected
+ assert("\nˇ loremˇ", cx);
+ assert(" ˇloremˇ", cx);
+ assert(" lorˇemˇ", cx);
+ assert(" loremˇ ˇ\nipsum\n", cx);
+ assert("\nˇ\nˇ\n\n", cx);
+ assert("loremˇ ipsumˇ ", cx);
+ assert("loremˇ-ˇipsum", cx);
+ assert("loremˇ#$@-ˇipsum", cx);
+ assert("loremˇ_ipsumˇ", cx);
+ assert(" ˇbcˇΔ", cx);
+ assert(" abˇ——ˇcd", cx);
+ }
+
+ #[gpui::test]
+ fn test_find_boundary(cx: &mut gpui::AppContext) {
+ init_test(cx);
+
+ fn assert(
+ marked_text: &str,
+ cx: &mut gpui::AppContext,
+ is_boundary: impl FnMut(char, char) -> bool,
+ ) {
+ let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
+ assert_eq!(
+ find_boundary(
+ &snapshot,
+ display_points[0],
+ FindRange::MultiLine,
+ is_boundary
+ ),
+ display_points[1]
+ );
+ }
+
+ assert("abcˇdef\ngh\nijˇk", cx, |left, right| {
+ left == 'j' && right == 'k'
+ });
+ assert("abˇcdef\ngh\nˇijk", cx, |left, right| {
+ left == '\n' && right == 'i'
+ });
+ let mut line_count = 0;
+ assert("abcˇdef\ngh\nˇijk", cx, |left, _| {
+ if left == '\n' {
+ line_count += 1;
+ line_count == 2
+ } else {
+ false
+ }
+ });
+ }
+
+ #[gpui::test]
+ fn test_surrounding_word(cx: &mut gpui::AppContext) {
+ init_test(cx);
+
+ fn assert(marked_text: &str, cx: &mut gpui::AppContext) {
+ let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
+ assert_eq!(
+ surrounding_word(&snapshot, display_points[1]),
+ display_points[0]..display_points[2],
+ "{}",
+ marked_text.to_string()
+ );
+ }
+
+ assert("ˇˇloremˇ ipsum", cx);
+ assert("ˇloˇremˇ ipsum", cx);
+ assert("ˇloremˇˇ ipsum", cx);
+ assert("loremˇ ˇ ˇipsum", cx);
+ assert("lorem\nˇˇˇ\nipsum", cx);
+ assert("lorem\nˇˇipsumˇ", cx);
+ assert("loremˇ,ˇˇ ipsum", cx);
+ assert("ˇloremˇˇ, ipsum", cx);
+ }
+
+ #[gpui::test]
+ async fn test_move_up_and_down_with_excerpts(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| {
+ init_test(cx);
+ });
+
+ let mut cx = EditorTestContext::new(cx).await;
+ let editor = cx.editor.clone();
+ let window = cx.window.clone();
+ cx.update_window(window, |_, cx| {
+ let text_layout_details =
+ editor.update(cx, |editor, cx| editor.text_layout_details(cx));
+
+ let font = font("Helvetica");
+
+ let buffer = cx
+ .build_model(|cx| Buffer::new(0, cx.entity_id().as_u64(), "abc\ndefg\nhijkl\nmn"));
+ let multibuffer = cx.build_model(|cx| {
+ let mut multibuffer = MultiBuffer::new(0);
+ multibuffer.push_excerpts(
+ buffer.clone(),
+ [
+ ExcerptRange {
+ context: Point::new(0, 0)..Point::new(1, 4),
+ primary: None,
+ },
+ ExcerptRange {
+ context: Point::new(2, 0)..Point::new(3, 2),
+ primary: None,
+ },
+ ],
+ cx,
+ );
+ multibuffer
+ });
+ let display_map =
+ cx.build_model(|cx| DisplayMap::new(multibuffer, font, px(14.0), None, 2, 2, cx));
+ let snapshot = display_map.update(cx, |map, cx| map.snapshot(cx));
+
+ assert_eq!(snapshot.text(), "\n\nabc\ndefg\n\n\nhijkl\nmn");
+
+ let col_2_x =
+ snapshot.x_for_display_point(DisplayPoint::new(2, 2), &text_layout_details);
+
+ // Can't move up into the first excerpt's header
+ assert_eq!(
+ up(
+ &snapshot,
+ DisplayPoint::new(2, 2),
+ SelectionGoal::HorizontalPosition(col_2_x.0),
+ false,
+ &text_layout_details
+ ),
+ (
+ DisplayPoint::new(2, 0),
+ SelectionGoal::HorizontalPosition(0.0)
+ ),
+ );
+ assert_eq!(
+ up(
+ &snapshot,
+ DisplayPoint::new(2, 0),
+ SelectionGoal::None,
+ false,
+ &text_layout_details
+ ),
+ (
+ DisplayPoint::new(2, 0),
+ SelectionGoal::HorizontalPosition(0.0)
+ ),
+ );
+
+ let col_4_x =
+ snapshot.x_for_display_point(DisplayPoint::new(3, 4), &text_layout_details);
+
+ // Move up and down within first excerpt
+ assert_eq!(
+ up(
+ &snapshot,
+ DisplayPoint::new(3, 4),
+ SelectionGoal::HorizontalPosition(col_4_x.0),
+ false,
+ &text_layout_details
+ ),
+ (
+ DisplayPoint::new(2, 3),
+ SelectionGoal::HorizontalPosition(col_4_x.0)
+ ),
+ );
+ assert_eq!(
+ down(
+ &snapshot,
+ DisplayPoint::new(2, 3),
+ SelectionGoal::HorizontalPosition(col_4_x.0),
+ false,
+ &text_layout_details
+ ),
+ (
+ DisplayPoint::new(3, 4),
+ SelectionGoal::HorizontalPosition(col_4_x.0)
+ ),
+ );
+
+ let col_5_x =
+ snapshot.x_for_display_point(DisplayPoint::new(6, 5), &text_layout_details);
+
+ // Move up and down across second excerpt's header
+ assert_eq!(
+ up(
+ &snapshot,
+ DisplayPoint::new(6, 5),
+ SelectionGoal::HorizontalPosition(col_5_x.0),
+ false,
+ &text_layout_details
+ ),
+ (
+ DisplayPoint::new(3, 4),
+ SelectionGoal::HorizontalPosition(col_5_x.0)
+ ),
+ );
+ assert_eq!(
+ down(
+ &snapshot,
+ DisplayPoint::new(3, 4),
+ SelectionGoal::HorizontalPosition(col_5_x.0),
+ false,
+ &text_layout_details
+ ),
+ (
+ DisplayPoint::new(6, 5),
+ SelectionGoal::HorizontalPosition(col_5_x.0)
+ ),
+ );
+
+ let max_point_x =
+ snapshot.x_for_display_point(DisplayPoint::new(7, 2), &text_layout_details);
+
+ // Can't move down off the end
+ assert_eq!(
+ down(
+ &snapshot,
+ DisplayPoint::new(7, 0),
+ SelectionGoal::HorizontalPosition(0.0),
+ false,
+ &text_layout_details
+ ),
+ (
+ DisplayPoint::new(7, 2),
+ SelectionGoal::HorizontalPosition(max_point_x.0)
+ ),
+ );
+ assert_eq!(
+ down(
+ &snapshot,
+ DisplayPoint::new(7, 2),
+ SelectionGoal::HorizontalPosition(max_point_x.0),
+ false,
+ &text_layout_details
+ ),
+ (
+ DisplayPoint::new(7, 2),
+ SelectionGoal::HorizontalPosition(max_point_x.0)
+ ),
+ );
+ });
+ }
+
+ fn init_test(cx: &mut gpui::AppContext) {
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ theme::init(theme::LoadThemes::JustBase, cx);
+ language::init(cx);
+ crate::init(cx);
+ Project::init_settings(cx);
+ }
+}
@@ -358,7 +358,7 @@ impl AppContext {
{
let entity_id = entity.entity_id();
let handle = entity.downgrade();
- self.observers.insert(
+ let (subscription, activate) = self.observers.insert(
entity_id,
Box::new(move |cx| {
if let Some(handle) = E::upgrade_from(&handle) {
@@ -367,7 +367,9 @@ impl AppContext {
false
}
}),
- )
+ );
+ self.defer(move |_| activate());
+ subscription
}
pub fn subscribe<T, E, Evt>(
@@ -398,8 +400,7 @@ impl AppContext {
{
let entity_id = entity.entity_id();
let entity = entity.downgrade();
-
- self.event_listeners.insert(
+ let (subscription, activate) = self.event_listeners.insert(
entity_id,
(
TypeId::of::<Evt>(),
@@ -412,7 +413,9 @@ impl AppContext {
}
}),
),
- )
+ );
+ self.defer(move |_| activate());
+ subscription
}
pub fn windows(&self) -> Vec<AnyWindowHandle> {
@@ -873,13 +876,15 @@ impl AppContext {
&mut self,
mut f: impl FnMut(&mut Self) + 'static,
) -> Subscription {
- self.global_observers.insert(
+ let (subscription, activate) = self.global_observers.insert(
TypeId::of::<G>(),
Box::new(move |cx| {
f(cx);
true
}),
- )
+ );
+ self.defer(move |_| activate());
+ subscription
}
/// Move the global of the given type to the stack.
@@ -903,7 +908,7 @@ impl AppContext {
&mut self,
on_new: impl 'static + Fn(&mut V, &mut ViewContext<V>),
) -> Subscription {
- self.new_view_observers.insert(
+ let (subscription, activate) = self.new_view_observers.insert(
TypeId::of::<V>(),
Box::new(move |any_view: AnyView, cx: &mut WindowContext| {
any_view
@@ -913,7 +918,9 @@ impl AppContext {
on_new(view_state, cx);
})
}),
- )
+ );
+ activate();
+ subscription
}
pub fn observe_release<E, T>(
@@ -925,13 +932,15 @@ impl AppContext {
E: Entity<T>,
T: 'static,
{
- self.release_listeners.insert(
+ let (subscription, activate) = self.release_listeners.insert(
handle.entity_id(),
Box::new(move |entity, cx| {
let entity = entity.downcast_mut().expect("invalid entity type");
on_release(entity, cx)
}),
- )
+ );
+ activate();
+ subscription
}
pub(crate) fn push_text_style(&mut self, text_style: TextStyleRefinement) {
@@ -996,13 +1005,15 @@ impl AppContext {
where
Fut: 'static + Future<Output = ()>,
{
- self.quit_observers.insert(
+ let (subscription, activate) = self.quit_observers.insert(
(),
Box::new(move |cx| {
let future = on_quit(cx);
async move { future.await }.boxed_local()
}),
- )
+ );
+ activate();
+ subscription
}
}
@@ -482,10 +482,6 @@ impl<T: 'static> WeakModel<T> {
/// Update the entity referenced by this model with the given function if
/// the referenced entity still exists. Returns an error if the entity has
/// been released.
- ///
- /// The update function receives a context appropriate for its environment.
- /// When updating in an `AppContext`, it receives a `ModelContext`.
- /// When updating an a `WindowContext`, it receives a `ViewContext`.
pub fn update<C, R>(
&self,
cx: &mut C,
@@ -501,6 +497,21 @@ impl<T: 'static> WeakModel<T> {
.map(|this| cx.update_model(&this, update)),
)
}
+
+ /// Reads the entity referenced by this model with the given function if
+ /// the referenced entity still exists. Returns an error if the entity has
+ /// been released.
+ pub fn read_with<C, R>(&self, cx: &C, read: impl FnOnce(&T, &AppContext) -> R) -> Result<R>
+ where
+ C: Context,
+ Result<C::Result<R>>: crate::Flatten<R>,
+ {
+ crate::Flatten::flatten(
+ self.upgrade()
+ .ok_or_else(|| anyhow!("entity release"))
+ .map(|this| cx.read_model(&this, read)),
+ )
+ }
}
impl<T> Hash for WeakModel<T> {
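
For reference, the new `WeakModel::read_with` can be used roughly like this. This is a minimal sketch, not code from the change: `Counter` is a hypothetical model type, and the crate is assumed to be imported as `gpui`.

```rust
use anyhow::Result;
use gpui::{AppContext, WeakModel};

// Hypothetical model type, used only for illustration.
struct Counter {
    count: usize,
}

fn current_count(weak: &WeakModel<Counter>, cx: &AppContext) -> Result<usize> {
    // Returns Err(..) if the model has already been released; otherwise the
    // closure runs against the live entity and its value is returned.
    weak.read_with(cx, |counter, _cx| counter.count)
}
```
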
@@ -88,13 +88,15 @@ impl<'a, T: 'static> ModelContext<'a, T> {
where
T: 'static,
{
- self.app.release_listeners.insert(
+ let (subscription, activate) = self.app.release_listeners.insert(
self.model_state.entity_id,
Box::new(move |this, cx| {
let this = this.downcast_mut().expect("invalid entity type");
on_release(this, cx);
}),
- )
+ );
+ activate();
+ subscription
}
pub fn observe_release<T2, E>(
@@ -109,7 +111,7 @@ impl<'a, T: 'static> ModelContext<'a, T> {
{
let entity_id = entity.entity_id();
let this = self.weak_model();
- self.app.release_listeners.insert(
+ let (subscription, activate) = self.app.release_listeners.insert(
entity_id,
Box::new(move |entity, cx| {
let entity = entity.downcast_mut().expect("invalid entity type");
@@ -117,7 +119,9 @@ impl<'a, T: 'static> ModelContext<'a, T> {
this.update(cx, |this, cx| on_release(this, entity, cx));
}
}),
- )
+ );
+ activate();
+ subscription
}
pub fn observe_global<G: 'static>(
@@ -128,10 +132,12 @@ impl<'a, T: 'static> ModelContext<'a, T> {
T: 'static,
{
let handle = self.weak_model();
- self.global_observers.insert(
+ let (subscription, activate) = self.global_observers.insert(
TypeId::of::<G>(),
Box::new(move |cx| handle.update(cx, |view, cx| f(view, cx)).is_ok()),
- )
+ );
+ self.defer(move |_| activate());
+ subscription
}
pub fn on_app_quit<Fut>(
@@ -143,7 +149,7 @@ impl<'a, T: 'static> ModelContext<'a, T> {
T: 'static,
{
let handle = self.weak_model();
- self.app.quit_observers.insert(
+ let (subscription, activate) = self.app.quit_observers.insert(
(),
Box::new(move |cx| {
let future = handle.update(cx, |entity, cx| on_quit(entity, cx)).ok();
@@ -154,7 +160,9 @@ impl<'a, T: 'static> ModelContext<'a, T> {
}
.boxed_local()
}),
- )
+ );
+ activate();
+ subscription
}
pub fn notify(&mut self) {
@@ -1,13 +1,13 @@
use crate::{
div, Action, AnyView, AnyWindowHandle, AppCell, AppContext, AsyncAppContext,
- BackgroundExecutor, Context, Div, Entity, EventEmitter, ForegroundExecutor, InputEvent,
- KeyDownEvent, Keystroke, Model, ModelContext, Render, Result, Task, TestDispatcher,
- TestPlatform, TestWindow, TestWindowHandlers, View, ViewContext, VisualContext, WindowContext,
- WindowHandle, WindowOptions,
+ BackgroundExecutor, Bounds, Context, Div, Entity, EventEmitter, ForegroundExecutor, InputEvent,
+ KeyDownEvent, Keystroke, Model, ModelContext, Pixels, PlatformWindow, Point, Render, Result,
+ Size, Task, TestDispatcher, TestPlatform, TestWindow, TestWindowHandlers, View, ViewContext,
+ VisualContext, WindowBounds, WindowContext, WindowHandle, WindowOptions,
};
use anyhow::{anyhow, bail};
use futures::{Stream, StreamExt};
-use std::{future::Future, ops::Deref, rc::Rc, sync::Arc, time::Duration};
+use std::{future::Future, mem, ops::Deref, rc::Rc, sync::Arc, time::Duration};
#[derive(Clone)]
pub struct TestAppContext {
@@ -170,6 +170,45 @@ impl TestAppContext {
self.test_platform.has_pending_prompt()
}
+ pub fn simulate_window_resize(&self, window_handle: AnyWindowHandle, size: Size<Pixels>) {
+ let (mut handlers, scale_factor) = self
+ .app
+ .borrow_mut()
+ .update_window(window_handle, |_, cx| {
+ let platform_window = cx.window.platform_window.as_test().unwrap();
+ let scale_factor = platform_window.scale_factor();
+ match &mut platform_window.bounds {
+ WindowBounds::Fullscreen | WindowBounds::Maximized => {
+ platform_window.bounds = WindowBounds::Fixed(Bounds {
+ origin: Point::default(),
+ size: size.map(|pixels| f64::from(pixels).into()),
+ });
+ }
+ WindowBounds::Fixed(bounds) => {
+ bounds.size = size.map(|pixels| f64::from(pixels).into());
+ }
+ }
+
+ (
+ mem::take(&mut platform_window.handlers.lock().resize),
+ scale_factor,
+ )
+ })
+ .unwrap();
+
+ for handler in &mut handlers {
+ handler(size, scale_factor);
+ }
+
+ self.app
+ .borrow_mut()
+ .update_window(window_handle, |_, cx| {
+ let platform_window = cx.window.platform_window.as_test().unwrap();
+ platform_window.handlers.lock().resize = handlers;
+ })
+ .unwrap();
+ }
+
pub fn spawn<Fut, R>(&self, f: impl FnOnce(AsyncAppContext) -> Fut) -> Task<R>
where
Fut: Future<Output = R> + 'static,
@@ -343,12 +382,15 @@ impl TestAppContext {
use smol::future::FutureExt as _;
async {
- while notifications.next().await.is_some() {
+ loop {
if model.update(self, &mut predicate) {
return Ok(());
}
+
+ if notifications.next().await.is_none() {
+ bail!("model dropped")
+ }
}
- bail!("model dropped")
}
.race(timer.map(|_| Err(anyhow!("condition timed out"))))
.await
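
The `simulate_window_resize` helper added above can be driven from a test roughly as follows. This is a sketch only: the window handle is assumed to come from the usual test setup, and `px`/`size` from gpui's geometry helpers.

```rust
use gpui::{px, size, AnyWindowHandle, Pixels, Size, TestAppContext};

// Sketch: force an already-open test window to a new size and let its
// registered resize handlers run with that size and the window's scale factor.
fn shrink_window(cx: &TestAppContext, window: AnyWindowHandle) {
    let new_size: Size<Pixels> = size(px(640.), px(480.));
    cx.simulate_window_resize(window, new_size);
}
```
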
@@ -0,0 +1,48 @@
+use crate::{Bounds, Element, IntoElement, Pixels, StyleRefinement, Styled, WindowContext};
+
+pub fn canvas(callback: impl 'static + FnOnce(Bounds<Pixels>, &mut WindowContext)) -> Canvas {
+ Canvas {
+ paint_callback: Box::new(callback),
+ style: Default::default(),
+ }
+}
+
+pub struct Canvas {
+ paint_callback: Box<dyn FnOnce(Bounds<Pixels>, &mut WindowContext)>,
+ style: StyleRefinement,
+}
+
+impl IntoElement for Canvas {
+ type Element = Self;
+
+ fn element_id(&self) -> Option<crate::ElementId> {
+ None
+ }
+
+ fn into_element(self) -> Self::Element {
+ self
+ }
+}
+
+impl Element for Canvas {
+ type State = ();
+
+ fn layout(
+ &mut self,
+ _: Option<Self::State>,
+ cx: &mut WindowContext,
+ ) -> (crate::LayoutId, Self::State) {
+ let layout_id = cx.request_layout(&self.style.clone().into(), []);
+ (layout_id, ())
+ }
+
+ fn paint(self, bounds: Bounds<Pixels>, _: &mut (), cx: &mut WindowContext) {
+ (self.paint_callback)(bounds, cx)
+ }
+}
+
+impl Styled for Canvas {
+ fn style(&mut self) -> &mut crate::StyleRefinement {
+ &mut self.style
+ }
+}
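
A minimal sketch of how the new `canvas` element might be composed; the `px` constructor and the `Styled` sizing helpers `w`/`h` are assumed from gpui and are not part of this change.

```rust
use gpui::{canvas, px, Canvas, Styled};

// Sketch: a fixed-size element whose content is painted imperatively.
fn paint_placeholder() -> Canvas {
    canvas(|bounds, cx| {
        // `bounds` is the element's laid-out Bounds<Pixels>; custom painting
        // (quads, paths, glyph runs) would be issued through `cx` here.
        let _ = (bounds, cx);
    })
    .w(px(16.))
    .h(px(16.))
}
```
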
@@ -221,20 +221,6 @@ pub trait InteractiveElement: Sized + Element {
/// Add a listener for the given action, fires during the bubble event phase
fn on_action<A: Action>(mut self, listener: impl Fn(&A, &mut WindowContext) + 'static) -> Self {
- // NOTE: this debug assert has the side-effect of working around
- // a bug where a crate consisting only of action definitions does
- // not register the actions in debug builds:
- //
- // https://github.com/rust-lang/rust/issues/47384
- // https://github.com/mmastrac/rust-ctor/issues/280
- //
- // if we are relying on this side-effect still, removing the debug_assert!
- // likely breaks the command_palette tests.
- // debug_assert!(
- // A::is_registered(),
- // "{:?} is not registered as an action",
- // A::qualified_name()
- // );
self.interactivity().action_listeners.push((
TypeId::of::<A>(),
Box::new(move |action, phase, cx| {
@@ -247,6 +233,23 @@ pub trait InteractiveElement: Sized + Element {
self
}
+ fn on_boxed_action(
+ mut self,
+ action: &Box<dyn Action>,
+ listener: impl Fn(&Box<dyn Action>, &mut WindowContext) + 'static,
+ ) -> Self {
+ let action = action.boxed_clone();
+ self.interactivity().action_listeners.push((
+ (*action).type_id(),
+ Box::new(move |_, phase, cx| {
+ if phase == DispatchPhase::Bubble {
+ (listener)(&action, cx)
+ }
+ }),
+ ));
+ self
+ }
+
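
For illustration, a sketch of attaching the new boxed-action listener to an element. The action value is assumed to come from somewhere dynamic, such as a serialized menu definition; this is not code from the change itself.

```rust
use gpui::{div, Action, Div, InteractiveElement as _};

// Sketch: listen for an action whose concrete type is only known at runtime.
fn dynamic_menu_item(action: &Box<dyn Action>) -> Div {
    div().on_boxed_action(action, |_action, _cx| {
        // Runs during the bubble phase whenever an action with this TypeId
        // is dispatched, mirroring `on_action` above.
    })
}
```
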
fn on_key_down(
mut self,
listener: impl Fn(&KeyDownEvent, &mut WindowContext) + 'static,
@@ -1,3 +1,4 @@
+mod canvas;
mod div;
mod img;
mod overlay;
@@ -5,6 +6,7 @@ mod svg;
mod text;
mod uniform_list;
+pub use canvas::*;
pub use div::*;
pub use img::*;
pub use overlay::*;
@@ -128,11 +128,19 @@ impl BackgroundExecutor {
#[cfg(any(test, feature = "test-support"))]
#[track_caller]
pub fn block_test<R>(&self, future: impl Future<Output = R>) -> R {
- self.block_internal(false, future)
+ if let Ok(value) = self.block_internal(false, future, usize::MAX) {
+ value
+ } else {
+ unreachable!()
+ }
}
pub fn block<R>(&self, future: impl Future<Output = R>) -> R {
- self.block_internal(true, future)
+ if let Ok(value) = self.block_internal(true, future, usize::MAX) {
+ value
+ } else {
+ unreachable!()
+ }
}
#[track_caller]
@@ -140,7 +148,8 @@ impl BackgroundExecutor {
&self,
background_only: bool,
future: impl Future<Output = R>,
- ) -> R {
+ mut max_ticks: usize,
+ ) -> Result<R, ()> {
pin_mut!(future);
let unparker = self.dispatcher.unparker();
let awoken = Arc::new(AtomicBool::new(false));
@@ -156,8 +165,13 @@ impl BackgroundExecutor {
loop {
match future.as_mut().poll(&mut cx) {
- Poll::Ready(result) => return result,
+ Poll::Ready(result) => return Ok(result),
Poll::Pending => {
+ if max_ticks == 0 {
+ return Err(());
+ }
+ max_ticks -= 1;
+
if !self.dispatcher.tick(background_only) {
if awoken.swap(false, SeqCst) {
continue;
@@ -192,16 +206,25 @@ impl BackgroundExecutor {
return Err(future);
}
+ #[cfg(any(test, feature = "test-support"))]
+ let max_ticks = self
+ .dispatcher
+ .as_test()
+ .map_or(usize::MAX, |dispatcher| dispatcher.gen_block_on_ticks());
+ #[cfg(not(any(test, feature = "test-support")))]
+ let max_ticks = usize::MAX;
+
let mut timer = self.timer(duration).fuse();
+
let timeout = async {
futures::select_biased! {
value = future => Ok(value),
_ = timer => Err(()),
}
};
- match self.block(timeout) {
- Ok(value) => Ok(value),
- Err(_) => Err(future),
+ match self.block_internal(true, timeout, max_ticks) {
+ Ok(Ok(value)) => Ok(value),
+ _ => Err(future),
}
}
@@ -281,6 +304,11 @@ impl BackgroundExecutor {
pub fn is_main_thread(&self) -> bool {
self.dispatcher.is_main_thread()
}
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn set_block_on_ticks(&self, range: std::ops::RangeInclusive<usize>) {
+ self.dispatcher.as_test().unwrap().set_block_on_ticks(range);
+ }
}
impl ForegroundExecutor {
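
A sketch of how a randomized test might use the new tick bound, assuming `TestAppContext` exposes `background_executor` the way the editor tests above do.

```rust
// Sketch: bound how many scheduler ticks blocking calls may consume, so the
// timeout path of `block_with_timeout` is exercised across random seeds.
#[gpui::test(iterations = 10)]
async fn test_bounded_blocking(cx: &mut gpui::TestAppContext) {
    cx.background_executor.set_block_on_ticks(0..=5);
    // ... code under test that internally calls block_with_timeout ...
}
```
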
@@ -21,7 +21,7 @@ mod subscription;
mod svg_renderer;
mod taffy;
#[cfg(any(test, feature = "test-support"))]
-mod test;
+pub mod test;
mod text_system;
mod util;
mod view;
@@ -16,7 +16,7 @@ pub struct DispatchNodeId(usize);
pub(crate) struct DispatchTree {
node_stack: Vec<DispatchNodeId>,
- context_stack: Vec<KeyContext>,
+ pub(crate) context_stack: Vec<KeyContext>,
nodes: Vec<DispatchNode>,
focusable_node_ids: HashMap<FocusId, DispatchNodeId>,
keystroke_matchers: HashMap<SmallVec<[KeyContext; 4]>, KeystrokeMatcher>,
@@ -163,11 +163,25 @@ impl DispatchTree {
actions
}
- pub fn bindings_for_action(&self, action: &dyn Action) -> Vec<KeyBinding> {
+ pub fn bindings_for_action(
+ &self,
+ action: &dyn Action,
+ context_stack: &Vec<KeyContext>,
+ ) -> Vec<KeyBinding> {
self.keymap
.lock()
.bindings_for_action(action.type_id())
- .filter(|candidate| candidate.action.partial_eq(action))
+ .filter(|candidate| {
+ if !candidate.action.partial_eq(action) {
+ return false;
+ }
+ for i in 1..context_stack.len() {
+ if candidate.matches_context(&context_stack[0..=i]) {
+ return true;
+ }
+ }
+ return false;
+ })
.cloned()
.collect()
}
@@ -44,7 +44,7 @@ pub(crate) fn current_platform() -> Rc<dyn Platform> {
Rc::new(MacPlatform::new())
}
-pub(crate) trait Platform: 'static {
+pub trait Platform: 'static {
fn background_executor(&self) -> BackgroundExecutor;
fn foreground_executor(&self) -> ForegroundExecutor;
fn text_system(&self) -> Arc<dyn PlatformTextSystem>;
@@ -128,7 +128,7 @@ impl Debug for DisplayId {
unsafe impl Send for DisplayId {}
-pub(crate) trait PlatformWindow {
+pub trait PlatformWindow {
fn bounds(&self) -> WindowBounds;
fn content_size(&self) -> Size<Pixels>;
fn scale_factor(&self) -> f32;
@@ -160,7 +160,7 @@ pub(crate) trait PlatformWindow {
fn sprite_atlas(&self) -> Arc<dyn PlatformAtlas>;
#[cfg(any(test, feature = "test-support"))]
- fn as_test(&self) -> Option<&TestWindow> {
+ fn as_test(&mut self) -> Option<&mut TestWindow> {
None
}
}
@@ -7,6 +7,7 @@ use parking_lot::Mutex;
use rand::prelude::*;
use std::{
future::Future,
+ ops::RangeInclusive,
pin::Pin,
sync::Arc,
task::{Context, Poll},
@@ -36,6 +37,7 @@ struct TestDispatcherState {
allow_parking: bool,
waiting_backtrace: Option<Backtrace>,
deprioritized_task_labels: HashSet<TaskLabel>,
+ block_on_ticks: RangeInclusive<usize>,
}
impl TestDispatcher {
@@ -53,6 +55,7 @@ impl TestDispatcher {
allow_parking: false,
waiting_backtrace: None,
deprioritized_task_labels: Default::default(),
+ block_on_ticks: 0..=1000,
};
TestDispatcher {
@@ -82,8 +85,8 @@ impl TestDispatcher {
}
pub fn simulate_random_delay(&self) -> impl 'static + Send + Future<Output = ()> {
- pub struct YieldNow {
- count: usize,
+ struct YieldNow {
+ pub(crate) count: usize,
}
impl Future for YieldNow {
@@ -142,6 +145,16 @@ impl TestDispatcher {
pub fn rng(&self) -> StdRng {
self.state.lock().random.clone()
}
+
+ pub fn set_block_on_ticks(&self, range: std::ops::RangeInclusive<usize>) {
+ self.state.lock().block_on_ticks = range;
+ }
+
+ pub fn gen_block_on_ticks(&self) -> usize {
+ let mut lock = self.state.lock();
+ let block_on_ticks = lock.block_on_ticks.clone();
+ lock.random.gen_range(block_on_ticks)
+ }
}
impl Clone for TestDispatcher {
@@ -19,7 +19,7 @@ pub(crate) struct TestWindowHandlers {
}
pub struct TestWindow {
- bounds: WindowBounds,
+ pub(crate) bounds: WindowBounds,
current_scene: Mutex<Option<Scene>>,
display: Rc<dyn PlatformDisplay>,
pub(crate) window_title: Option<String>,
@@ -170,7 +170,7 @@ impl PlatformWindow for TestWindow {
self.sprite_atlas.clone()
}
- fn as_test(&self) -> Option<&TestWindow> {
+ fn as_test(&mut self) -> Option<&mut TestWindow> {
Some(self)
}
}
@@ -198,7 +198,7 @@ impl SceneBuilder {
}
}
-pub(crate) struct Scene {
+pub struct Scene {
pub shadows: Vec<Shadow>,
pub quads: Vec<Quad>,
pub paths: Vec<Path<ScaledPixels>>,
@@ -214,7 +214,7 @@ impl Scene {
&self.paths
}
- pub fn batches(&self) -> impl Iterator<Item = PrimitiveBatch> {
+ pub(crate) fn batches(&self) -> impl Iterator<Item = PrimitiveBatch> {
BatchIterator {
shadows: &self.shadows,
shadows_start: 0,
@@ -208,8 +208,9 @@ impl TextStyle {
}
}
+ /// Returns the rounded line height in pixels.
pub fn line_height_in_pixels(&self, rem_size: Pixels) -> Pixels {
- self.line_height.to_pixels(self.font_size, rem_size)
+ self.line_height.to_pixels(self.font_size, rem_size).round()
}
pub fn to_run(&self, len: usize) -> TextRun {
@@ -1,6 +1,6 @@
use collections::{BTreeMap, BTreeSet};
use parking_lot::Mutex;
-use std::{fmt::Debug, mem, sync::Arc};
+use std::{cell::Cell, fmt::Debug, mem, rc::Rc, sync::Arc};
use util::post_inc;
pub(crate) struct SubscriberSet<EmitterKey, Callback>(
@@ -14,11 +14,16 @@ impl<EmitterKey, Callback> Clone for SubscriberSet<EmitterKey, Callback> {
}
struct SubscriberSetState<EmitterKey, Callback> {
- subscribers: BTreeMap<EmitterKey, Option<BTreeMap<usize, Callback>>>,
+ subscribers: BTreeMap<EmitterKey, Option<BTreeMap<usize, Subscriber<Callback>>>>,
dropped_subscribers: BTreeSet<(EmitterKey, usize)>,
next_subscriber_id: usize,
}
+struct Subscriber<Callback> {
+ active: Rc<Cell<bool>>,
+ callback: Callback,
+}
+
impl<EmitterKey, Callback> SubscriberSet<EmitterKey, Callback>
where
EmitterKey: 'static + Ord + Clone + Debug,
@@ -32,16 +37,33 @@ where
})))
}
- pub fn insert(&self, emitter_key: EmitterKey, callback: Callback) -> Subscription {
+ /// Inserts a new [`Subscription`] for the given `emitter_key`. By default, subscriptions
+ /// are inert, meaning they are skipped by [`SubscriberSet::remove`] and [`SubscriberSet::retain`].
+ /// This method returns a tuple of a [`Subscription`] and an `impl FnOnce`; call the latter
+ /// to activate the [`Subscription`].
+ #[must_use]
+ pub fn insert(
+ &self,
+ emitter_key: EmitterKey,
+ callback: Callback,
+ ) -> (Subscription, impl FnOnce()) {
+ let active = Rc::new(Cell::new(false));
let mut lock = self.0.lock();
let subscriber_id = post_inc(&mut lock.next_subscriber_id);
lock.subscribers
.entry(emitter_key.clone())
.or_default()
.get_or_insert_with(|| Default::default())
- .insert(subscriber_id, callback);
+ .insert(
+ subscriber_id,
+ Subscriber {
+ active: active.clone(),
+ callback,
+ },
+ );
let this = self.0.clone();
- Subscription {
+
+ let subscription = Subscription {
unsubscribe: Some(Box::new(move || {
let mut lock = this.lock();
let Some(subscribers) = lock.subscribers.get_mut(&emitter_key) else {
@@ -63,7 +85,8 @@ where
lock.dropped_subscribers
.insert((emitter_key, subscriber_id));
})),
- }
+ };
+ (subscription, move || active.set(true))
}
pub fn remove(&self, emitter: &EmitterKey) -> impl IntoIterator<Item = Callback> {
@@ -73,6 +96,13 @@ where
.map(|s| s.into_values())
.into_iter()
.flatten()
+ .filter_map(|subscriber| {
+ if subscriber.active.get() {
+ Some(subscriber.callback)
+ } else {
+ None
+ }
+ })
}
/// Call the given callback for each subscriber to the given emitter.
@@ -91,7 +121,13 @@ where
return;
};
- subscribers.retain(|_, callback| f(callback));
+ subscribers.retain(|_, subscriber| {
+ if subscriber.active.get() {
+ f(&mut subscriber.callback)
+ } else {
+ true
+ }
+ });
let mut lock = self.0.lock();
// Add any new subscribers that were added while invoking the callback.
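
The change above makes freshly inserted subscribers inert until the returned activation closure runs, so a callback registered while an event is being emitted is not invoked for that same emission. Below is a minimal sketch of the underlying active-flag pattern, using only std; Subscriber here is a local illustration, not the gpui type.

    use std::{cell::Cell, rc::Rc};

    // Local illustration of the pattern: a shared "active" flag next to the callback.
    struct Subscriber<F> {
        active: Rc<Cell<bool>>,
        callback: F,
    }

    fn main() {
        let mut subscribers: Vec<Subscriber<Box<dyn FnMut()>>> = Vec::new();

        // Inserting yields the stored subscriber plus an activation closure,
        // mirroring the (Subscription, impl FnOnce()) tuple in the diff.
        let active = Rc::new(Cell::new(false));
        subscribers.push(Subscriber {
            active: active.clone(),
            callback: Box::new(|| println!("notified")),
        });
        let activate = move || active.set(true);

        // An emission that runs before activation skips the new subscriber...
        for subscriber in &mut subscribers {
            if subscriber.active.get() {
                (subscriber.callback)();
            }
        }

        // ...while emissions after the deferred activation include it.
        activate();
        for subscriber in &mut subscribers {
            if subscriber.active.get() {
                (subscriber.callback)();
            }
        }
    }
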
@@ -1,5 +1,7 @@
-use crate::TestDispatcher;
+use crate::{Entity, Subscription, TestAppContext, TestDispatcher};
+use futures::StreamExt as _;
use rand::prelude::*;
+use smol::channel;
use std::{
env,
panic::{self, RefUnwindSafe},
@@ -49,3 +51,30 @@ pub fn run_test(
}
}
}
+
+pub struct Observation<T> {
+ rx: channel::Receiver<T>,
+ _subscription: Subscription,
+}
+
+impl<T: 'static> futures::Stream for Observation<T> {
+ type Item = T;
+
+ fn poll_next(
+ mut self: std::pin::Pin<&mut Self>,
+ cx: &mut std::task::Context<'_>,
+ ) -> std::task::Poll<Option<Self::Item>> {
+ self.rx.poll_next_unpin(cx)
+ }
+}
+
+pub fn observe<T: 'static>(entity: &impl Entity<T>, cx: &mut TestAppContext) -> Observation<()> {
+ let (tx, rx) = smol::channel::unbounded();
+ let _subscription = cx.update(|cx| {
+ cx.observe(entity, move |_, _| {
+ let _ = smol::block_on(tx.send(()));
+ })
+ });
+
+ Observation { rx, _subscription }
+}
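
The observe helper above turns entity notifications into a futures::Stream, so a test can await the next notification with .next().await. A hedged, standalone sketch of the same channel-to-stream adapter follows, assuming the smol and futures crates and leaving out gpui's Entity/TestAppContext.

    use futures::StreamExt as _;
    use smol::channel;

    fn main() {
        smol::block_on(async {
            // The receiver side plays the role of Observation<T>: async-channel
            // receivers already implement futures::Stream.
            let (tx, mut rx) = channel::unbounded::<()>();

            // Whatever stands in for cx.observe(...) only needs to push a unit
            // value each time the observed entity notifies.
            tx.send(()).await.unwrap();

            assert!(rx.next().await.is_some(), "expected one notification");
        });
    }
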
@@ -72,7 +72,7 @@ impl TextSystem {
}
}
- pub fn bounding_box(&self, font_id: FontId, font_size: Pixels) -> Result<Bounds<Pixels>> {
+ pub fn bounding_box(&self, font_id: FontId, font_size: Pixels) -> Bounds<Pixels> {
self.read_metrics(font_id, |metrics| metrics.bounding_box(font_size))
}
@@ -89,9 +89,9 @@ impl TextSystem {
let bounds = self
.platform_text_system
.typographic_bounds(font_id, glyph_id)?;
- self.read_metrics(font_id, |metrics| {
+ Ok(self.read_metrics(font_id, |metrics| {
(bounds / metrics.units_per_em as f32 * font_size.0).map(px)
- })
+ }))
}
pub fn advance(&self, font_id: FontId, font_size: Pixels, ch: char) -> Result<Size<Pixels>> {
@@ -100,28 +100,28 @@ impl TextSystem {
.glyph_for_char(font_id, ch)
.ok_or_else(|| anyhow!("glyph not found for character '{}'", ch))?;
let result = self.platform_text_system.advance(font_id, glyph_id)?
- / self.units_per_em(font_id)? as f32;
+ / self.units_per_em(font_id) as f32;
Ok(result * font_size)
}
- pub fn units_per_em(&self, font_id: FontId) -> Result<u32> {
+ pub fn units_per_em(&self, font_id: FontId) -> u32 {
self.read_metrics(font_id, |metrics| metrics.units_per_em as u32)
}
- pub fn cap_height(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> {
+ pub fn cap_height(&self, font_id: FontId, font_size: Pixels) -> Pixels {
self.read_metrics(font_id, |metrics| metrics.cap_height(font_size))
}
- pub fn x_height(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> {
+ pub fn x_height(&self, font_id: FontId, font_size: Pixels) -> Pixels {
self.read_metrics(font_id, |metrics| metrics.x_height(font_size))
}
- pub fn ascent(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> {
+ pub fn ascent(&self, font_id: FontId, font_size: Pixels) -> Pixels {
self.read_metrics(font_id, |metrics| metrics.ascent(font_size))
}
- pub fn descent(&self, font_id: FontId, font_size: Pixels) -> Result<Pixels> {
+ pub fn descent(&self, font_id: FontId, font_size: Pixels) -> Pixels {
self.read_metrics(font_id, |metrics| metrics.descent(font_size))
}
@@ -130,24 +130,24 @@ impl TextSystem {
font_id: FontId,
font_size: Pixels,
line_height: Pixels,
- ) -> Result<Pixels> {
- let ascent = self.ascent(font_id, font_size)?;
- let descent = self.descent(font_id, font_size)?;
+ ) -> Pixels {
+ let ascent = self.ascent(font_id, font_size);
+ let descent = self.descent(font_id, font_size);
let padding_top = (line_height - ascent - descent) / 2.;
- Ok(padding_top + ascent)
+ padding_top + ascent
}
- fn read_metrics<T>(&self, font_id: FontId, read: impl FnOnce(&FontMetrics) -> T) -> Result<T> {
+ fn read_metrics<T>(&self, font_id: FontId, read: impl FnOnce(&FontMetrics) -> T) -> T {
let lock = self.font_metrics.upgradable_read();
if let Some(metrics) = lock.get(&font_id) {
- Ok(read(metrics))
+ read(metrics)
} else {
let mut lock = RwLockUpgradableReadGuard::upgrade(lock);
let metrics = lock
.entry(font_id)
.or_insert_with(|| self.platform_text_system.font_metrics(font_id));
- Ok(read(metrics))
+ read(metrics)
}
}
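
These accessors drop their Result because read_metrics now fills its cache lazily instead of failing on a miss. Below is a small standalone sketch of that upgradable-read cache pattern, assuming parking_lot; Metrics and the u64 font id are illustrative stand-ins.

    use parking_lot::{RwLock, RwLockUpgradableReadGuard};
    use std::collections::HashMap;

    struct Metrics {
        units_per_em: u32,
    }

    struct MetricsCache {
        metrics: RwLock<HashMap<u64, Metrics>>,
    }

    impl MetricsCache {
        // Infallible read: take an upgradable read lock and only upgrade to a
        // write lock when the entry is missing and has to be computed.
        fn read_metrics<T>(&self, font_id: u64, read: impl FnOnce(&Metrics) -> T) -> T {
            let lock = self.metrics.upgradable_read();
            if let Some(metrics) = lock.get(&font_id) {
                read(metrics)
            } else {
                let mut lock = RwLockUpgradableReadGuard::upgrade(lock);
                let metrics = lock
                    .entry(font_id)
                    .or_insert_with(|| Metrics { units_per_em: 1000 });
                read(metrics)
            }
        }
    }

    fn main() {
        let cache = MetricsCache {
            metrics: RwLock::new(HashMap::new()),
        };
        assert_eq!(cache.read_metrics(1, |m| m.units_per_em), 1000);
    }
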
@@ -101,9 +101,7 @@ fn paint_line(
let mut glyph_origin = origin;
let mut prev_glyph_position = Point::default();
for (run_ix, run) in layout.runs.iter().enumerate() {
- let max_glyph_size = text_system
- .bounding_box(run.font_id, layout.font_size)?
- .size;
+ let max_glyph_size = text_system.bounding_box(run.font_id, layout.font_size).size;
for (glyph_ix, glyph) in run.glyphs.iter().enumerate() {
glyph_origin.x += glyph.position.x - prev_glyph_position.x;
@@ -490,7 +490,7 @@ impl<'a> WindowContext<'a> {
let entity_id = entity.entity_id();
let entity = entity.downgrade();
let window_handle = self.window.handle;
- self.app.event_listeners.insert(
+ let (subscription, activate) = self.app.event_listeners.insert(
entity_id,
(
TypeId::of::<Evt>(),
@@ -508,7 +508,9 @@ impl<'a> WindowContext<'a> {
.unwrap_or(false)
}),
),
- )
+ );
+ self.app.defer(move |_| activate());
+ subscription
}
/// Create an `AsyncWindowContext`, which has a static lifetime and can be held across
@@ -1348,6 +1350,8 @@ impl<'a> WindowContext<'a> {
.dispatch_tree
.dispatch_path(node_id);
+ let mut actions: Vec<Box<dyn Action>> = Vec::new();
+
// Capture phase
let mut context_stack: SmallVec<[KeyContext; 16]> = SmallVec::new();
self.propagate_event = true;
@@ -1382,22 +1386,26 @@ impl<'a> WindowContext<'a> {
let node = self.window.current_frame.dispatch_tree.node(*node_id);
if !node.context.is_empty() {
if let Some(key_down_event) = event.downcast_ref::<KeyDownEvent>() {
- if let Some(action) = self
+ if let Some(found) = self
.window
.current_frame
.dispatch_tree
.dispatch_key(&key_down_event.keystroke, &context_stack)
{
- self.dispatch_action_on_node(*node_id, action);
- if !self.propagate_event {
- return;
- }
+ actions.push(found.boxed_clone())
}
}
context_stack.pop();
}
}
+
+ for action in actions {
+ self.dispatch_action_on_node(node_id, action);
+ if !self.propagate_event {
+ return;
+ }
+ }
}
}
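
The keyboard dispatch above now records every matched action during traversal and dispatches them afterwards, so dispatch handlers can mutate the window without aliasing the dispatch tree that is still being walked. A trivial standalone sketch of the collect-then-dispatch shape, with plain strings in place of gpui actions:

    // Traverse first, collecting matches; dispatch only after the traversal's
    // borrow of the node list has ended.
    fn dispatch(nodes: &[Option<&str>]) -> Vec<String> {
        let mut actions: Vec<String> = Vec::new();
        for node in nodes {
            if let Some(found) = node {
                actions.push(found.to_string());
            }
        }

        let mut dispatched = Vec::new();
        for action in actions {
            // The dispatch phase is free to mutate state (here it just records
            // the call) and to stop early if propagation were cancelled.
            dispatched.push(action);
        }
        dispatched
    }

    fn main() {
        let dispatched = dispatch(&[None, Some("editor::Save"), Some("menu::Confirm")]);
        assert_eq!(dispatched, vec!["editor::Save", "menu::Confirm"]);
    }
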
@@ -1425,7 +1433,6 @@ impl<'a> WindowContext<'a> {
}
}
}
-
// Bubble phase
for node_id in dispatch_path.iter().rev() {
let node = self.window.current_frame.dispatch_tree.node(*node_id);
@@ -1453,10 +1460,12 @@ impl<'a> WindowContext<'a> {
f: impl Fn(&mut WindowContext<'_>) + 'static,
) -> Subscription {
let window_handle = self.window.handle;
- self.global_observers.insert(
+ let (subscription, activate) = self.global_observers.insert(
TypeId::of::<G>(),
Box::new(move |cx| window_handle.update(cx, |_, cx| f(cx)).is_ok()),
- )
+ );
+ self.app.defer(move |_| activate());
+ subscription
}
pub fn activate_window(&self) {
@@ -1493,9 +1502,30 @@ impl<'a> WindowContext<'a> {
pub fn bindings_for_action(&self, action: &dyn Action) -> Vec<KeyBinding> {
self.window
- .current_frame
+ .previous_frame
.dispatch_tree
- .bindings_for_action(action)
+ .bindings_for_action(
+ action,
+ &self.window.previous_frame.dispatch_tree.context_stack,
+ )
+ }
+
+ pub fn bindings_for_action_in(
+ &self,
+ action: &dyn Action,
+ focus_handle: &FocusHandle,
+ ) -> Vec<KeyBinding> {
+ let dispatch_tree = &self.window.previous_frame.dispatch_tree;
+
+ let Some(node_id) = dispatch_tree.focusable_node_id(focus_handle.id) else {
+ return vec![];
+ };
+ let context_stack = dispatch_tree
+ .dispatch_path(node_id)
+ .into_iter()
+ .map(|node_id| dispatch_tree.node(node_id).context.clone())
+ .collect();
+ dispatch_tree.bindings_for_action(action, &context_stack)
}
pub fn listener_for<V: Render, E>(
@@ -2096,7 +2126,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
let entity_id = entity.entity_id();
let entity = entity.downgrade();
let window_handle = self.window.handle;
- self.app.observers.insert(
+ let (subscription, activate) = self.app.observers.insert(
entity_id,
Box::new(move |cx| {
window_handle
@@ -2110,7 +2140,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
})
.unwrap_or(false)
}),
- )
+ );
+ self.app.defer(move |_| activate());
+ subscription
}
pub fn subscribe<V2, E, Evt>(
@@ -2127,7 +2159,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
let entity_id = entity.entity_id();
let handle = entity.downgrade();
let window_handle = self.window.handle;
- self.app.event_listeners.insert(
+ let (subscription, activate) = self.app.event_listeners.insert(
entity_id,
(
TypeId::of::<Evt>(),
@@ -2145,7 +2177,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
.unwrap_or(false)
}),
),
- )
+ );
+ self.app.defer(move |_| activate());
+ subscription
}
pub fn on_release(
@@ -2153,13 +2187,15 @@ impl<'a, V: 'static> ViewContext<'a, V> {
on_release: impl FnOnce(&mut V, &mut WindowContext) + 'static,
) -> Subscription {
let window_handle = self.window.handle;
- self.app.release_listeners.insert(
+ let (subscription, activate) = self.app.release_listeners.insert(
self.view.model.entity_id,
Box::new(move |this, cx| {
let this = this.downcast_mut().expect("invalid entity type");
let _ = window_handle.update(cx, |_, cx| on_release(this, cx));
}),
- )
+ );
+ activate();
+ subscription
}
pub fn observe_release<V2, E>(
@@ -2175,7 +2211,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
let view = self.view().downgrade();
let entity_id = entity.entity_id();
let window_handle = self.window.handle;
- self.app.release_listeners.insert(
+ let (subscription, activate) = self.app.release_listeners.insert(
entity_id,
Box::new(move |entity, cx| {
let entity = entity.downcast_mut().expect("invalid entity type");
@@ -2183,7 +2219,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
view.update(cx, |this, cx| on_release(this, entity, cx))
});
}),
- )
+ );
+ activate();
+ subscription
}
pub fn notify(&mut self) {
@@ -2198,10 +2236,12 @@ impl<'a, V: 'static> ViewContext<'a, V> {
mut callback: impl FnMut(&mut V, &mut ViewContext<V>) + 'static,
) -> Subscription {
let view = self.view.downgrade();
- self.window.bounds_observers.insert(
+ let (subscription, activate) = self.window.bounds_observers.insert(
(),
Box::new(move |cx| view.update(cx, |view, cx| callback(view, cx)).is_ok()),
- )
+ );
+ activate();
+ subscription
}
pub fn observe_window_activation(
@@ -2209,10 +2249,12 @@ impl<'a, V: 'static> ViewContext<'a, V> {
mut callback: impl FnMut(&mut V, &mut ViewContext<V>) + 'static,
) -> Subscription {
let view = self.view.downgrade();
- self.window.activation_observers.insert(
+ let (subscription, activate) = self.window.activation_observers.insert(
(),
Box::new(move |cx| view.update(cx, |view, cx| callback(view, cx)).is_ok()),
- )
+ );
+ activate();
+ subscription
}
/// Register a listener to be called when the given focus handle receives focus.
@@ -2225,7 +2267,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
) -> Subscription {
let view = self.view.downgrade();
let focus_id = handle.id;
- self.window.focus_listeners.insert(
+ let (subscription, activate) = self.window.focus_listeners.insert(
(),
Box::new(move |event, cx| {
view.update(cx, |view, cx| {
@@ -2235,7 +2277,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
})
.is_ok()
}),
- )
+ );
+ self.app.defer(move |_| activate());
+ subscription
}
/// Register a listener to be called when the given focus handle or one of its descendants receives focus.
@@ -2248,7 +2292,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
) -> Subscription {
let view = self.view.downgrade();
let focus_id = handle.id;
- self.window.focus_listeners.insert(
+ let (subscription, activate) = self.window.focus_listeners.insert(
(),
Box::new(move |event, cx| {
view.update(cx, |view, cx| {
@@ -2262,7 +2306,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
})
.is_ok()
}),
- )
+ );
+ self.app.defer(move |_| activate());
+ subscription
}
/// Register a listener to be called when the given focus handle loses focus.
@@ -2275,7 +2321,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
) -> Subscription {
let view = self.view.downgrade();
let focus_id = handle.id;
- self.window.focus_listeners.insert(
+ let (subscription, activate) = self.window.focus_listeners.insert(
(),
Box::new(move |event, cx| {
view.update(cx, |view, cx| {
@@ -2285,7 +2331,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
})
.is_ok()
}),
- )
+ );
+ self.app.defer(move |_| activate());
+ subscription
}
/// Register a listener to be called when the given focus handle or one of its descendants loses focus.
@@ -2298,7 +2346,7 @@ impl<'a, V: 'static> ViewContext<'a, V> {
) -> Subscription {
let view = self.view.downgrade();
let focus_id = handle.id;
- self.window.focus_listeners.insert(
+ let (subscription, activate) = self.window.focus_listeners.insert(
(),
Box::new(move |event, cx| {
view.update(cx, |view, cx| {
@@ -2312,7 +2360,9 @@ impl<'a, V: 'static> ViewContext<'a, V> {
})
.is_ok()
}),
- )
+ );
+ self.app.defer(move |_| activate());
+ subscription
}
pub fn spawn<Fut, R>(
@@ -2343,14 +2393,16 @@ impl<'a, V: 'static> ViewContext<'a, V> {
) -> Subscription {
let window_handle = self.window.handle;
let view = self.view().downgrade();
- self.global_observers.insert(
+ let (subscription, activate) = self.global_observers.insert(
TypeId::of::<G>(),
Box::new(move |cx| {
window_handle
.update(cx, |_, cx| view.update(cx, |view, cx| f(view, cx)).is_ok())
.unwrap_or(false)
}),
- )
+ );
+ self.app.defer(move |_| activate());
+ subscription
}
pub fn on_mouse_event<Event: 'static>(
@@ -2708,6 +2760,7 @@ pub enum ElementId {
Integer(usize),
Name(SharedString),
FocusHandle(FocusId),
+ NamedInteger(SharedString, usize),
}
impl ElementId {
@@ -2757,3 +2810,9 @@ impl<'a> From<&'a FocusHandle> for ElementId {
ElementId::FocusHandle(handle.id)
}
}
+
+impl From<(&'static str, EntityId)> for ElementId {
+ fn from((name, id): (&'static str, EntityId)) -> Self {
+ ElementId::NamedInteger(name.into(), id.as_u64() as usize)
+ }
+}
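
With the new NamedInteger variant, a ("name", entity_id) pair converts directly into an ElementId, which is convenient for elements keyed by entity. A standalone sketch of the same conversion on illustrative local types (not the gpui definitions):

    use std::borrow::Cow;

    // Illustrative mirror of the new variant: a static name paired with an integer.
    #[derive(Debug, PartialEq)]
    enum ElementId {
        NamedInteger(Cow<'static, str>, usize),
    }

    // Hypothetical stand-in for gpui's EntityId.
    #[derive(Clone, Copy)]
    struct EntityId(u64);

    impl From<(&'static str, EntityId)> for ElementId {
        fn from((name, id): (&'static str, EntityId)) -> Self {
            ElementId::NamedInteger(Cow::Borrowed(name), id.0 as usize)
        }
    }

    fn main() {
        let id: ElementId = ("list-item", EntityId(7)).into();
        assert_eq!(id, ElementId::NamedInteger(Cow::Borrowed("list-item"), 7));
    }
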
@@ -1121,20 +1121,22 @@ impl Project {
project_path: impl Into<ProjectPath>,
is_directory: bool,
cx: &mut ModelContext<Self>,
- ) -> Option<Task<Result<Entry>>> {
+ ) -> Task<Result<Option<Entry>>> {
let project_path = project_path.into();
- let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
+ let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
+ return Task::ready(Ok(None));
+ };
if self.is_local() {
- Some(worktree.update(cx, |worktree, cx| {
+ worktree.update(cx, |worktree, cx| {
worktree
.as_local_mut()
.unwrap()
.create_entry(project_path.path, is_directory, cx)
- }))
+ })
} else {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
- Some(cx.spawn_weak(|_, mut cx| async move {
+ cx.spawn_weak(|_, mut cx| async move {
let response = client
.request(proto::CreateProjectEntry {
worktree_id: project_path.worktree_id.to_proto(),
@@ -1143,19 +1145,20 @@ impl Project {
is_directory,
})
.await?;
- let entry = response
- .entry
- .ok_or_else(|| anyhow!("missing entry in response"))?;
- worktree
- .update(&mut cx, |worktree, cx| {
- worktree.as_remote_mut().unwrap().insert_entry(
- entry,
- response.worktree_scan_id as usize,
- cx,
- )
- })
- .await
- }))
+ match response.entry {
+ Some(entry) => worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree.as_remote_mut().unwrap().insert_entry(
+ entry,
+ response.worktree_scan_id as usize,
+ cx,
+ )
+ })
+ .await
+ .map(Some),
+ None => Ok(None),
+ }
+ })
}
}
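
create_entry, and rename_entry/copy_entry below, now return Task<Result<Option<Entry>>>, where None means the worktree or entry is gone or the path is excluded from scanning. Callers that still require an entry can map None back into an error; a hedged sketch with a hypothetical require_entry helper, assuming anyhow:

    use anyhow::{anyhow, Result};

    // Illustrative stand-in for a worktree entry.
    #[derive(Debug)]
    struct Entry {
        id: u64,
    }

    // Hypothetical helper: after awaiting the task, treat a missing entry as an
    // error where the old Option<Task<Result<Entry>>> API would have failed.
    fn require_entry(created: Result<Option<Entry>>) -> Result<Entry> {
        created?.ok_or_else(|| anyhow!("project entry was not created"))
    }

    fn main() -> Result<()> {
        let entry = require_entry(Ok(Some(Entry { id: 1 })))?;
        assert_eq!(entry.id, 1);
        assert!(require_entry(Ok(None)).is_err());
        Ok(())
    }
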
@@ -1164,8 +1167,10 @@ impl Project {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>,
- ) -> Option<Task<Result<Entry>>> {
- let worktree = self.worktree_for_entry(entry_id, cx)?;
+ ) -> Task<Result<Option<Entry>>> {
+ let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
+ return Task::ready(Ok(None));
+ };
let new_path = new_path.into();
if self.is_local() {
worktree.update(cx, |worktree, cx| {
@@ -1178,7 +1183,7 @@ impl Project {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
- Some(cx.spawn_weak(|_, mut cx| async move {
+ cx.spawn_weak(|_, mut cx| async move {
let response = client
.request(proto::CopyProjectEntry {
project_id,
@@ -1186,19 +1191,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(),
})
.await?;
- let entry = response
- .entry
- .ok_or_else(|| anyhow!("missing entry in response"))?;
- worktree
- .update(&mut cx, |worktree, cx| {
- worktree.as_remote_mut().unwrap().insert_entry(
- entry,
- response.worktree_scan_id as usize,
- cx,
- )
- })
- .await
- }))
+ match response.entry {
+ Some(entry) => worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree.as_remote_mut().unwrap().insert_entry(
+ entry,
+ response.worktree_scan_id as usize,
+ cx,
+ )
+ })
+ .await
+ .map(Some),
+ None => Ok(None),
+ }
+ })
}
}
@@ -1207,8 +1213,10 @@ impl Project {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>,
- ) -> Option<Task<Result<Entry>>> {
- let worktree = self.worktree_for_entry(entry_id, cx)?;
+ ) -> Task<Result<Option<Entry>>> {
+ let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
+ return Task::ready(Ok(None));
+ };
let new_path = new_path.into();
if self.is_local() {
worktree.update(cx, |worktree, cx| {
@@ -1221,7 +1229,7 @@ impl Project {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
- Some(cx.spawn_weak(|_, mut cx| async move {
+ cx.spawn_weak(|_, mut cx| async move {
let response = client
.request(proto::RenameProjectEntry {
project_id,
@@ -1229,19 +1237,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(),
})
.await?;
- let entry = response
- .entry
- .ok_or_else(|| anyhow!("missing entry in response"))?;
- worktree
- .update(&mut cx, |worktree, cx| {
- worktree.as_remote_mut().unwrap().insert_entry(
- entry,
- response.worktree_scan_id as usize,
- cx,
- )
- })
- .await
- }))
+ match response.entry {
+ Some(entry) => worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree.as_remote_mut().unwrap().insert_entry(
+ entry,
+ response.worktree_scan_id as usize,
+ cx,
+ )
+ })
+ .await
+ .map(Some),
+ None => Ok(None),
+ }
+ })
}
}
@@ -1658,19 +1667,15 @@ impl Project {
pub fn open_path(
&mut self,
- path: impl Into<ProjectPath>,
+ path: ProjectPath,
cx: &mut ModelContext<Self>,
- ) -> Task<Result<(ProjectEntryId, AnyModelHandle)>> {
- let project_path = path.into();
- let task = self.open_buffer(project_path.clone(), cx);
+ ) -> Task<Result<(Option<ProjectEntryId>, AnyModelHandle)>> {
+ let task = self.open_buffer(path.clone(), cx);
cx.spawn_weak(|_, cx| async move {
let buffer = task.await?;
- let project_entry_id = buffer
- .read_with(&cx, |buffer, cx| {
- File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
- })
- .with_context(|| format!("no project entry for {project_path:?}"))?;
-
+ let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
+ File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
+ });
let buffer: &AnyModelHandle = &buffer;
Ok((project_entry_id, buffer.clone()))
})
@@ -1985,8 +1990,10 @@ impl Project {
remote_id,
);
- self.local_buffer_ids_by_entry_id
- .insert(file.entry_id, remote_id);
+ if let Some(entry_id) = file.entry_id {
+ self.local_buffer_ids_by_entry_id
+ .insert(entry_id, remote_id);
+ }
}
}
@@ -2441,24 +2448,25 @@ impl Project {
return None;
};
- match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
- Some(_) => {
- return None;
- }
- None => {
- let remote_id = buffer.read(cx).remote_id();
- self.local_buffer_ids_by_entry_id
- .insert(file.entry_id, remote_id);
-
- self.local_buffer_ids_by_path.insert(
- ProjectPath {
- worktree_id: file.worktree_id(cx),
- path: file.path.clone(),
- },
- remote_id,
- );
+ let remote_id = buffer.read(cx).remote_id();
+ if let Some(entry_id) = file.entry_id {
+ match self.local_buffer_ids_by_entry_id.get(&entry_id) {
+ Some(_) => {
+ return None;
+ }
+ None => {
+ self.local_buffer_ids_by_entry_id
+ .insert(entry_id, remote_id);
+ }
}
- }
+ };
+ self.local_buffer_ids_by_path.insert(
+ ProjectPath {
+ worktree_id: file.worktree_id(cx),
+ path: file.path.clone(),
+ },
+ remote_id,
+ );
}
_ => {}
}
@@ -5776,11 +5784,6 @@ impl Project {
while let Some(ignored_abs_path) =
ignored_paths_to_process.pop_front()
{
- if !query.file_matches(Some(&ignored_abs_path))
- || snapshot.is_path_excluded(&ignored_abs_path)
- {
- continue;
- }
if let Some(fs_metadata) = fs
.metadata(&ignored_abs_path)
.await
@@ -5808,6 +5811,13 @@ impl Project {
}
}
} else if !fs_metadata.is_symlink {
+ if !query.file_matches(Some(&ignored_abs_path))
+ || snapshot.is_path_excluded(
+ ignored_entry.path.to_path_buf(),
+ )
+ {
+ continue;
+ }
let matches = if let Some(file) = fs
.open_sync(&ignored_abs_path)
.await
@@ -6208,10 +6218,13 @@ impl Project {
return;
}
- let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
+ let new_file = if let Some(entry) = old_file
+ .entry_id
+ .and_then(|entry_id| snapshot.entry_for_id(entry_id))
+ {
File {
is_local: true,
- entry_id: entry.id,
+ entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
worktree: worktree_handle.clone(),
@@ -6220,7 +6233,7 @@ impl Project {
} else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
File {
is_local: true,
- entry_id: entry.id,
+ entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
worktree: worktree_handle.clone(),
@@ -6250,10 +6263,12 @@ impl Project {
);
}
- if new_file.entry_id != *entry_id {
+ if new_file.entry_id != Some(*entry_id) {
self.local_buffer_ids_by_entry_id.remove(entry_id);
- self.local_buffer_ids_by_entry_id
- .insert(new_file.entry_id, buffer_id);
+ if let Some(entry_id) = new_file.entry_id {
+ self.local_buffer_ids_by_entry_id
+ .insert(entry_id, buffer_id);
+ }
}
if new_file != *old_file {
@@ -6816,7 +6831,7 @@ impl Project {
})
.await?;
Ok(proto::ProjectEntryResponse {
- entry: Some((&entry).into()),
+ entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
@@ -6840,11 +6855,10 @@ impl Project {
.as_local_mut()
.unwrap()
.rename_entry(entry_id, new_path, cx)
- .ok_or_else(|| anyhow!("invalid entry"))
- })?
+ })
.await?;
Ok(proto::ProjectEntryResponse {
- entry: Some((&entry).into()),
+ entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
@@ -6868,11 +6882,10 @@ impl Project {
.as_local_mut()
.unwrap()
.copy_entry(entry_id, new_path, cx)
- .ok_or_else(|| anyhow!("invalid entry"))
- })?
+ })
.await?;
Ok(proto::ProjectEntryResponse {
- entry: Some((&entry).into()),
+ entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
@@ -4050,6 +4050,94 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
);
}
+#[gpui::test]
+async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/dir",
+ json!({
+ ".git": {},
+ ".gitignore": "**/target\n/node_modules\n",
+ "target": {
+ "index.txt": "index_key:index_value"
+ },
+ "node_modules": {
+ "eslint": {
+ "index.ts": "const eslint_key = 'eslint value'",
+ "package.json": r#"{ "some_key": "some value" }"#,
+ },
+ "prettier": {
+ "index.ts": "const prettier_key = 'prettier value'",
+ "package.json": r#"{ "other_key": "other value" }"#,
+ },
+ },
+ "package.json": r#"{ "main_key": "main value" }"#,
+ }),
+ )
+ .await;
+ let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+ let query = "key";
+ assert_eq!(
+ search(
+ &project,
+ SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
+ cx
+ )
+ .await
+ .unwrap(),
+ HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
+ "Only one non-ignored file should have the query"
+ );
+
+ assert_eq!(
+ search(
+ &project,
+ SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
+ cx
+ )
+ .await
+ .unwrap(),
+ HashMap::from_iter([
+ ("package.json".to_string(), vec![8..11]),
+ ("target/index.txt".to_string(), vec![6..9]),
+ (
+ "node_modules/prettier/package.json".to_string(),
+ vec![9..12]
+ ),
+ ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
+ ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
+ ("node_modules/eslint/package.json".to_string(), vec![8..11]),
+ ]),
+ "Unrestricted search with ignored directories should find every file with the query"
+ );
+
+ assert_eq!(
+ search(
+ &project,
+ SearchQuery::text(
+ query,
+ false,
+ false,
+ true,
+ vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
+ vec![PathMatcher::new("*.ts").unwrap()],
+ )
+ .unwrap(),
+ cx
+ )
+ .await
+ .unwrap(),
+ HashMap::from_iter([(
+ "node_modules/prettier/package.json".to_string(),
+ vec![9..12]
+ )]),
+ "With search including ignored prettier directory and excluding TS files, only one file should be found"
+ );
+}
+
#[test]
fn test_glob_literal_prefix() {
assert_eq!(glob_literal_prefix("**/*.js"), "");
@@ -371,15 +371,25 @@ impl SearchQuery {
pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
match file_path {
Some(file_path) => {
- !self
- .files_to_exclude()
- .iter()
- .any(|exclude_glob| exclude_glob.is_match(file_path))
- && (self.files_to_include().is_empty()
+ let mut path = file_path.to_path_buf();
+ loop {
+ if self
+ .files_to_exclude()
+ .iter()
+ .any(|exclude_glob| exclude_glob.is_match(&path))
+ {
+ return false;
+ } else if self.files_to_include().is_empty()
|| self
.files_to_include()
.iter()
- .any(|include_glob| include_glob.is_match(file_path)))
+ .any(|include_glob| include_glob.is_match(&path))
+ {
+ return true;
+ } else if !path.pop() {
+ return false;
+ }
+ }
}
None => self.files_to_include().is_empty(),
}
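
file_matches now walks up the candidate path with PathBuf::pop, so an exclude glob matching any ancestor rejects the file, while an include glob matching any ancestor admits it. A standalone sketch of the ancestor-walking check, with plain closures standing in for the glob matchers:

    use std::path::{Path, PathBuf};

    // Generic version of the loop in the diff: the two closures stand in for the
    // glob matchers behind files_to_exclude() and files_to_include().
    fn file_matches(
        file_path: &Path,
        excluded: impl Fn(&Path) -> bool,
        included: impl Fn(&Path) -> bool,
        include_list_is_empty: bool,
    ) -> bool {
        let mut path: PathBuf = file_path.to_path_buf();
        loop {
            if excluded(&path) {
                return false;
            } else if include_list_is_empty || included(&path) {
                return true;
            } else if !path.pop() {
                return false;
            }
        }
    }

    fn main() {
        let included = |p: &Path| p.starts_with("node_modules/prettier");
        let excluded = |p: &Path| p.ends_with(".git");
        assert!(file_matches(
            Path::new("node_modules/prettier/package.json"),
            excluded,
            included,
            false,
        ));
        assert!(!file_matches(Path::new(".git/HEAD"), excluded, included, false));
    }
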
@@ -960,8 +960,6 @@ impl LocalWorktree {
cx.spawn(|this, cx| async move {
let text = fs.load(&abs_path).await?;
- let entry = entry.await?;
-
let mut index_task = None;
let snapshot = this.read_with(&cx, |this, _| this.as_local().unwrap().snapshot());
if let Some(repo) = snapshot.repository_for_path(&path) {
@@ -981,18 +979,43 @@ impl LocalWorktree {
None
};
- Ok((
- File {
- entry_id: entry.id,
- worktree: this,
- path: entry.path,
- mtime: entry.mtime,
- is_local: true,
- is_deleted: false,
- },
- text,
- diff_base,
- ))
+ match entry.await? {
+ Some(entry) => Ok((
+ File {
+ entry_id: Some(entry.id),
+ worktree: this,
+ path: entry.path,
+ mtime: entry.mtime,
+ is_local: true,
+ is_deleted: false,
+ },
+ text,
+ diff_base,
+ )),
+ None => {
+ let metadata = fs
+ .metadata(&abs_path)
+ .await
+ .with_context(|| {
+ format!("Loading metadata for excluded file {abs_path:?}")
+ })?
+ .with_context(|| {
+ format!("Excluded file {abs_path:?} got removed during loading")
+ })?;
+ Ok((
+ File {
+ entry_id: None,
+ worktree: this,
+ path,
+ mtime: metadata.mtime,
+ is_local: true,
+ is_deleted: false,
+ },
+ text,
+ diff_base,
+ ))
+ }
+ }
})
}
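
When a loaded file is excluded from scanning and therefore has no worktree entry, the File is now built from filesystem metadata with entry_id: None. A hedged sketch of the double with_context on a Result<Option<_>> metadata lookup, assuming anyhow; the metadata function here is a hypothetical stand-in for fs.metadata:

    use anyhow::{Context as _, Result};
    use std::time::SystemTime;

    struct Metadata {
        mtime: SystemTime,
    }

    // Hypothetical metadata lookup: Err for an IO failure, Ok(None) if the file
    // vanished between events, mirroring the shape of fs.metadata in the diff.
    fn metadata(path: &str) -> Result<Option<Metadata>> {
        let _ = path;
        Ok(Some(Metadata {
            mtime: SystemTime::now(),
        }))
    }

    fn excluded_file_mtime(abs_path: &str) -> Result<SystemTime> {
        let metadata = metadata(abs_path)
            .with_context(|| format!("Loading metadata for excluded file {abs_path:?}"))?
            .with_context(|| format!("Excluded file {abs_path:?} got removed during loading"))?;
        Ok(metadata.mtime)
    }

    fn main() -> Result<()> {
        let _mtime = excluded_file_mtime("/dir/target/out.txt")?;
        Ok(())
    }
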
@@ -1013,17 +1036,37 @@ impl LocalWorktree {
let text = buffer.as_rope().clone();
let fingerprint = text.fingerprint();
let version = buffer.version();
- let save = self.write_file(path, text, buffer.line_ending(), cx);
+ let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
+ let fs = Arc::clone(&self.fs);
+ let abs_path = self.absolutize(&path);
cx.as_mut().spawn(|mut cx| async move {
let entry = save.await?;
+ let (entry_id, mtime, path) = match entry {
+ Some(entry) => (Some(entry.id), entry.mtime, entry.path),
+ None => {
+ let metadata = fs
+ .metadata(&abs_path)
+ .await
+ .with_context(|| {
+ format!(
+ "Fetching metadata after saving the excluded buffer {abs_path:?}"
+ )
+ })?
+ .with_context(|| {
+ format!("Excluded buffer {path:?} got removed during saving")
+ })?;
+ (None, metadata.mtime, path)
+ }
+ };
+
if has_changed_file {
let new_file = Arc::new(File {
- entry_id: entry.id,
+ entry_id,
worktree: handle,
- path: entry.path,
- mtime: entry.mtime,
+ path,
+ mtime,
is_local: true,
is_deleted: false,
});
@@ -1049,13 +1092,13 @@ impl LocalWorktree {
project_id,
buffer_id,
version: serialize_version(&version),
- mtime: Some(entry.mtime.into()),
+ mtime: Some(mtime.into()),
fingerprint: serialize_fingerprint(fingerprint),
})?;
}
buffer_handle.update(&mut cx, |buffer, cx| {
- buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
+ buffer.did_save(version.clone(), fingerprint, mtime, cx);
});
Ok(())
@@ -1080,7 +1123,7 @@ impl LocalWorktree {
path: impl Into<Arc<Path>>,
is_dir: bool,
cx: &mut ModelContext<Worktree>,
- ) -> Task<Result<Entry>> {
+ ) -> Task<Result<Option<Entry>>> {
let path = path.into();
let lowest_ancestor = self.lowest_ancestor(&path);
let abs_path = self.absolutize(&path);
@@ -1097,7 +1140,7 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move {
write.await?;
let (result, refreshes) = this.update(&mut cx, |this, cx| {
- let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new();
+ let mut refreshes = Vec::new();
let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
for refresh_path in refresh_paths.ancestors() {
if refresh_path == Path::new("") {
@@ -1124,14 +1167,14 @@ impl LocalWorktree {
})
}
- pub fn write_file(
+ pub(crate) fn write_file(
&self,
path: impl Into<Arc<Path>>,
text: Rope,
line_ending: LineEnding,
cx: &mut ModelContext<Worktree>,
- ) -> Task<Result<Entry>> {
- let path = path.into();
+ ) -> Task<Result<Option<Entry>>> {
+ let path: Arc<Path> = path.into();
let abs_path = self.absolutize(&path);
let fs = self.fs.clone();
let write = cx
@@ -1190,8 +1233,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
- ) -> Option<Task<Result<Entry>>> {
- let old_path = self.entry_for_id(entry_id)?.path.clone();
+ ) -> Task<Result<Option<Entry>>> {
+ let old_path = match self.entry_for_id(entry_id) {
+ Some(entry) => entry.path.clone(),
+ None => return Task::ready(Ok(None)),
+ };
let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path);
@@ -1201,7 +1247,7 @@ impl LocalWorktree {
.await
});
- Some(cx.spawn(|this, mut cx| async move {
+ cx.spawn(|this, mut cx| async move {
rename.await?;
this.update(&mut cx, |this, cx| {
this.as_local_mut()
@@ -1209,7 +1255,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), Some(old_path), cx)
})
.await
- }))
+ })
}
pub fn copy_entry(
@@ -1217,8 +1263,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
- ) -> Option<Task<Result<Entry>>> {
- let old_path = self.entry_for_id(entry_id)?.path.clone();
+ ) -> Task<Result<Option<Entry>>> {
+ let old_path = match self.entry_for_id(entry_id) {
+ Some(entry) => entry.path.clone(),
+ None => return Task::ready(Ok(None)),
+ };
let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path);
@@ -1233,7 +1282,7 @@ impl LocalWorktree {
.await
});
- Some(cx.spawn(|this, mut cx| async move {
+ cx.spawn(|this, mut cx| async move {
copy.await?;
this.update(&mut cx, |this, cx| {
this.as_local_mut()
@@ -1241,7 +1290,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), None, cx)
})
.await
- }))
+ })
}
pub fn expand_entry(
@@ -1277,7 +1326,10 @@ impl LocalWorktree {
path: Arc<Path>,
old_path: Option<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
- ) -> Task<Result<Entry>> {
+ ) -> Task<Result<Option<Entry>>> {
+ if self.is_path_excluded(path.to_path_buf()) {
+ return Task::ready(Ok(None));
+ }
let paths = if let Some(old_path) = old_path.as_ref() {
vec![old_path.clone(), path.clone()]
} else {
@@ -1286,13 +1338,15 @@ impl LocalWorktree {
let mut refresh = self.refresh_entries_for_paths(paths);
cx.spawn_weak(move |this, mut cx| async move {
refresh.recv().await;
- this.upgrade(&cx)
+ let new_entry = this
+ .upgrade(&cx)
.ok_or_else(|| anyhow!("worktree was dropped"))?
.update(&mut cx, |this, _| {
this.entry_for_path(path)
.cloned()
.ok_or_else(|| anyhow!("failed to read path after update"))
- })
+ })?;
+ Ok(Some(new_entry))
})
}
@@ -2226,10 +2280,19 @@ impl LocalSnapshot {
paths
}
- pub fn is_path_excluded(&self, abs_path: &Path) -> bool {
- self.file_scan_exclusions
- .iter()
- .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
+ pub fn is_path_excluded(&self, mut path: PathBuf) -> bool {
+ loop {
+ if self
+ .file_scan_exclusions
+ .iter()
+ .any(|exclude_matcher| exclude_matcher.is_match(&path))
+ {
+ return true;
+ }
+ if !path.pop() {
+ return false;
+ }
+ }
}
}
@@ -2458,8 +2521,7 @@ impl BackgroundScannerState {
ids_to_preserve.insert(work_directory_id);
} else {
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
- let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path)
- || snapshot.is_path_excluded(&git_dir_abs_path);
+ let git_dir_excluded = snapshot.is_path_excluded(entry.git_dir_path.to_path_buf());
if git_dir_excluded
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
{
@@ -2666,7 +2728,7 @@ pub struct File {
pub worktree: ModelHandle<Worktree>,
pub path: Arc<Path>,
pub mtime: SystemTime,
- pub(crate) entry_id: ProjectEntryId,
+ pub(crate) entry_id: Option<ProjectEntryId>,
pub(crate) is_local: bool,
pub(crate) is_deleted: bool,
}
@@ -2735,7 +2797,7 @@ impl language::File for File {
fn to_proto(&self) -> rpc::proto::File {
rpc::proto::File {
worktree_id: self.worktree.id() as u64,
- entry_id: self.entry_id.to_proto(),
+ entry_id: self.entry_id.map(|id| id.to_proto()),
path: self.path.to_string_lossy().into(),
mtime: Some(self.mtime.into()),
is_deleted: self.is_deleted,
@@ -2793,7 +2855,7 @@ impl File {
worktree,
path: entry.path.clone(),
mtime: entry.mtime,
- entry_id: entry.id,
+ entry_id: Some(entry.id),
is_local: true,
is_deleted: false,
})
@@ -2818,7 +2880,7 @@ impl File {
worktree,
path: Path::new(&proto.path).into(),
mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
- entry_id: ProjectEntryId::from_proto(proto.entry_id),
+ entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
is_local: false,
is_deleted: proto.is_deleted,
})
@@ -2836,7 +2898,7 @@ impl File {
if self.is_deleted {
None
} else {
- Some(self.entry_id)
+ self.entry_id
}
}
}
@@ -3338,16 +3400,7 @@ impl BackgroundScanner {
return false;
}
- // FS events may come for files which parent directory is excluded, need to check ignore those.
- let mut path_to_test = abs_path.clone();
- let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
- || snapshot.is_path_excluded(&relative_path);
- while !excluded_file_event && path_to_test.pop() {
- if snapshot.is_path_excluded(&path_to_test) {
- excluded_file_event = true;
- }
- }
- if excluded_file_event {
+ if snapshot.is_path_excluded(relative_path.to_path_buf()) {
if !is_git_related {
log::debug!("ignoring FS event for excluded path {relative_path:?}");
}
@@ -3531,7 +3584,7 @@ impl BackgroundScanner {
let state = self.state.lock();
let snapshot = &state.snapshot;
root_abs_path = snapshot.abs_path().clone();
- if snapshot.is_path_excluded(&job.abs_path) {
+ if snapshot.is_path_excluded(job.path.to_path_buf()) {
log::error!("skipping excluded directory {:?}", job.path);
return Ok(());
}
@@ -3603,8 +3656,8 @@ impl BackgroundScanner {
{
let mut state = self.state.lock();
- if state.snapshot.is_path_excluded(&child_abs_path) {
- let relative_path = job.path.join(child_name);
+ let relative_path = job.path.join(child_name);
+ if state.snapshot.is_path_excluded(relative_path.clone()) {
log::debug!("skipping excluded child entry {relative_path:?}");
state.remove_path(&relative_path);
continue;
@@ -1052,11 +1052,12 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
&[
".git/HEAD",
".git/foo",
+ "node_modules",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
],
- &["target", "node_modules"],
+ &["target"],
&[
".DS_Store",
"src/.DS_Store",
@@ -1106,6 +1107,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
".git/HEAD",
".git/foo",
".git/new_file",
+ "node_modules",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
@@ -1114,7 +1116,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
"build_output/new_file",
"test_output/new_file",
],
- &["target", "node_modules", "test_output"],
+ &["target", "test_output"],
&[
".DS_Store",
"src/.DS_Store",
@@ -1174,6 +1176,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
.create_entry("a/e".as_ref(), true, cx)
})
.await
+ .unwrap()
.unwrap();
assert!(entry.is_dir());
@@ -1222,6 +1225,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
})
.await
+ .unwrap()
.unwrap();
assert!(entry.is_file());
@@ -1257,6 +1261,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
})
.await
+ .unwrap()
.unwrap();
assert!(entry.is_file());
@@ -1275,6 +1280,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/e.txt".as_ref(), false, cx)
})
.await
+ .unwrap()
.unwrap();
assert!(entry.is_file());
@@ -1291,6 +1297,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("d/e/f/g.txt".as_ref(), false, cx)
})
.await
+ .unwrap()
.unwrap();
assert!(entry.is_file());
@@ -1616,14 +1623,14 @@ fn randomly_mutate_worktree(
entry.id.0,
new_path
);
- let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
+ let task = worktree.rename_entry(entry.id, new_path, cx);
cx.foreground().spawn(async move {
- task.await?;
+ task.await?.unwrap();
Ok(())
})
}
_ => {
- let task = if entry.is_dir() {
+ if entry.is_dir() {
let child_path = entry.path.join(random_filename(rng));
let is_dir = rng.gen_bool(0.3);
log::info!(
@@ -1631,15 +1638,20 @@ fn randomly_mutate_worktree(
if is_dir { "dir" } else { "file" },
child_path,
);
- worktree.create_entry(child_path, is_dir, cx)
+ let task = worktree.create_entry(child_path, is_dir, cx);
+ cx.foreground().spawn(async move {
+ task.await?;
+ Ok(())
+ })
} else {
log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
- worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
- };
- cx.foreground().spawn(async move {
- task.await?;
- Ok(())
- })
+ let task =
+ worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
+ cx.foreground().spawn(async move {
+ task.await?;
+ Ok(())
+ })
+ }
}
}
}
@@ -1151,20 +1151,22 @@ impl Project {
project_path: impl Into<ProjectPath>,
is_directory: bool,
cx: &mut ModelContext<Self>,
- ) -> Option<Task<Result<Entry>>> {
+ ) -> Task<Result<Option<Entry>>> {
let project_path = project_path.into();
- let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
+ let Some(worktree) = self.worktree_for_id(project_path.worktree_id, cx) else {
+ return Task::ready(Ok(None));
+ };
if self.is_local() {
- Some(worktree.update(cx, |worktree, cx| {
+ worktree.update(cx, |worktree, cx| {
worktree
.as_local_mut()
.unwrap()
.create_entry(project_path.path, is_directory, cx)
- }))
+ })
} else {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
- Some(cx.spawn(move |_, mut cx| async move {
+ cx.spawn(move |_, mut cx| async move {
let response = client
.request(proto::CreateProjectEntry {
worktree_id: project_path.worktree_id.to_proto(),
@@ -1173,19 +1175,20 @@ impl Project {
is_directory,
})
.await?;
- let entry = response
- .entry
- .ok_or_else(|| anyhow!("missing entry in response"))?;
- worktree
- .update(&mut cx, |worktree, cx| {
- worktree.as_remote_mut().unwrap().insert_entry(
- entry,
- response.worktree_scan_id as usize,
- cx,
- )
- })?
- .await
- }))
+ match response.entry {
+ Some(entry) => worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree.as_remote_mut().unwrap().insert_entry(
+ entry,
+ response.worktree_scan_id as usize,
+ cx,
+ )
+ })?
+ .await
+ .map(Some),
+ None => Ok(None),
+ }
+ })
}
}
@@ -1194,8 +1197,10 @@ impl Project {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>,
- ) -> Option<Task<Result<Entry>>> {
- let worktree = self.worktree_for_entry(entry_id, cx)?;
+ ) -> Task<Result<Option<Entry>>> {
+ let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
+ return Task::ready(Ok(None));
+ };
let new_path = new_path.into();
if self.is_local() {
worktree.update(cx, |worktree, cx| {
@@ -1208,7 +1213,7 @@ impl Project {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
- Some(cx.spawn(move |_, mut cx| async move {
+ cx.spawn(move |_, mut cx| async move {
let response = client
.request(proto::CopyProjectEntry {
project_id,
@@ -1216,19 +1221,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(),
})
.await?;
- let entry = response
- .entry
- .ok_or_else(|| anyhow!("missing entry in response"))?;
- worktree
- .update(&mut cx, |worktree, cx| {
- worktree.as_remote_mut().unwrap().insert_entry(
- entry,
- response.worktree_scan_id as usize,
- cx,
- )
- })?
- .await
- }))
+ match response.entry {
+ Some(entry) => worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree.as_remote_mut().unwrap().insert_entry(
+ entry,
+ response.worktree_scan_id as usize,
+ cx,
+ )
+ })?
+ .await
+ .map(Some),
+ None => Ok(None),
+ }
+ })
}
}
@@ -1237,8 +1243,10 @@ impl Project {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Self>,
- ) -> Option<Task<Result<Entry>>> {
- let worktree = self.worktree_for_entry(entry_id, cx)?;
+ ) -> Task<Result<Option<Entry>>> {
+ let Some(worktree) = self.worktree_for_entry(entry_id, cx) else {
+ return Task::ready(Ok(None));
+ };
let new_path = new_path.into();
if self.is_local() {
worktree.update(cx, |worktree, cx| {
@@ -1251,7 +1259,7 @@ impl Project {
let client = self.client.clone();
let project_id = self.remote_id().unwrap();
- Some(cx.spawn(move |_, mut cx| async move {
+ cx.spawn(move |_, mut cx| async move {
let response = client
.request(proto::RenameProjectEntry {
project_id,
@@ -1259,19 +1267,20 @@ impl Project {
new_path: new_path.to_string_lossy().into(),
})
.await?;
- let entry = response
- .entry
- .ok_or_else(|| anyhow!("missing entry in response"))?;
- worktree
- .update(&mut cx, |worktree, cx| {
- worktree.as_remote_mut().unwrap().insert_entry(
- entry,
- response.worktree_scan_id as usize,
- cx,
- )
- })?
- .await
- }))
+ match response.entry {
+ Some(entry) => worktree
+ .update(&mut cx, |worktree, cx| {
+ worktree.as_remote_mut().unwrap().insert_entry(
+ entry,
+ response.worktree_scan_id as usize,
+ cx,
+ )
+ })?
+ .await
+ .map(Some),
+ None => Ok(None),
+ }
+ })
}
}
@@ -1688,18 +1697,15 @@ impl Project {
pub fn open_path(
&mut self,
- path: impl Into<ProjectPath>,
+ path: ProjectPath,
cx: &mut ModelContext<Self>,
- ) -> Task<Result<(ProjectEntryId, AnyModel)>> {
- let project_path = path.into();
- let task = self.open_buffer(project_path.clone(), cx);
- cx.spawn(move |_, mut cx| async move {
+ ) -> Task<Result<(Option<ProjectEntryId>, AnyModel)>> {
+ let task = self.open_buffer(path.clone(), cx);
+ cx.spawn(move |_, cx| async move {
let buffer = task.await?;
- let project_entry_id = buffer
- .update(&mut cx, |buffer, cx| {
- File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
- })?
- .with_context(|| format!("no project entry for {project_path:?}"))?;
+ let project_entry_id = buffer.read_with(&cx, |buffer, cx| {
+ File::from_dyn(buffer.file()).and_then(|file| file.project_entry_id(cx))
+ })?;
let buffer: &AnyModel = &buffer;
Ok((project_entry_id, buffer.clone()))
@@ -2018,8 +2024,10 @@ impl Project {
remote_id,
);
- self.local_buffer_ids_by_entry_id
- .insert(file.entry_id, remote_id);
+ if let Some(entry_id) = file.entry_id {
+ self.local_buffer_ids_by_entry_id
+ .insert(entry_id, remote_id);
+ }
}
}
@@ -2474,24 +2482,25 @@ impl Project {
return None;
};
- match self.local_buffer_ids_by_entry_id.get(&file.entry_id) {
- Some(_) => {
- return None;
- }
- None => {
- let remote_id = buffer.read(cx).remote_id();
- self.local_buffer_ids_by_entry_id
- .insert(file.entry_id, remote_id);
-
- self.local_buffer_ids_by_path.insert(
- ProjectPath {
- worktree_id: file.worktree_id(cx),
- path: file.path.clone(),
- },
- remote_id,
- );
+ let remote_id = buffer.read(cx).remote_id();
+ if let Some(entry_id) = file.entry_id {
+ match self.local_buffer_ids_by_entry_id.get(&entry_id) {
+ Some(_) => {
+ return None;
+ }
+ None => {
+ self.local_buffer_ids_by_entry_id
+ .insert(entry_id, remote_id);
+ }
}
- }
+ };
+ self.local_buffer_ids_by_path.insert(
+ ProjectPath {
+ worktree_id: file.worktree_id(cx),
+ path: file.path.clone(),
+ },
+ remote_id,
+ );
}
_ => {}
}
@@ -5845,11 +5854,6 @@ impl Project {
while let Some(ignored_abs_path) =
ignored_paths_to_process.pop_front()
{
- if !query.file_matches(Some(&ignored_abs_path))
- || snapshot.is_path_excluded(&ignored_abs_path)
- {
- continue;
- }
if let Some(fs_metadata) = fs
.metadata(&ignored_abs_path)
.await
@@ -5877,6 +5881,13 @@ impl Project {
}
}
} else if !fs_metadata.is_symlink {
+ if !query.file_matches(Some(&ignored_abs_path))
+ || snapshot.is_path_excluded(
+ ignored_entry.path.to_path_buf(),
+ )
+ {
+ continue;
+ }
let matches = if let Some(file) = fs
.open_sync(&ignored_abs_path)
.await
@@ -6278,10 +6289,13 @@ impl Project {
return;
}
- let new_file = if let Some(entry) = snapshot.entry_for_id(old_file.entry_id) {
+ let new_file = if let Some(entry) = old_file
+ .entry_id
+ .and_then(|entry_id| snapshot.entry_for_id(entry_id))
+ {
File {
is_local: true,
- entry_id: entry.id,
+ entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
worktree: worktree_handle.clone(),
@@ -6290,7 +6304,7 @@ impl Project {
} else if let Some(entry) = snapshot.entry_for_path(old_file.path().as_ref()) {
File {
is_local: true,
- entry_id: entry.id,
+ entry_id: Some(entry.id),
mtime: entry.mtime,
path: entry.path.clone(),
worktree: worktree_handle.clone(),
@@ -6320,10 +6334,12 @@ impl Project {
);
}
- if new_file.entry_id != *entry_id {
+ if new_file.entry_id != Some(*entry_id) {
self.local_buffer_ids_by_entry_id.remove(entry_id);
- self.local_buffer_ids_by_entry_id
- .insert(new_file.entry_id, buffer_id);
+ if let Some(entry_id) = new_file.entry_id {
+ self.local_buffer_ids_by_entry_id
+ .insert(entry_id, buffer_id);
+ }
}
if new_file != *old_file {
@@ -6890,7 +6906,7 @@ impl Project {
})?
.await?;
Ok(proto::ProjectEntryResponse {
- entry: Some((&entry).into()),
+ entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
@@ -6914,11 +6930,10 @@ impl Project {
.as_local_mut()
.unwrap()
.rename_entry(entry_id, new_path, cx)
- .ok_or_else(|| anyhow!("invalid entry"))
- })??
+ })?
.await?;
Ok(proto::ProjectEntryResponse {
- entry: Some((&entry).into()),
+ entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
@@ -6942,11 +6957,10 @@ impl Project {
.as_local_mut()
.unwrap()
.copy_entry(entry_id, new_path, cx)
- .ok_or_else(|| anyhow!("invalid entry"))
- })??
+ })?
.await?;
Ok(proto::ProjectEntryResponse {
- entry: Some((&entry).into()),
+ entry: entry.as_ref().map(|e| e.into()),
worktree_scan_id: worktree_scan_id as u64,
})
}
@@ -4182,6 +4182,94 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
);
}
+#[gpui::test]
+async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/dir",
+ json!({
+ ".git": {},
+ ".gitignore": "**/target\n/node_modules\n",
+ "target": {
+ "index.txt": "index_key:index_value"
+ },
+ "node_modules": {
+ "eslint": {
+ "index.ts": "const eslint_key = 'eslint value'",
+ "package.json": r#"{ "some_key": "some value" }"#,
+ },
+ "prettier": {
+ "index.ts": "const prettier_key = 'prettier value'",
+ "package.json": r#"{ "other_key": "other value" }"#,
+ },
+ },
+ "package.json": r#"{ "main_key": "main value" }"#,
+ }),
+ )
+ .await;
+ let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+
+ let query = "key";
+ assert_eq!(
+ search(
+ &project,
+ SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
+ cx
+ )
+ .await
+ .unwrap(),
+ HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
+ "Only one non-ignored file should have the query"
+ );
+
+ assert_eq!(
+ search(
+ &project,
+ SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
+ cx
+ )
+ .await
+ .unwrap(),
+ HashMap::from_iter([
+ ("package.json".to_string(), vec![8..11]),
+ ("target/index.txt".to_string(), vec![6..9]),
+ (
+ "node_modules/prettier/package.json".to_string(),
+ vec![9..12]
+ ),
+ ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
+ ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
+ ("node_modules/eslint/package.json".to_string(), vec![8..11]),
+ ]),
+ "Unrestricted search with ignored directories should find every file with the query"
+ );
+
+ assert_eq!(
+ search(
+ &project,
+ SearchQuery::text(
+ query,
+ false,
+ false,
+ true,
+ vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
+ vec![PathMatcher::new("*.ts").unwrap()],
+ )
+ .unwrap(),
+ cx
+ )
+ .await
+ .unwrap(),
+ HashMap::from_iter([(
+ "node_modules/prettier/package.json".to_string(),
+ vec![9..12]
+ )]),
+ "With search including ignored prettier directory and excluding TS files, only one file should be found"
+ );
+}
+
#[test]
fn test_glob_literal_prefix() {
assert_eq!(glob_literal_prefix("**/*.js"), "");
@@ -371,15 +371,25 @@ impl SearchQuery {
pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
match file_path {
Some(file_path) => {
- !self
- .files_to_exclude()
- .iter()
- .any(|exclude_glob| exclude_glob.is_match(file_path))
- && (self.files_to_include().is_empty()
+ let mut path = file_path.to_path_buf();
+ loop {
+ if self
+ .files_to_exclude()
+ .iter()
+ .any(|exclude_glob| exclude_glob.is_match(&path))
+ {
+ return false;
+ } else if self.files_to_include().is_empty()
|| self
.files_to_include()
.iter()
- .any(|include_glob| include_glob.is_match(file_path)))
+ .any(|include_glob| include_glob.is_match(&path))
+ {
+ return true;
+ } else if !path.pop() {
+ return false;
+ }
+ }
}
None => self.files_to_include().is_empty(),
}
@@ -958,8 +958,6 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move {
let text = fs.load(&abs_path).await?;
- let entry = entry.await?;
-
let mut index_task = None;
let snapshot = this.update(&mut cx, |this, _| this.as_local().unwrap().snapshot())?;
if let Some(repo) = snapshot.repository_for_path(&path) {
@@ -982,18 +980,43 @@ impl LocalWorktree {
let worktree = this
.upgrade()
.ok_or_else(|| anyhow!("worktree was dropped"))?;
- Ok((
- File {
- entry_id: entry.id,
- worktree,
- path: entry.path,
- mtime: entry.mtime,
- is_local: true,
- is_deleted: false,
- },
- text,
- diff_base,
- ))
+ match entry.await? {
+ Some(entry) => Ok((
+ File {
+ entry_id: Some(entry.id),
+ worktree,
+ path: entry.path,
+ mtime: entry.mtime,
+ is_local: true,
+ is_deleted: false,
+ },
+ text,
+ diff_base,
+ )),
+ None => {
+ let metadata = fs
+ .metadata(&abs_path)
+ .await
+ .with_context(|| {
+ format!("Loading metadata for excluded file {abs_path:?}")
+ })?
+ .with_context(|| {
+ format!("Excluded file {abs_path:?} got removed during loading")
+ })?;
+ Ok((
+ File {
+ entry_id: None,
+ worktree,
+ path,
+ mtime: metadata.mtime,
+ is_local: true,
+ is_deleted: false,
+ },
+ text,
+ diff_base,
+ ))
+ }
+ }
})
}
@@ -1013,18 +1036,38 @@ impl LocalWorktree {
let text = buffer.as_rope().clone();
let fingerprint = text.fingerprint();
let version = buffer.version();
- let save = self.write_file(path, text, buffer.line_ending(), cx);
+ let save = self.write_file(path.as_ref(), text, buffer.line_ending(), cx);
+ let fs = Arc::clone(&self.fs);
+ let abs_path = self.absolutize(&path);
cx.spawn(move |this, mut cx| async move {
let entry = save.await?;
let this = this.upgrade().context("worktree dropped")?;
+ let (entry_id, mtime, path) = match entry {
+ Some(entry) => (Some(entry.id), entry.mtime, entry.path),
+ None => {
+ let metadata = fs
+ .metadata(&abs_path)
+ .await
+ .with_context(|| {
+ format!(
+ "Fetching metadata after saving the excluded buffer {abs_path:?}"
+ )
+ })?
+ .with_context(|| {
+ format!("Excluded buffer {path:?} got removed during saving")
+ })?;
+ (None, metadata.mtime, path)
+ }
+ };
+
if has_changed_file {
let new_file = Arc::new(File {
- entry_id: entry.id,
+ entry_id,
worktree: this,
- path: entry.path,
- mtime: entry.mtime,
+ path,
+ mtime,
is_local: true,
is_deleted: false,
});
@@ -1050,13 +1093,13 @@ impl LocalWorktree {
project_id,
buffer_id,
version: serialize_version(&version),
- mtime: Some(entry.mtime.into()),
+ mtime: Some(mtime.into()),
fingerprint: serialize_fingerprint(fingerprint),
})?;
}
buffer_handle.update(&mut cx, |buffer, cx| {
- buffer.did_save(version.clone(), fingerprint, entry.mtime, cx);
+ buffer.did_save(version.clone(), fingerprint, mtime, cx);
})?;
Ok(())
@@ -1081,7 +1124,7 @@ impl LocalWorktree {
path: impl Into<Arc<Path>>,
is_dir: bool,
cx: &mut ModelContext<Worktree>,
- ) -> Task<Result<Entry>> {
+ ) -> Task<Result<Option<Entry>>> {
let path = path.into();
let lowest_ancestor = self.lowest_ancestor(&path);
let abs_path = self.absolutize(&path);
@@ -1098,7 +1141,7 @@ impl LocalWorktree {
cx.spawn(|this, mut cx| async move {
write.await?;
let (result, refreshes) = this.update(&mut cx, |this, cx| {
- let mut refreshes = Vec::<Task<anyhow::Result<Entry>>>::new();
+ let mut refreshes = Vec::new();
let refresh_paths = path.strip_prefix(&lowest_ancestor).unwrap();
for refresh_path in refresh_paths.ancestors() {
if refresh_path == Path::new("") {
@@ -1125,14 +1168,14 @@ impl LocalWorktree {
})
}
- pub fn write_file(
+ pub(crate) fn write_file(
&self,
path: impl Into<Arc<Path>>,
text: Rope,
line_ending: LineEnding,
cx: &mut ModelContext<Worktree>,
- ) -> Task<Result<Entry>> {
- let path = path.into();
+ ) -> Task<Result<Option<Entry>>> {
+ let path: Arc<Path> = path.into();
let abs_path = self.absolutize(&path);
let fs = self.fs.clone();
let write = cx
@@ -1191,8 +1234,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
- ) -> Option<Task<Result<Entry>>> {
- let old_path = self.entry_for_id(entry_id)?.path.clone();
+ ) -> Task<Result<Option<Entry>>> {
+ let old_path = match self.entry_for_id(entry_id) {
+ Some(entry) => entry.path.clone(),
+ None => return Task::ready(Ok(None)),
+ };
let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path);
@@ -1202,7 +1248,7 @@ impl LocalWorktree {
.await
});
- Some(cx.spawn(|this, mut cx| async move {
+ cx.spawn(|this, mut cx| async move {
rename.await?;
this.update(&mut cx, |this, cx| {
this.as_local_mut()
@@ -1210,7 +1256,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), Some(old_path), cx)
})?
.await
- }))
+ })
}
pub fn copy_entry(
@@ -1218,8 +1264,11 @@ impl LocalWorktree {
entry_id: ProjectEntryId,
new_path: impl Into<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
- ) -> Option<Task<Result<Entry>>> {
- let old_path = self.entry_for_id(entry_id)?.path.clone();
+ ) -> Task<Result<Option<Entry>>> {
+ let old_path = match self.entry_for_id(entry_id) {
+ Some(entry) => entry.path.clone(),
+ None => return Task::ready(Ok(None)),
+ };
let new_path = new_path.into();
let abs_old_path = self.absolutize(&old_path);
let abs_new_path = self.absolutize(&new_path);
@@ -1234,7 +1283,7 @@ impl LocalWorktree {
.await
});
- Some(cx.spawn(|this, mut cx| async move {
+ cx.spawn(|this, mut cx| async move {
copy.await?;
this.update(&mut cx, |this, cx| {
this.as_local_mut()
@@ -1242,7 +1291,7 @@ impl LocalWorktree {
.refresh_entry(new_path.clone(), None, cx)
})?
.await
- }))
+ })
}
pub fn expand_entry(
@@ -1278,7 +1327,10 @@ impl LocalWorktree {
path: Arc<Path>,
old_path: Option<Arc<Path>>,
cx: &mut ModelContext<Worktree>,
- ) -> Task<Result<Entry>> {
+ ) -> Task<Result<Option<Entry>>> {
+ if self.is_path_excluded(path.to_path_buf()) {
+ return Task::ready(Ok(None));
+ }
let paths = if let Some(old_path) = old_path.as_ref() {
vec![old_path.clone(), path.clone()]
} else {
@@ -1287,11 +1339,12 @@ impl LocalWorktree {
let mut refresh = self.refresh_entries_for_paths(paths);
cx.spawn(move |this, mut cx| async move {
refresh.recv().await;
- this.update(&mut cx, |this, _| {
+ let new_entry = this.update(&mut cx, |this, _| {
this.entry_for_path(path)
.cloned()
.ok_or_else(|| anyhow!("failed to read path after update"))
- })?
+ })??;
+ Ok(Some(new_entry))
})
}
@@ -2222,10 +2275,19 @@ impl LocalSnapshot {
paths
}
- pub fn is_path_excluded(&self, abs_path: &Path) -> bool {
- self.file_scan_exclusions
- .iter()
- .any(|exclude_matcher| exclude_matcher.is_match(abs_path))
+ pub fn is_path_excluded(&self, mut path: PathBuf) -> bool {
+ loop {
+ if self
+ .file_scan_exclusions
+ .iter()
+ .any(|exclude_matcher| exclude_matcher.is_match(&path))
+ {
+ return true;
+ }
+ if !path.pop() {
+ return false;
+ }
+ }
}
}
@@ -2455,8 +2517,7 @@ impl BackgroundScannerState {
ids_to_preserve.insert(work_directory_id);
} else {
let git_dir_abs_path = snapshot.abs_path().join(&entry.git_dir_path);
- let git_dir_excluded = snapshot.is_path_excluded(&entry.git_dir_path)
- || snapshot.is_path_excluded(&git_dir_abs_path);
+ let git_dir_excluded = snapshot.is_path_excluded(entry.git_dir_path.to_path_buf());
if git_dir_excluded
&& !matches!(smol::block_on(fs.metadata(&git_dir_abs_path)), Ok(None))
{
@@ -2663,7 +2724,7 @@ pub struct File {
pub worktree: Model<Worktree>,
pub path: Arc<Path>,
pub mtime: SystemTime,
- pub(crate) entry_id: ProjectEntryId,
+ pub(crate) entry_id: Option<ProjectEntryId>,
pub(crate) is_local: bool,
pub(crate) is_deleted: bool,
}
@@ -2732,7 +2793,7 @@ impl language::File for File {
fn to_proto(&self) -> rpc::proto::File {
rpc::proto::File {
worktree_id: self.worktree.entity_id().as_u64(),
- entry_id: self.entry_id.to_proto(),
+ entry_id: self.entry_id.map(|id| id.to_proto()),
path: self.path.to_string_lossy().into(),
mtime: Some(self.mtime.into()),
is_deleted: self.is_deleted,
@@ -2790,7 +2851,7 @@ impl File {
worktree,
path: entry.path.clone(),
mtime: entry.mtime,
- entry_id: entry.id,
+ entry_id: Some(entry.id),
is_local: true,
is_deleted: false,
})
@@ -2815,7 +2876,7 @@ impl File {
worktree,
path: Path::new(&proto.path).into(),
mtime: proto.mtime.ok_or_else(|| anyhow!("no timestamp"))?.into(),
- entry_id: ProjectEntryId::from_proto(proto.entry_id),
+ entry_id: proto.entry_id.map(ProjectEntryId::from_proto),
is_local: false,
is_deleted: proto.is_deleted,
})
@@ -2833,7 +2894,7 @@ impl File {
if self.is_deleted {
None
} else {
- Some(self.entry_id)
+ self.entry_id
}
}
}
@@ -3329,16 +3390,7 @@ impl BackgroundScanner {
return false;
}
- // FS events may come for files which parent directory is excluded, need to check ignore those.
- let mut path_to_test = abs_path.clone();
- let mut excluded_file_event = snapshot.is_path_excluded(abs_path)
- || snapshot.is_path_excluded(&relative_path);
- while !excluded_file_event && path_to_test.pop() {
- if snapshot.is_path_excluded(&path_to_test) {
- excluded_file_event = true;
- }
- }
- if excluded_file_event {
+ if snapshot.is_path_excluded(relative_path.to_path_buf()) {
if !is_git_related {
log::debug!("ignoring FS event for excluded path {relative_path:?}");
}
@@ -3522,7 +3574,7 @@ impl BackgroundScanner {
let state = self.state.lock();
let snapshot = &state.snapshot;
root_abs_path = snapshot.abs_path().clone();
- if snapshot.is_path_excluded(&job.abs_path) {
+ if snapshot.is_path_excluded(job.path.to_path_buf()) {
log::error!("skipping excluded directory {:?}", job.path);
return Ok(());
}
@@ -3593,9 +3645,9 @@ impl BackgroundScanner {
}
{
+ let relative_path = job.path.join(child_name);
let mut state = self.state.lock();
- if state.snapshot.is_path_excluded(&child_abs_path) {
- let relative_path = job.path.join(child_name);
+ if state.snapshot.is_path_excluded(relative_path.clone()) {
log::debug!("skipping excluded child entry {relative_path:?}");
state.remove_path(&relative_path);
continue;
@@ -1055,11 +1055,12 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
&[
".git/HEAD",
".git/foo",
+ "node_modules",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
],
- &["target", "node_modules"],
+ &["target"],
&[
".DS_Store",
"src/.DS_Store",
@@ -1109,6 +1110,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
".git/HEAD",
".git/foo",
".git/new_file",
+ "node_modules",
"node_modules/.DS_Store",
"node_modules/prettier",
"node_modules/prettier/package.json",
@@ -1117,7 +1119,7 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
"build_output/new_file",
"test_output/new_file",
],
- &["target", "node_modules", "test_output"],
+ &["target", "test_output"],
&[
".DS_Store",
"src/.DS_Store",
@@ -1177,6 +1179,7 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
.create_entry("a/e".as_ref(), true, cx)
})
.await
+ .unwrap()
.unwrap();
assert!(entry.is_dir());
@@ -1226,6 +1229,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
})
.await
+ .unwrap()
.unwrap();
assert!(entry.is_file());
@@ -1261,6 +1265,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/d.txt".as_ref(), false, cx)
})
.await
+ .unwrap()
.unwrap();
assert!(entry.is_file());
@@ -1279,6 +1284,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("a/b/c/e.txt".as_ref(), false, cx)
})
.await
+ .unwrap()
.unwrap();
assert!(entry.is_file());
@@ -1295,6 +1301,7 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
.create_entry("d/e/f/g.txt".as_ref(), false, cx)
})
.await
+ .unwrap()
.unwrap();
assert!(entry.is_file());
@@ -1620,14 +1627,14 @@ fn randomly_mutate_worktree(
entry.id.0,
new_path
);
- let task = worktree.rename_entry(entry.id, new_path, cx).unwrap();
+ let task = worktree.rename_entry(entry.id, new_path, cx);
cx.background_executor().spawn(async move {
- task.await?;
+ task.await?.unwrap();
Ok(())
})
}
_ => {
- let task = if entry.is_dir() {
+ if entry.is_dir() {
let child_path = entry.path.join(random_filename(rng));
let is_dir = rng.gen_bool(0.3);
log::info!(
@@ -1635,15 +1642,20 @@ fn randomly_mutate_worktree(
if is_dir { "dir" } else { "file" },
child_path,
);
- worktree.create_entry(child_path, is_dir, cx)
+ let task = worktree.create_entry(child_path, is_dir, cx);
+ cx.background_executor().spawn(async move {
+ task.await?;
+ Ok(())
+ })
} else {
log::info!("overwriting file {:?} ({})", entry.path, entry.id.0);
- worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx)
- };
- cx.background_executor().spawn(async move {
- task.await?;
- Ok(())
- })
+ let task =
+ worktree.write_file(entry.path.clone(), "".into(), Default::default(), cx);
+ cx.background_executor().spawn(async move {
+ task.await?;
+ Ok(())
+ })
+ }
}
}
}
@@ -621,7 +621,7 @@ impl ProjectPanel {
edited_entry_id = NEW_ENTRY_ID;
edit_task = self.project.update(cx, |project, cx| {
project.create_entry((worktree_id, &new_path), is_dir, cx)
- })?;
+ });
} else {
let new_path = if let Some(parent) = entry.path.clone().parent() {
parent.join(&filename)
@@ -635,7 +635,7 @@ impl ProjectPanel {
edited_entry_id = entry.id;
edit_task = self.project.update(cx, |project, cx| {
project.rename_entry(entry.id, new_path.as_path(), cx)
- })?;
+ });
};
edit_state.processing_filename = Some(filename);
@@ -648,21 +648,22 @@ impl ProjectPanel {
cx.notify();
})?;
- let new_entry = new_entry?;
- this.update(&mut cx, |this, cx| {
- if let Some(selection) = &mut this.selection {
- if selection.entry_id == edited_entry_id {
- selection.worktree_id = worktree_id;
- selection.entry_id = new_entry.id;
- this.expand_to_selection(cx);
+ if let Some(new_entry) = new_entry? {
+ this.update(&mut cx, |this, cx| {
+ if let Some(selection) = &mut this.selection {
+ if selection.entry_id == edited_entry_id {
+ selection.worktree_id = worktree_id;
+ selection.entry_id = new_entry.id;
+ this.expand_to_selection(cx);
+ }
}
- }
- this.update_visible_entries(None, cx);
- if is_new_entry && !is_dir {
- this.open_entry(new_entry.id, true, cx);
- }
- cx.notify();
- })?;
+ this.update_visible_entries(None, cx);
+ if is_new_entry && !is_dir {
+ this.open_entry(new_entry.id, true, cx);
+ }
+ cx.notify();
+ })?;
+ }
Ok(())
}))
}
@@ -935,15 +936,17 @@ impl ProjectPanel {
}
if clipboard_entry.is_cut() {
- if let Some(task) = self.project.update(cx, |project, cx| {
- project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
- }) {
- task.detach_and_log_err(cx)
- }
- } else if let Some(task) = self.project.update(cx, |project, cx| {
- project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
- }) {
- task.detach_and_log_err(cx)
+ self.project
+ .update(cx, |project, cx| {
+ project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
+ })
+ .detach_and_log_err(cx)
+ } else {
+ self.project
+ .update(cx, |project, cx| {
+ project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
+ })
+ .detach_and_log_err(cx)
}
}
None
@@ -1026,7 +1029,7 @@ impl ProjectPanel {
let mut new_path = destination_path.to_path_buf();
new_path.push(entry_path.path.file_name()?);
if new_path != entry_path.path.as_ref() {
- let task = project.rename_entry(entry_to_move, new_path, cx)?;
+ let task = project.rename_entry(entry_to_move, new_path, cx);
cx.foreground().spawn(task).detach_and_log_err(cx);
}
@@ -397,7 +397,6 @@ impl ProjectPanel {
menu = menu.action(
"Add Folder to Project",
Box::new(workspace::AddFolderToProject),
- cx,
);
if is_root {
menu = menu.entry(
@@ -412,35 +411,35 @@ impl ProjectPanel {
}
menu = menu
- .action("New File", Box::new(NewFile), cx)
- .action("New Folder", Box::new(NewDirectory), cx)
+ .action("New File", Box::new(NewFile))
+ .action("New Folder", Box::new(NewDirectory))
.separator()
- .action("Cut", Box::new(Cut), cx)
- .action("Copy", Box::new(Copy), cx);
+ .action("Cut", Box::new(Cut))
+ .action("Copy", Box::new(Copy));
if let Some(clipboard_entry) = self.clipboard_entry {
if clipboard_entry.worktree_id() == worktree_id {
- menu = menu.action("Paste", Box::new(Paste), cx);
+ menu = menu.action("Paste", Box::new(Paste));
}
}
menu = menu
.separator()
- .action("Copy Path", Box::new(CopyPath), cx)
- .action("Copy Relative Path", Box::new(CopyRelativePath), cx)
+ .action("Copy Path", Box::new(CopyPath))
+ .action("Copy Relative Path", Box::new(CopyRelativePath))
.separator()
- .action("Reveal in Finder", Box::new(RevealInFinder), cx);
+ .action("Reveal in Finder", Box::new(RevealInFinder));
if is_dir {
menu = menu
- .action("Open in Terminal", Box::new(OpenInTerminal), cx)
- .action("Search Inside", Box::new(NewSearchInDirectory), cx)
+ .action("Open in Terminal", Box::new(OpenInTerminal))
+ .action("Search Inside", Box::new(NewSearchInDirectory))
}
- menu = menu.separator().action("Rename", Box::new(Rename), cx);
+ menu = menu.separator().action("Rename", Box::new(Rename));
if !is_root {
- menu = menu.action("Delete", Box::new(Delete), cx);
+ menu = menu.action("Delete", Box::new(Delete));
}
menu
@@ -611,7 +610,7 @@ impl ProjectPanel {
edited_entry_id = NEW_ENTRY_ID;
edit_task = self.project.update(cx, |project, cx| {
project.create_entry((worktree_id, &new_path), is_dir, cx)
- })?;
+ });
} else {
let new_path = if let Some(parent) = entry.path.clone().parent() {
parent.join(&filename)
@@ -625,7 +624,7 @@ impl ProjectPanel {
edited_entry_id = entry.id;
edit_task = self.project.update(cx, |project, cx| {
project.rename_entry(entry.id, new_path.as_path(), cx)
- })?;
+ });
};
edit_state.processing_filename = Some(filename);
@@ -638,21 +637,22 @@ impl ProjectPanel {
cx.notify();
})?;
- let new_entry = new_entry?;
- this.update(&mut cx, |this, cx| {
- if let Some(selection) = &mut this.selection {
- if selection.entry_id == edited_entry_id {
- selection.worktree_id = worktree_id;
- selection.entry_id = new_entry.id;
- this.expand_to_selection(cx);
+ if let Some(new_entry) = new_entry? {
+ this.update(&mut cx, |this, cx| {
+ if let Some(selection) = &mut this.selection {
+ if selection.entry_id == edited_entry_id {
+ selection.worktree_id = worktree_id;
+ selection.entry_id = new_entry.id;
+ this.expand_to_selection(cx);
+ }
}
- }
- this.update_visible_entries(None, cx);
- if is_new_entry && !is_dir {
- this.open_entry(new_entry.id, true, cx);
- }
- cx.notify();
- })?;
+ this.update_visible_entries(None, cx);
+ if is_new_entry && !is_dir {
+ this.open_entry(new_entry.id, true, cx);
+ }
+ cx.notify();
+ })?;
+ }
Ok(())
}))
}
@@ -932,15 +932,17 @@ impl ProjectPanel {
}
if clipboard_entry.is_cut() {
- if let Some(task) = self.project.update(cx, |project, cx| {
- project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
- }) {
- task.detach_and_log_err(cx);
- }
- } else if let Some(task) = self.project.update(cx, |project, cx| {
- project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
- }) {
- task.detach_and_log_err(cx);
+ self.project
+ .update(cx, |project, cx| {
+ project.rename_entry(clipboard_entry.entry_id(), new_path, cx)
+ })
+ .detach_and_log_err(cx)
+ } else {
+ self.project
+ .update(cx, |project, cx| {
+ project.copy_entry(clipboard_entry.entry_id(), new_path, cx)
+ })
+ .detach_and_log_err(cx)
}
Some(())
@@ -1026,7 +1028,7 @@ impl ProjectPanel {
// let mut new_path = destination_path.to_path_buf();
// new_path.push(entry_path.path.file_name()?);
// if new_path != entry_path.path.as_ref() {
- // let task = project.rename_entry(entry_to_move, new_path, cx)?;
+ // let task = project.rename_entry(entry_to_move, new_path, cx);
// cx.foreground_executor().spawn(task).detach_and_log_err(cx);
// }
@@ -430,7 +430,7 @@ message ExpandProjectEntryResponse {
}
message ProjectEntryResponse {
- Entry entry = 1;
+ optional Entry entry = 1;
uint64 worktree_scan_id = 2;
}
@@ -1357,7 +1357,7 @@ message User {
message File {
uint64 worktree_id = 1;
- uint64 entry_id = 2;
+ optional uint64 entry_id = 2;
string path = 3;
Timestamp mtime = 4;
bool is_deleted = 5;
@@ -9,4 +9,4 @@ pub use notification::*;
pub use peer::*;
mod macros;
-pub const PROTOCOL_VERSION: u32 = 66;
+pub const PROTOCOL_VERSION: u32 = 67;
@@ -430,7 +430,7 @@ message ExpandProjectEntryResponse {
}
message ProjectEntryResponse {
- Entry entry = 1;
+ optional Entry entry = 1;
uint64 worktree_scan_id = 2;
}
@@ -1357,7 +1357,7 @@ message User {
message File {
uint64 worktree_id = 1;
- uint64 entry_id = 2;
+ optional uint64 entry_id = 2;
string path = 3;
Timestamp mtime = 4;
bool is_deleted = 5;
@@ -9,4 +9,4 @@ pub use notification::*;
pub use peer::*;
mod macros;
-pub const PROTOCOL_VERSION: u32 = 66;
+pub const PROTOCOL_VERSION: u32 = 67;
@@ -0,0 +1,69 @@
+[package]
+name = "semantic_index2"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/semantic_index.rs"
+doctest = false
+
+[dependencies]
+ai = { package = "ai2", path = "../ai2" }
+collections = { path = "../collections" }
+gpui = { package = "gpui2", path = "../gpui2" }
+language = { package = "language2", path = "../language2" }
+project = { package = "project2", path = "../project2" }
+workspace = { package = "workspace2", path = "../workspace2" }
+util = { path = "../util" }
+rpc = { package = "rpc2", path = "../rpc2" }
+settings = { package = "settings2", path = "../settings2" }
+anyhow.workspace = true
+postage.workspace = true
+futures.workspace = true
+ordered-float.workspace = true
+smol.workspace = true
+rusqlite.workspace = true
+log.workspace = true
+tree-sitter.workspace = true
+lazy_static.workspace = true
+serde.workspace = true
+serde_json.workspace = true
+async-trait.workspace = true
+tiktoken-rs.workspace = true
+parking_lot.workspace = true
+rand.workspace = true
+schemars.workspace = true
+globset.workspace = true
+sha1 = "0.10.5"
+ndarray = { version = "0.15.0" }
+
+[dev-dependencies]
+ai = { package = "ai2", path = "../ai2", features = ["test-support"] }
+collections = { path = "../collections", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
+language = { package = "language2", path = "../language2", features = ["test-support"] }
+project = { package = "project2", path = "../project2", features = ["test-support"] }
+rpc = { package = "rpc2", path = "../rpc2", features = ["test-support"] }
+workspace = { package = "workspace2", path = "../workspace2", features = ["test-support"] }
+settings = { package = "settings2", path = "../settings2", features = ["test-support"]}
+rust-embed = { version = "8.0", features = ["include-exclude"] }
+client = { package = "client2", path = "../client2" }
+node_runtime = { path = "../node_runtime"}
+
+pretty_assertions.workspace = true
+rand.workspace = true
+unindent.workspace = true
+tempdir.workspace = true
+ctor.workspace = true
+env_logger.workspace = true
+
+tree-sitter-typescript.workspace = true
+tree-sitter-json.workspace = true
+tree-sitter-rust.workspace = true
+tree-sitter-toml.workspace = true
+tree-sitter-cpp.workspace = true
+tree-sitter-elixir.workspace = true
+tree-sitter-lua.workspace = true
+tree-sitter-ruby.workspace = true
+tree-sitter-php.workspace = true
@@ -0,0 +1,20 @@
+
+# Semantic Index
+
+## Evaluation
+
+### Metrics
+
+nDCG@k:
+- "The value of NDCG is determined by comparing the relevance of the items returned by the search engine to the relevance of the items that a hypothetical 'ideal' search engine would return."
+- "The relevance of a result is represented by a score (also known as a 'grade') that is assigned to the search query. The scores of these results are then discounted based on their position in the search results -- did they get recommended first or last?"
+
+MRR@k:
+- "Mean reciprocal rank quantifies the rank of the first relevant item found in the recommendation list."
+
+MAP@k:
+- "Mean average precision averages the precision@k metric at each relevant item position in the recommendation list."
+
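+As a rough illustration (hypothetical helper functions, not part of the crate), these
+metrics can be computed over a ranked result list, treating an eval file's `matches`
+entries as the relevant set and assuming binary relevance:
+
+```rust
+use std::collections::HashSet;
+
+/// Reciprocal rank of the first relevant result within the top `k`.
+fn mrr_at_k(ranked: &[&str], relevant: &HashSet<&str>, k: usize) -> f32 {
+    ranked
+        .iter()
+        .take(k)
+        .position(|result| relevant.contains(result))
+        .map_or(0.0, |rank| 1.0 / (rank as f32 + 1.0))
+}
+
+/// Average of precision@i over the relevant positions in the top `k`,
+/// normalized by min(|relevant|, k) (one common convention).
+fn map_at_k(ranked: &[&str], relevant: &HashSet<&str>, k: usize) -> f32 {
+    let mut hits = 0.0;
+    let mut precision_sum = 0.0;
+    for (i, result) in ranked.iter().take(k).enumerate() {
+        if relevant.contains(result) {
+            hits += 1.0;
+            precision_sum += hits / (i as f32 + 1.0);
+        }
+    }
+    let denominator = relevant.len().min(k);
+    if denominator == 0 {
+        0.0
+    } else {
+        precision_sum / denominator as f32
+    }
+}
+
+/// Binary-relevance nDCG: DCG over the top `k` divided by the ideal DCG.
+fn ndcg_at_k(ranked: &[&str], relevant: &HashSet<&str>, k: usize) -> f32 {
+    let dcg: f32 = ranked
+        .iter()
+        .take(k)
+        .enumerate()
+        .filter(|&(_, result)| relevant.contains(result))
+        .map(|(i, _)| 1.0 / (i as f32 + 2.0).log2())
+        .sum();
+    let ideal_hits = relevant.len().min(k);
+    let idcg: f32 = (0..ideal_hits).map(|i| 1.0 / (i as f32 + 2.0).log2()).sum();
+    if idcg == 0.0 {
+        0.0
+    } else {
+        dcg / idcg
+    }
+}
+
+fn main() {
+    let ranked = ["gpt_engineer/steps.py:69", "gpt_engineer/ai.py:315"];
+    let relevant: HashSet<&str> = ["gpt_engineer/steps.py:69"].into_iter().collect();
+    println!("MRR@2  = {}", mrr_at_k(&ranked, &relevant, 2)); // 1.0
+    println!("MAP@2  = {}", map_at_k(&ranked, &relevant, 2)); // 1.0
+    println!("nDCG@2 = {}", ndcg_at_k(&ranked, &relevant, 2)); // 1.0
+}
+```
+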
+Resources:
+- [Evaluating recommendation metrics](https://www.shaped.ai/blog/evaluating-recommendation-systems-map-mmr-ndcg)
+- [Math Walkthrough](https://towardsdatascience.com/demystifying-ndcg-bee3be58cfe0)
@@ -0,0 +1,114 @@
+{
+ "repo": "https://github.com/AntonOsika/gpt-engineer.git",
+ "commit": "7735a6445bae3611c62f521e6464c67c957f87c2",
+ "assertions": [
+ {
+ "query": "How do I contribute to this project?",
+ "matches": [
+ ".github/CONTRIBUTING.md:1",
+ "ROADMAP.md:48"
+ ]
+ },
+ {
+ "query": "What version of the openai package is active?",
+ "matches": [
+ "pyproject.toml:14"
+ ]
+ },
+ {
+ "query": "Ask user for clarification",
+ "matches": [
+ "gpt_engineer/steps.py:69"
+ ]
+ },
+ {
+ "query": "generate tests for python code",
+ "matches": [
+ "gpt_engineer/steps.py:153"
+ ]
+ },
+ {
+ "query": "get item from database based on key",
+ "matches": [
+ "gpt_engineer/db.py:42",
+ "gpt_engineer/db.py:68"
+ ]
+ },
+ {
+ "query": "prompt user to select files",
+ "matches": [
+ "gpt_engineer/file_selector.py:171",
+ "gpt_engineer/file_selector.py:306",
+ "gpt_engineer/file_selector.py:289",
+ "gpt_engineer/file_selector.py:234"
+ ]
+ },
+ {
+ "query": "send to rudderstack",
+ "matches": [
+ "gpt_engineer/collect.py:11",
+ "gpt_engineer/collect.py:38"
+ ]
+ },
+ {
+ "query": "parse code blocks from chat messages",
+ "matches": [
+ "gpt_engineer/chat_to_files.py:10",
+ "docs/intro/chat_parsing.md:1"
+ ]
+ },
+ {
+ "query": "how do I use the docker cli?",
+ "matches": [
+ "docker/README.md:1"
+ ]
+ },
+ {
+ "query": "ask the user if the code ran successfully?",
+ "matches": [
+ "gpt_engineer/learning.py:54"
+ ]
+ },
+ {
+ "query": "how is consent granted by the user?",
+ "matches": [
+ "gpt_engineer/learning.py:107",
+ "gpt_engineer/learning.py:130",
+ "gpt_engineer/learning.py:152"
+ ]
+ },
+ {
+ "query": "what are all the different steps the agent can take?",
+ "matches": [
+ "docs/intro/steps_module.md:1",
+ "gpt_engineer/steps.py:391"
+ ]
+ },
+ {
+ "query": "ask the user for clarification?",
+ "matches": [
+ "gpt_engineer/steps.py:69"
+ ]
+ },
+ {
+ "query": "what models are available?",
+ "matches": [
+ "gpt_engineer/ai.py:315",
+ "gpt_engineer/ai.py:341",
+ "docs/open-models.md:1"
+ ]
+ },
+ {
+ "query": "what is the current focus of the project?",
+ "matches": [
+ "ROADMAP.md:11"
+ ]
+ },
+ {
+ "query": "does the agent know how to fix code?",
+ "matches": [
+ "gpt_engineer/steps.py:367"
+ ]
+ }
+ ]
+}
@@ -0,0 +1,104 @@
+{
+ "repo": "https://github.com/tree-sitter/tree-sitter.git",
+ "commit": "46af27796a76c72d8466627d499f2bca4af958ee",
+ "assertions": [
+ {
+ "query": "What attributes are available for the tags configuration struct?",
+ "matches": [
+ "tags/src/lib.rs:24"
+ ]
+ },
+ {
+ "query": "create a new tag configuration",
+ "matches": [
+ "tags/src/lib.rs:119"
+ ]
+ },
+ {
+ "query": "generate tags based on config",
+ "matches": [
+ "tags/src/lib.rs:261"
+ ]
+ },
+ {
+ "query": "match on ts quantifier in rust",
+ "matches": [
+ "lib/binding_rust/lib.rs:139"
+ ]
+ },
+ {
+ "query": "cli command to generate tags",
+ "matches": [
+ "cli/src/tags.rs:10"
+ ]
+ },
+ {
+ "query": "what version of the tree-sitter-tags package is active?",
+ "matches": [
+ "tags/Cargo.toml:4"
+ ]
+ },
+ {
+ "query": "Insert a new parse state",
+ "matches": [
+ "cli/src/generate/build_tables/build_parse_table.rs:153"
+ ]
+ },
+ {
+ "query": "Handle conflict when numerous actions occur on the same symbol",
+ "matches": [
+ "cli/src/generate/build_tables/build_parse_table.rs:363",
+ "cli/src/generate/build_tables/build_parse_table.rs:442"
+ ]
+ },
+ {
+ "query": "Match based on associativity of actions",
+ "matches": [
+        "cli/src/generate/build_tables/build_parse_table.rs:542"
+ ]
+ },
+ {
+ "query": "Format token set display",
+ "matches": [
+ "cli/src/generate/build_tables/item.rs:246"
+ ]
+ },
+ {
+ "query": "extract choices from rule",
+ "matches": [
+ "cli/src/generate/prepare_grammar/flatten_grammar.rs:124"
+ ]
+ },
+ {
+ "query": "How do we identify if a symbol is being used?",
+ "matches": [
+ "cli/src/generate/prepare_grammar/flatten_grammar.rs:175"
+ ]
+ },
+ {
+ "query": "How do we launch the playground?",
+ "matches": [
+ "cli/src/playground.rs:46"
+ ]
+ },
+ {
+ "query": "How do we test treesitter query matches in rust?",
+ "matches": [
+ "cli/src/query_testing.rs:152",
+ "cli/src/tests/query_test.rs:781",
+ "cli/src/tests/query_test.rs:2163",
+ "cli/src/tests/query_test.rs:3781",
+ "cli/src/tests/query_test.rs:887"
+ ]
+ },
+ {
+ "query": "What does the CLI do?",
+ "matches": [
+ "cli/README.md:10",
+ "cli/loader/README.md:3",
+ "docs/section-5-implementation.md:14",
+ "docs/section-5-implementation.md:18"
+ ]
+ }
+ ]
+}
@@ -0,0 +1,603 @@
+use crate::{
+ parsing::{Span, SpanDigest},
+ SEMANTIC_INDEX_VERSION,
+};
+use ai::embedding::Embedding;
+use anyhow::{anyhow, Context, Result};
+use collections::HashMap;
+use futures::channel::oneshot;
+use gpui::BackgroundExecutor;
+use ndarray::{Array1, Array2};
+use ordered_float::OrderedFloat;
+use project::Fs;
+use rpc::proto::Timestamp;
+use rusqlite::params;
+use rusqlite::types::Value;
+use std::{
+ future::Future,
+ ops::Range,
+ path::{Path, PathBuf},
+ rc::Rc,
+ sync::Arc,
+ time::SystemTime,
+};
+use util::{paths::PathMatcher, TryFutureExt};
+
+pub fn argsort<T: Ord>(data: &[T]) -> Vec<usize> {
+ let mut indices = (0..data.len()).collect::<Vec<_>>();
+ indices.sort_by_key(|&i| &data[i]);
+ indices.reverse();
+ indices
+}
+
+#[derive(Debug)]
+pub struct FileRecord {
+ pub id: usize,
+ pub relative_path: String,
+ pub mtime: Timestamp,
+}
+
+#[derive(Clone)]
+pub struct VectorDatabase {
+ path: Arc<Path>,
+ transactions:
+ smol::channel::Sender<Box<dyn 'static + Send + FnOnce(&mut rusqlite::Connection)>>,
+}
+
+impl VectorDatabase {
+ pub async fn new(
+ fs: Arc<dyn Fs>,
+ path: Arc<Path>,
+ executor: BackgroundExecutor,
+ ) -> Result<Self> {
+ if let Some(db_directory) = path.parent() {
+ fs.create_dir(db_directory).await?;
+ }
+
+ let (transactions_tx, transactions_rx) = smol::channel::unbounded::<
+ Box<dyn 'static + Send + FnOnce(&mut rusqlite::Connection)>,
+ >();
+ executor
+ .spawn({
+ let path = path.clone();
+ async move {
+ let mut connection = rusqlite::Connection::open(&path)?;
+
+ connection.pragma_update(None, "journal_mode", "wal")?;
+ connection.pragma_update(None, "synchronous", "normal")?;
+ connection.pragma_update(None, "cache_size", 1000000)?;
+ connection.pragma_update(None, "temp_store", "MEMORY")?;
+
+ while let Ok(transaction) = transactions_rx.recv().await {
+ transaction(&mut connection);
+ }
+
+ anyhow::Ok(())
+ }
+ .log_err()
+ })
+ .detach();
+ let this = Self {
+ transactions: transactions_tx,
+ path,
+ };
+ this.initialize_database().await?;
+ Ok(this)
+ }
+
+ pub fn path(&self) -> &Arc<Path> {
+ &self.path
+ }
+
+ fn transact<F, T>(&self, f: F) -> impl Future<Output = Result<T>>
+ where
+ F: 'static + Send + FnOnce(&rusqlite::Transaction) -> Result<T>,
+ T: 'static + Send,
+ {
+ let (tx, rx) = oneshot::channel();
+ let transactions = self.transactions.clone();
+ async move {
+ if transactions
+ .send(Box::new(|connection| {
+ let result = connection
+ .transaction()
+ .map_err(|err| anyhow!(err))
+ .and_then(|transaction| {
+ let result = f(&transaction)?;
+ transaction.commit()?;
+ Ok(result)
+ });
+ let _ = tx.send(result);
+ }))
+ .await
+ .is_err()
+ {
+ return Err(anyhow!("connection was dropped"))?;
+ }
+ rx.await?
+ }
+ }
+
+ fn initialize_database(&self) -> impl Future<Output = Result<()>> {
+ self.transact(|db| {
+ rusqlite::vtab::array::load_module(&db)?;
+
+ // Delete existing tables, if SEMANTIC_INDEX_VERSION is bumped
+ let version_query = db.prepare("SELECT version from semantic_index_config");
+ let version = version_query
+ .and_then(|mut query| query.query_row([], |row| Ok(row.get::<_, i64>(0)?)));
+ if version.map_or(false, |version| version == SEMANTIC_INDEX_VERSION as i64) {
+ log::trace!("vector database schema up to date");
+ return Ok(());
+ }
+
+ log::trace!("vector database schema out of date. updating...");
+ // We renamed the `documents` table to `spans`, so we want to drop
+ // `documents` without recreating it if it exists.
+ db.execute("DROP TABLE IF EXISTS documents", [])
+ .context("failed to drop 'documents' table")?;
+ db.execute("DROP TABLE IF EXISTS spans", [])
+ .context("failed to drop 'spans' table")?;
+ db.execute("DROP TABLE IF EXISTS files", [])
+ .context("failed to drop 'files' table")?;
+ db.execute("DROP TABLE IF EXISTS worktrees", [])
+ .context("failed to drop 'worktrees' table")?;
+ db.execute("DROP TABLE IF EXISTS semantic_index_config", [])
+ .context("failed to drop 'semantic_index_config' table")?;
+
+            // Initialize the vector database tables
+ db.execute(
+ "CREATE TABLE semantic_index_config (
+ version INTEGER NOT NULL
+ )",
+ [],
+ )?;
+
+ db.execute(
+ "INSERT INTO semantic_index_config (version) VALUES (?1)",
+ params![SEMANTIC_INDEX_VERSION],
+ )?;
+
+ db.execute(
+ "CREATE TABLE worktrees (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ absolute_path VARCHAR NOT NULL
+ );
+ CREATE UNIQUE INDEX worktrees_absolute_path ON worktrees (absolute_path);
+ ",
+ [],
+ )?;
+
+ db.execute(
+ "CREATE TABLE files (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ worktree_id INTEGER NOT NULL,
+ relative_path VARCHAR NOT NULL,
+ mtime_seconds INTEGER NOT NULL,
+ mtime_nanos INTEGER NOT NULL,
+ FOREIGN KEY(worktree_id) REFERENCES worktrees(id) ON DELETE CASCADE
+ )",
+ [],
+ )?;
+
+ db.execute(
+ "CREATE UNIQUE INDEX files_worktree_id_and_relative_path ON files (worktree_id, relative_path)",
+ [],
+ )?;
+
+ db.execute(
+ "CREATE TABLE spans (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ file_id INTEGER NOT NULL,
+ start_byte INTEGER NOT NULL,
+ end_byte INTEGER NOT NULL,
+ name VARCHAR NOT NULL,
+ embedding BLOB NOT NULL,
+ digest BLOB NOT NULL,
+ FOREIGN KEY(file_id) REFERENCES files(id) ON DELETE CASCADE
+ )",
+ [],
+ )?;
+ db.execute(
+ "CREATE INDEX spans_digest ON spans (digest)",
+ [],
+ )?;
+
+ log::trace!("vector database initialized with updated schema.");
+ Ok(())
+ })
+ }
+
+ pub fn delete_file(
+ &self,
+ worktree_id: i64,
+ delete_path: Arc<Path>,
+ ) -> impl Future<Output = Result<()>> {
+ self.transact(move |db| {
+ db.execute(
+ "DELETE FROM files WHERE worktree_id = ?1 AND relative_path = ?2",
+ params![worktree_id, delete_path.to_str()],
+ )?;
+ Ok(())
+ })
+ }
+
+ pub fn insert_file(
+ &self,
+ worktree_id: i64,
+ path: Arc<Path>,
+ mtime: SystemTime,
+ spans: Vec<Span>,
+ ) -> impl Future<Output = Result<()>> {
+ self.transact(move |db| {
+            // Replace any existing row for this worktree path, updating its mtime
+ let mtime = Timestamp::from(mtime);
+
+ db.execute(
+ "
+ REPLACE INTO files
+ (worktree_id, relative_path, mtime_seconds, mtime_nanos)
+ VALUES (?1, ?2, ?3, ?4)
+ ",
+ params![worktree_id, path.to_str(), mtime.seconds, mtime.nanos],
+ )?;
+
+ let file_id = db.last_insert_rowid();
+
+ let mut query = db.prepare(
+ "
+ INSERT INTO spans
+ (file_id, start_byte, end_byte, name, embedding, digest)
+ VALUES (?1, ?2, ?3, ?4, ?5, ?6)
+ ",
+ )?;
+
+ for span in spans {
+ query.execute(params![
+ file_id,
+ span.range.start.to_string(),
+ span.range.end.to_string(),
+ span.name,
+ span.embedding,
+ span.digest
+ ])?;
+ }
+
+ Ok(())
+ })
+ }
+
+ pub fn worktree_previously_indexed(
+ &self,
+ worktree_root_path: &Path,
+ ) -> impl Future<Output = Result<bool>> {
+ let worktree_root_path = worktree_root_path.to_string_lossy().into_owned();
+ self.transact(move |db| {
+ let mut worktree_query =
+ db.prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
+ let worktree_id = worktree_query
+ .query_row(params![worktree_root_path], |row| Ok(row.get::<_, i64>(0)?));
+
+            Ok(worktree_id.is_ok())
+ })
+ }
+
+ pub fn embeddings_for_digests(
+ &self,
+ digests: Vec<SpanDigest>,
+ ) -> impl Future<Output = Result<HashMap<SpanDigest, Embedding>>> {
+ self.transact(move |db| {
+ let mut query = db.prepare(
+ "
+ SELECT digest, embedding
+ FROM spans
+ WHERE digest IN rarray(?)
+ ",
+ )?;
+ let mut embeddings_by_digest = HashMap::default();
+ let digests = Rc::new(
+ digests
+ .into_iter()
+ .map(|p| Value::Blob(p.0.to_vec()))
+ .collect::<Vec<_>>(),
+ );
+ let rows = query.query_map(params![digests], |row| {
+ Ok((row.get::<_, SpanDigest>(0)?, row.get::<_, Embedding>(1)?))
+ })?;
+
+ for row in rows {
+ if let Ok(row) = row {
+ embeddings_by_digest.insert(row.0, row.1);
+ }
+ }
+
+ Ok(embeddings_by_digest)
+ })
+ }
+
+ pub fn embeddings_for_files(
+ &self,
+ worktree_id_file_paths: HashMap<i64, Vec<Arc<Path>>>,
+ ) -> impl Future<Output = Result<HashMap<SpanDigest, Embedding>>> {
+ self.transact(move |db| {
+ let mut query = db.prepare(
+ "
+ SELECT digest, embedding
+ FROM spans
+ LEFT JOIN files ON files.id = spans.file_id
+ WHERE files.worktree_id = ? AND files.relative_path IN rarray(?)
+ ",
+ )?;
+ let mut embeddings_by_digest = HashMap::default();
+ for (worktree_id, file_paths) in worktree_id_file_paths {
+ let file_paths = Rc::new(
+ file_paths
+ .into_iter()
+ .map(|p| Value::Text(p.to_string_lossy().into_owned()))
+ .collect::<Vec<_>>(),
+ );
+ let rows = query.query_map(params![worktree_id, file_paths], |row| {
+ Ok((row.get::<_, SpanDigest>(0)?, row.get::<_, Embedding>(1)?))
+ })?;
+
+ for row in rows {
+ if let Ok(row) = row {
+ embeddings_by_digest.insert(row.0, row.1);
+ }
+ }
+ }
+
+ Ok(embeddings_by_digest)
+ })
+ }
+
+ pub fn find_or_create_worktree(
+ &self,
+ worktree_root_path: Arc<Path>,
+ ) -> impl Future<Output = Result<i64>> {
+ self.transact(move |db| {
+ let mut worktree_query =
+ db.prepare("SELECT id FROM worktrees WHERE absolute_path = ?1")?;
+ let worktree_id = worktree_query
+ .query_row(params![worktree_root_path.to_string_lossy()], |row| {
+ Ok(row.get::<_, i64>(0)?)
+ });
+
+ if worktree_id.is_ok() {
+ return Ok(worktree_id?);
+ }
+
+ // If worktree_id is Err, insert new worktree
+ db.execute(
+ "INSERT into worktrees (absolute_path) VALUES (?1)",
+ params![worktree_root_path.to_string_lossy()],
+ )?;
+ Ok(db.last_insert_rowid())
+ })
+ }
+
+ pub fn get_file_mtimes(
+ &self,
+ worktree_id: i64,
+ ) -> impl Future<Output = Result<HashMap<PathBuf, SystemTime>>> {
+ self.transact(move |db| {
+ let mut statement = db.prepare(
+ "
+ SELECT relative_path, mtime_seconds, mtime_nanos
+ FROM files
+ WHERE worktree_id = ?1
+ ORDER BY relative_path",
+ )?;
+ let mut result: HashMap<PathBuf, SystemTime> = HashMap::default();
+ for row in statement.query_map(params![worktree_id], |row| {
+ Ok((
+ row.get::<_, String>(0)?.into(),
+ Timestamp {
+ seconds: row.get(1)?,
+ nanos: row.get(2)?,
+ }
+ .into(),
+ ))
+ })? {
+ let row = row?;
+ result.insert(row.0, row.1);
+ }
+ Ok(result)
+ })
+ }
+
+ pub fn top_k_search(
+ &self,
+ query_embedding: &Embedding,
+ limit: usize,
+ file_ids: &[i64],
+ ) -> impl Future<Output = Result<Vec<(i64, OrderedFloat<f32>)>>> {
+ let file_ids = file_ids.to_vec();
+ let query = query_embedding.clone().0;
+ let query = Array1::from_vec(query);
+ self.transact(move |db| {
+ let mut query_statement = db.prepare(
+ "
+ SELECT
+ id, embedding
+ FROM
+ spans
+ WHERE
+ file_id IN rarray(?)
+ ",
+ )?;
+
+ let deserialized_rows = query_statement
+ .query_map(params![ids_to_sql(&file_ids)], |row| {
+ Ok((row.get::<_, usize>(0)?, row.get::<_, Embedding>(1)?))
+ })?
+ .filter_map(|row| row.ok())
+ .collect::<Vec<(usize, Embedding)>>();
+
+            if deserialized_rows.is_empty() {
+ return Ok(Vec::new());
+ }
+
+ // Get Length of Embeddings Returned
+ let embedding_len = deserialized_rows[0].1 .0.len();
+
+ let batch_n = 1000;
+ let mut batches = Vec::new();
+ let mut batch_ids = Vec::new();
+ let mut batch_embeddings: Vec<f32> = Vec::new();
+ deserialized_rows.iter().for_each(|(id, embedding)| {
+ batch_ids.push(id);
+ batch_embeddings.extend(&embedding.0);
+
+ if batch_ids.len() == batch_n {
+ let embeddings = std::mem::take(&mut batch_embeddings);
+ let ids = std::mem::take(&mut batch_ids);
+ let array =
+ Array2::from_shape_vec((ids.len(), embedding_len.clone()), embeddings);
+ match array {
+ Ok(array) => {
+ batches.push((ids, array));
+ }
+ Err(err) => log::error!("Failed to deserialize to ndarray: {:?}", err),
+ }
+ }
+ });
+
+            if !batch_ids.is_empty() {
+ let array = Array2::from_shape_vec(
+ (batch_ids.len(), embedding_len),
+ batch_embeddings.clone(),
+ );
+ match array {
+ Ok(array) => {
+ batches.push((batch_ids.clone(), array));
+ }
+ Err(err) => log::error!("Failed to deserialize to ndarray: {:?}", err),
+ }
+ }
+
+ let mut ids: Vec<usize> = Vec::new();
+ let mut results = Vec::new();
+ for (batch_ids, array) in batches {
+ let scores = array
+ .dot(&query.t())
+ .to_vec()
+ .iter()
+ .map(|score| OrderedFloat(*score))
+ .collect::<Vec<OrderedFloat<f32>>>();
+ results.extend(scores);
+ ids.extend(batch_ids);
+ }
+
+ let sorted_idx = argsort(&results);
+ let mut sorted_results = Vec::new();
+ let last_idx = limit.min(sorted_idx.len());
+ for idx in &sorted_idx[0..last_idx] {
+ sorted_results.push((ids[*idx] as i64, results[*idx]))
+ }
+
+ Ok(sorted_results)
+ })
+ }
+
+ pub fn retrieve_included_file_ids(
+ &self,
+ worktree_ids: &[i64],
+ includes: &[PathMatcher],
+ excludes: &[PathMatcher],
+ ) -> impl Future<Output = Result<Vec<i64>>> {
+ let worktree_ids = worktree_ids.to_vec();
+ let includes = includes.to_vec();
+ let excludes = excludes.to_vec();
+ self.transact(move |db| {
+ let mut file_query = db.prepare(
+ "
+ SELECT
+ id, relative_path
+ FROM
+ files
+ WHERE
+ worktree_id IN rarray(?)
+ ",
+ )?;
+
+ let mut file_ids = Vec::<i64>::new();
+ let mut rows = file_query.query([ids_to_sql(&worktree_ids)])?;
+
+ while let Some(row) = rows.next()? {
+ let file_id = row.get(0)?;
+ let relative_path = row.get_ref(1)?.as_str()?;
+ let included =
+ includes.is_empty() || includes.iter().any(|glob| glob.is_match(relative_path));
+ let excluded = excludes.iter().any(|glob| glob.is_match(relative_path));
+ if included && !excluded {
+ file_ids.push(file_id);
+ }
+ }
+
+ anyhow::Ok(file_ids)
+ })
+ }
+
+ pub fn spans_for_ids(
+ &self,
+ ids: &[i64],
+ ) -> impl Future<Output = Result<Vec<(i64, PathBuf, Range<usize>)>>> {
+ let ids = ids.to_vec();
+ self.transact(move |db| {
+ let mut statement = db.prepare(
+ "
+ SELECT
+ spans.id,
+ files.worktree_id,
+ files.relative_path,
+ spans.start_byte,
+ spans.end_byte
+ FROM
+ spans, files
+ WHERE
+ spans.file_id = files.id AND
+ spans.id in rarray(?)
+ ",
+ )?;
+
+ let result_iter = statement.query_map(params![ids_to_sql(&ids)], |row| {
+ Ok((
+ row.get::<_, i64>(0)?,
+ row.get::<_, i64>(1)?,
+ row.get::<_, String>(2)?.into(),
+ row.get(3)?..row.get(4)?,
+ ))
+ })?;
+
+ let mut values_by_id = HashMap::<i64, (i64, PathBuf, Range<usize>)>::default();
+ for row in result_iter {
+ let (id, worktree_id, path, range) = row?;
+ values_by_id.insert(id, (worktree_id, path, range));
+ }
+
+ let mut results = Vec::with_capacity(ids.len());
+ for id in &ids {
+ let value = values_by_id
+ .remove(id)
+ .ok_or(anyhow!("missing span id {}", id))?;
+ results.push(value);
+ }
+
+ Ok(results)
+ })
+ }
+}
+
+fn ids_to_sql(ids: &[i64]) -> Rc<Vec<rusqlite::types::Value>> {
+ Rc::new(
+ ids.iter()
+ .copied()
+ .map(|v| rusqlite::types::Value::from(v))
+ .collect::<Vec<_>>(),
+ )
+}
@@ -0,0 +1,169 @@
+use crate::{parsing::Span, JobHandle};
+use ai::embedding::EmbeddingProvider;
+use gpui::BackgroundExecutor;
+use parking_lot::Mutex;
+use smol::channel;
+use std::{mem, ops::Range, path::Path, sync::Arc, time::SystemTime};
+
+#[derive(Clone)]
+pub struct FileToEmbed {
+ pub worktree_id: i64,
+ pub path: Arc<Path>,
+ pub mtime: SystemTime,
+ pub spans: Vec<Span>,
+ pub job_handle: JobHandle,
+}
+
+impl std::fmt::Debug for FileToEmbed {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("FileToEmbed")
+ .field("worktree_id", &self.worktree_id)
+ .field("path", &self.path)
+ .field("mtime", &self.mtime)
+ .field("spans", &self.spans)
+ .finish_non_exhaustive()
+ }
+}
+
+impl PartialEq for FileToEmbed {
+ fn eq(&self, other: &Self) -> bool {
+ self.worktree_id == other.worktree_id
+ && self.path == other.path
+ && self.mtime == other.mtime
+ && self.spans == other.spans
+ }
+}
+
+pub struct EmbeddingQueue {
+ embedding_provider: Arc<dyn EmbeddingProvider>,
+ pending_batch: Vec<FileFragmentToEmbed>,
+ executor: BackgroundExecutor,
+ pending_batch_token_count: usize,
+ finished_files_tx: channel::Sender<FileToEmbed>,
+ finished_files_rx: channel::Receiver<FileToEmbed>,
+}
+
+#[derive(Clone)]
+pub struct FileFragmentToEmbed {
+ file: Arc<Mutex<FileToEmbed>>,
+ span_range: Range<usize>,
+}
+
+impl EmbeddingQueue {
+ pub fn new(
+ embedding_provider: Arc<dyn EmbeddingProvider>,
+ executor: BackgroundExecutor,
+ ) -> Self {
+ let (finished_files_tx, finished_files_rx) = channel::unbounded();
+ Self {
+ embedding_provider,
+ executor,
+ pending_batch: Vec::new(),
+ pending_batch_token_count: 0,
+ finished_files_tx,
+ finished_files_rx,
+ }
+ }
+
+ pub fn push(&mut self, file: FileToEmbed) {
+ if file.spans.is_empty() {
+ self.finished_files_tx.try_send(file).unwrap();
+ return;
+ }
+
+ let file = Arc::new(Mutex::new(file));
+
+ self.pending_batch.push(FileFragmentToEmbed {
+ file: file.clone(),
+ span_range: 0..0,
+ });
+
+ let mut fragment_range = &mut self.pending_batch.last_mut().unwrap().span_range;
+ for (ix, span) in file.lock().spans.iter().enumerate() {
+ let span_token_count = if span.embedding.is_none() {
+ span.token_count
+ } else {
+ 0
+ };
+
+ let next_token_count = self.pending_batch_token_count + span_token_count;
+ if next_token_count > self.embedding_provider.max_tokens_per_batch() {
+ let range_end = fragment_range.end;
+ self.flush();
+ self.pending_batch.push(FileFragmentToEmbed {
+ file: file.clone(),
+ span_range: range_end..range_end,
+ });
+ fragment_range = &mut self.pending_batch.last_mut().unwrap().span_range;
+ }
+
+ fragment_range.end = ix + 1;
+ self.pending_batch_token_count += span_token_count;
+ }
+ }
+
+ pub fn flush(&mut self) {
+ let batch = mem::take(&mut self.pending_batch);
+ self.pending_batch_token_count = 0;
+ if batch.is_empty() {
+ return;
+ }
+
+ let finished_files_tx = self.finished_files_tx.clone();
+ let embedding_provider = self.embedding_provider.clone();
+
+ self.executor
+ .spawn(async move {
+ let mut spans = Vec::new();
+ for fragment in &batch {
+ let file = fragment.file.lock();
+ spans.extend(
+ file.spans[fragment.span_range.clone()]
+ .iter()
+ .filter(|d| d.embedding.is_none())
+ .map(|d| d.content.clone()),
+ );
+ }
+
+                // If there is nothing left to embed, send each file to the finished-files
+                // channel as soon as its last fragment is dropped.
+ if spans.is_empty() {
+ for fragment in batch.clone() {
+ if let Some(file) = Arc::into_inner(fragment.file) {
+ finished_files_tx.try_send(file.into_inner()).unwrap();
+ }
+ }
+ return;
+ };
+
+ match embedding_provider.embed_batch(spans).await {
+ Ok(embeddings) => {
+ let mut embeddings = embeddings.into_iter();
+ for fragment in batch {
+ for span in &mut fragment.file.lock().spans[fragment.span_range.clone()]
+ .iter_mut()
+ .filter(|d| d.embedding.is_none())
+ {
+ if let Some(embedding) = embeddings.next() {
+ span.embedding = Some(embedding);
+ } else {
+ log::error!("number of embeddings != number of documents");
+ }
+ }
+
+ if let Some(file) = Arc::into_inner(fragment.file) {
+ finished_files_tx.try_send(file.into_inner()).unwrap();
+ }
+ }
+ }
+ Err(error) => {
+ log::error!("{:?}", error);
+ }
+ }
+ })
+ .detach();
+ }
+
+ pub fn finished_files(&self) -> channel::Receiver<FileToEmbed> {
+ self.finished_files_rx.clone()
+ }
+}
@@ -0,0 +1,414 @@
+use ai::{
+ embedding::{Embedding, EmbeddingProvider},
+ models::TruncationDirection,
+};
+use anyhow::{anyhow, Result};
+use language::{Grammar, Language};
+use rusqlite::{
+ types::{FromSql, FromSqlResult, ToSqlOutput, ValueRef},
+ ToSql,
+};
+use sha1::{Digest, Sha1};
+use std::{
+ borrow::Cow,
+ cmp::{self, Reverse},
+ collections::HashSet,
+ ops::Range,
+ path::Path,
+ sync::Arc,
+};
+use tree_sitter::{Parser, QueryCursor};
+
+#[derive(Debug, PartialEq, Eq, Clone, Hash)]
+pub struct SpanDigest(pub [u8; 20]);
+
+impl FromSql for SpanDigest {
+ fn column_result(value: ValueRef) -> FromSqlResult<Self> {
+ let blob = value.as_blob()?;
+ let bytes =
+ blob.try_into()
+ .map_err(|_| rusqlite::types::FromSqlError::InvalidBlobSize {
+ expected_size: 20,
+ blob_size: blob.len(),
+ })?;
+ return Ok(SpanDigest(bytes));
+ }
+}
+
+impl ToSql for SpanDigest {
+ fn to_sql(&self) -> rusqlite::Result<ToSqlOutput> {
+ self.0.to_sql()
+ }
+}
+
+impl From<&'_ str> for SpanDigest {
+ fn from(value: &'_ str) -> Self {
+ let mut sha1 = Sha1::new();
+ sha1.update(value);
+ Self(sha1.finalize().into())
+ }
+}
+
+#[derive(Debug, PartialEq, Clone)]
+pub struct Span {
+ pub name: String,
+ pub range: Range<usize>,
+ pub content: String,
+ pub embedding: Option<Embedding>,
+ pub digest: SpanDigest,
+ pub token_count: usize,
+}
+
+const CODE_CONTEXT_TEMPLATE: &str =
+ "The below code snippet is from file '<path>'\n\n```<language>\n<item>\n```";
+const ENTIRE_FILE_TEMPLATE: &str =
+ "The below snippet is from file '<path>'\n\n```<language>\n<item>\n```";
+const MARKDOWN_CONTEXT_TEMPLATE: &str = "The below file contents is from file '<path>'\n\n<item>";
+pub const PARSEABLE_ENTIRE_FILE_TYPES: &[&str] = &[
+ "TOML", "YAML", "CSS", "HEEX", "ERB", "SVELTE", "HTML", "Scheme",
+];
+
+pub struct CodeContextRetriever {
+ pub parser: Parser,
+ pub cursor: QueryCursor,
+ pub embedding_provider: Arc<dyn EmbeddingProvider>,
+}
+
+// Every match has an item capture; this represents the fundamental tree-sitter symbol and anchors the search.
+// Every match has one or more 'name' captures. These indicate the display range of the item, used for deduplication.
+// If there are preceding comments, we track them with a context capture.
+// If there is a piece that should be collapsed in hierarchical queries, we capture it with a collapse capture.
+// If there is a piece that should be kept inside a collapsed node, we capture it with a keep capture.
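+//
+// For illustration only (hypothetical pattern; the real queries ship with each
+// language's embedding query file), a Rust query using these captures might be:
+//   (
+//     (line_comment)* @context
+//     .
+//     (function_item name: (_) @name) @item
+//   )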
+#[derive(Debug, Clone)]
+pub struct CodeContextMatch {
+ pub start_col: usize,
+ pub item_range: Option<Range<usize>>,
+ pub name_range: Option<Range<usize>>,
+ pub context_ranges: Vec<Range<usize>>,
+ pub collapse_ranges: Vec<Range<usize>>,
+}
+
+impl CodeContextRetriever {
+ pub fn new(embedding_provider: Arc<dyn EmbeddingProvider>) -> Self {
+ Self {
+ parser: Parser::new(),
+ cursor: QueryCursor::new(),
+ embedding_provider,
+ }
+ }
+
+ fn parse_entire_file(
+ &self,
+ relative_path: Option<&Path>,
+ language_name: Arc<str>,
+ content: &str,
+ ) -> Result<Vec<Span>> {
+ let document_span = ENTIRE_FILE_TEMPLATE
+ .replace(
+ "<path>",
+ &relative_path.map_or(Cow::Borrowed("untitled"), |path| path.to_string_lossy()),
+ )
+ .replace("<language>", language_name.as_ref())
+ .replace("<item>", &content);
+ let digest = SpanDigest::from(document_span.as_str());
+ let model = self.embedding_provider.base_model();
+ let document_span = model.truncate(
+ &document_span,
+ model.capacity()?,
+ ai::models::TruncationDirection::End,
+ )?;
+ let token_count = model.count_tokens(&document_span)?;
+
+ Ok(vec![Span {
+ range: 0..content.len(),
+ content: document_span,
+ embedding: Default::default(),
+ name: language_name.to_string(),
+ digest,
+ token_count,
+ }])
+ }
+
+ fn parse_markdown_file(
+ &self,
+ relative_path: Option<&Path>,
+ content: &str,
+ ) -> Result<Vec<Span>> {
+ let document_span = MARKDOWN_CONTEXT_TEMPLATE
+ .replace(
+ "<path>",
+ &relative_path.map_or(Cow::Borrowed("untitled"), |path| path.to_string_lossy()),
+ )
+ .replace("<item>", &content);
+ let digest = SpanDigest::from(document_span.as_str());
+
+ let model = self.embedding_provider.base_model();
+ let document_span = model.truncate(
+ &document_span,
+ model.capacity()?,
+ ai::models::TruncationDirection::End,
+ )?;
+ let token_count = model.count_tokens(&document_span)?;
+
+ Ok(vec![Span {
+ range: 0..content.len(),
+ content: document_span,
+ embedding: None,
+ name: "Markdown".to_string(),
+ digest,
+ token_count,
+ }])
+ }
+
+ fn get_matches_in_file(
+ &mut self,
+ content: &str,
+ grammar: &Arc<Grammar>,
+ ) -> Result<Vec<CodeContextMatch>> {
+ let embedding_config = grammar
+ .embedding_config
+ .as_ref()
+ .ok_or_else(|| anyhow!("no embedding queries"))?;
+ self.parser.set_language(grammar.ts_language).unwrap();
+
+ let tree = self
+ .parser
+ .parse(&content, None)
+ .ok_or_else(|| anyhow!("parsing failed"))?;
+
+ let mut captures: Vec<CodeContextMatch> = Vec::new();
+ let mut collapse_ranges: Vec<Range<usize>> = Vec::new();
+ let mut keep_ranges: Vec<Range<usize>> = Vec::new();
+ for mat in self.cursor.matches(
+ &embedding_config.query,
+ tree.root_node(),
+ content.as_bytes(),
+ ) {
+ let mut start_col = 0;
+ let mut item_range: Option<Range<usize>> = None;
+ let mut name_range: Option<Range<usize>> = None;
+ let mut context_ranges: Vec<Range<usize>> = Vec::new();
+ collapse_ranges.clear();
+ keep_ranges.clear();
+ for capture in mat.captures {
+ if capture.index == embedding_config.item_capture_ix {
+ item_range = Some(capture.node.byte_range());
+ start_col = capture.node.start_position().column;
+ } else if Some(capture.index) == embedding_config.name_capture_ix {
+ name_range = Some(capture.node.byte_range());
+ } else if Some(capture.index) == embedding_config.context_capture_ix {
+ context_ranges.push(capture.node.byte_range());
+ } else if Some(capture.index) == embedding_config.collapse_capture_ix {
+ collapse_ranges.push(capture.node.byte_range());
+ } else if Some(capture.index) == embedding_config.keep_capture_ix {
+ keep_ranges.push(capture.node.byte_range());
+ }
+ }
+
+ captures.push(CodeContextMatch {
+ start_col,
+ item_range,
+ name_range,
+ context_ranges,
+ collapse_ranges: subtract_ranges(&collapse_ranges, &keep_ranges),
+ });
+ }
+ Ok(captures)
+ }
+
+ pub fn parse_file_with_template(
+ &mut self,
+ relative_path: Option<&Path>,
+ content: &str,
+ language: Arc<Language>,
+ ) -> Result<Vec<Span>> {
+ let language_name = language.name();
+
+ if PARSEABLE_ENTIRE_FILE_TYPES.contains(&language_name.as_ref()) {
+ return self.parse_entire_file(relative_path, language_name, &content);
+ } else if ["Markdown", "Plain Text"].contains(&language_name.as_ref()) {
+ return self.parse_markdown_file(relative_path, &content);
+ }
+
+ let mut spans = self.parse_file(content, language)?;
+ for span in &mut spans {
+ let document_content = CODE_CONTEXT_TEMPLATE
+ .replace(
+ "<path>",
+ &relative_path.map_or(Cow::Borrowed("untitled"), |path| path.to_string_lossy()),
+ )
+ .replace("<language>", language_name.as_ref())
+                .replace("<item>", &span.content);
+
+ let model = self.embedding_provider.base_model();
+ let document_content = model.truncate(
+ &document_content,
+ model.capacity()?,
+ TruncationDirection::End,
+ )?;
+ let token_count = model.count_tokens(&document_content)?;
+
+ span.content = document_content;
+ span.token_count = token_count;
+ }
+ Ok(spans)
+ }
+
+ pub fn parse_file(&mut self, content: &str, language: Arc<Language>) -> Result<Vec<Span>> {
+ let grammar = language
+ .grammar()
+ .ok_or_else(|| anyhow!("no grammar for language"))?;
+
+ // Iterate through query matches
+ let matches = self.get_matches_in_file(content, grammar)?;
+
+ let language_scope = language.default_scope();
+ let placeholder = language_scope.collapsed_placeholder();
+
+ let mut spans = Vec::new();
+ let mut collapsed_ranges_within = Vec::new();
+ let mut parsed_name_ranges = HashSet::new();
+ for (i, context_match) in matches.iter().enumerate() {
+ // Items which are collapsible but not embeddable have no item range
+ let item_range = if let Some(item_range) = context_match.item_range.clone() {
+ item_range
+ } else {
+ continue;
+ };
+
+ // Checks for deduplication
+ let name;
+ if let Some(name_range) = context_match.name_range.clone() {
+ name = content
+ .get(name_range.clone())
+ .map_or(String::new(), |s| s.to_string());
+ if parsed_name_ranges.contains(&name_range) {
+ continue;
+ }
+ parsed_name_ranges.insert(name_range);
+ } else {
+ name = String::new();
+ }
+
+ collapsed_ranges_within.clear();
+ 'outer: for remaining_match in &matches[(i + 1)..] {
+ for collapsed_range in &remaining_match.collapse_ranges {
+ if item_range.start <= collapsed_range.start
+ && item_range.end >= collapsed_range.end
+ {
+ collapsed_ranges_within.push(collapsed_range.clone());
+ } else {
+ break 'outer;
+ }
+ }
+ }
+
+ collapsed_ranges_within.sort_by_key(|r| (r.start, Reverse(r.end)));
+
+ let mut span_content = String::new();
+ for context_range in &context_match.context_ranges {
+ add_content_from_range(
+ &mut span_content,
+ content,
+ context_range.clone(),
+ context_match.start_col,
+ );
+ span_content.push_str("\n");
+ }
+
+ let mut offset = item_range.start;
+ for collapsed_range in &collapsed_ranges_within {
+ if collapsed_range.start > offset {
+ add_content_from_range(
+ &mut span_content,
+ content,
+ offset..collapsed_range.start,
+ context_match.start_col,
+ );
+ offset = collapsed_range.start;
+ }
+
+ if collapsed_range.end > offset {
+ span_content.push_str(placeholder);
+ offset = collapsed_range.end;
+ }
+ }
+
+ if offset < item_range.end {
+ add_content_from_range(
+ &mut span_content,
+ content,
+ offset..item_range.end,
+ context_match.start_col,
+ );
+ }
+
+ let sha1 = SpanDigest::from(span_content.as_str());
+ spans.push(Span {
+ name,
+ content: span_content,
+ range: item_range.clone(),
+ embedding: None,
+ digest: sha1,
+ token_count: 0,
+ })
+ }
+
+ return Ok(spans);
+ }
+}
+
+pub(crate) fn subtract_ranges(
+ ranges: &[Range<usize>],
+ ranges_to_subtract: &[Range<usize>],
+) -> Vec<Range<usize>> {
+ let mut result = Vec::new();
+
+ let mut ranges_to_subtract = ranges_to_subtract.iter().peekable();
+
+ for range in ranges {
+ let mut offset = range.start;
+
+ while offset < range.end {
+ if let Some(range_to_subtract) = ranges_to_subtract.peek() {
+ if offset < range_to_subtract.start {
+ let next_offset = cmp::min(range_to_subtract.start, range.end);
+ result.push(offset..next_offset);
+ offset = next_offset;
+ } else {
+ let next_offset = cmp::min(range_to_subtract.end, range.end);
+ offset = next_offset;
+ }
+
+ if offset >= range_to_subtract.end {
+ ranges_to_subtract.next();
+ }
+ } else {
+ result.push(offset..range.end);
+ offset = range.end;
+ }
+ }
+ }
+
+ result
+}
+
+fn add_content_from_range(
+ output: &mut String,
+ content: &str,
+ range: Range<usize>,
+ start_col: usize,
+) {
+ for mut line in content.get(range.clone()).unwrap_or("").lines() {
+ for _ in 0..start_col {
+ if line.starts_with(' ') {
+ line = &line[1..];
+ } else {
+ break;
+ }
+ }
+ output.push_str(line);
+ output.push('\n');
+ }
+ output.pop();
+}
@@ -0,0 +1,1280 @@
+mod db;
+mod embedding_queue;
+mod parsing;
+pub mod semantic_index_settings;
+
+#[cfg(test)]
+mod semantic_index_tests;
+
+use crate::semantic_index_settings::SemanticIndexSettings;
+use ai::embedding::{Embedding, EmbeddingProvider};
+use ai::providers::open_ai::OpenAIEmbeddingProvider;
+use anyhow::{anyhow, Context as _, Result};
+use collections::{BTreeMap, HashMap, HashSet};
+use db::VectorDatabase;
+use embedding_queue::{EmbeddingQueue, FileToEmbed};
+use futures::{future, FutureExt, StreamExt};
+use gpui::{
+ AppContext, AsyncAppContext, BorrowWindow, Context, Model, ModelContext, Task, ViewContext,
+ WeakModel,
+};
+use language::{Anchor, Bias, Buffer, Language, LanguageRegistry};
+use lazy_static::lazy_static;
+use ordered_float::OrderedFloat;
+use parking_lot::Mutex;
+use parsing::{CodeContextRetriever, Span, SpanDigest, PARSEABLE_ENTIRE_FILE_TYPES};
+use postage::watch;
+use project::{Fs, PathChange, Project, ProjectEntryId, Worktree, WorktreeId};
+use settings::Settings;
+use smol::channel;
+use std::{
+ cmp::Reverse,
+ env,
+ future::Future,
+ mem,
+ ops::Range,
+ path::{Path, PathBuf},
+ sync::{Arc, Weak},
+ time::{Duration, Instant, SystemTime},
+};
+use util::paths::PathMatcher;
+use util::{channel::RELEASE_CHANNEL_NAME, http::HttpClient, paths::EMBEDDINGS_DIR, ResultExt};
+use workspace::Workspace;
+
+const SEMANTIC_INDEX_VERSION: usize = 11;
+const BACKGROUND_INDEXING_DELAY: Duration = Duration::from_secs(5 * 60);
+const EMBEDDING_QUEUE_FLUSH_TIMEOUT: Duration = Duration::from_millis(250);
+
+lazy_static! {
+ static ref OPENAI_API_KEY: Option<String> = env::var("OPENAI_API_KEY").ok();
+}
+
+pub fn init(
+ fs: Arc<dyn Fs>,
+ http_client: Arc<dyn HttpClient>,
+ language_registry: Arc<LanguageRegistry>,
+ cx: &mut AppContext,
+) {
+ SemanticIndexSettings::register(cx);
+
+ let db_file_path = EMBEDDINGS_DIR
+ .join(Path::new(RELEASE_CHANNEL_NAME.as_str()))
+ .join("embeddings_db");
+
+ cx.observe_new_views(
+ |workspace: &mut Workspace, cx: &mut ViewContext<Workspace>| {
+ let Some(semantic_index) = SemanticIndex::global(cx) else {
+ return;
+ };
+ let project = workspace.project().clone();
+
+ if project.read(cx).is_local() {
+ cx.app_mut()
+ .spawn(|mut cx| async move {
+ let previously_indexed = semantic_index
+ .update(&mut cx, |index, cx| {
+ index.project_previously_indexed(&project, cx)
+ })?
+ .await?;
+ if previously_indexed {
+ semantic_index
+ .update(&mut cx, |index, cx| index.index_project(project, cx))?
+ .await?;
+ }
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+ }
+ },
+ )
+ .detach();
+
+ cx.spawn(move |cx| async move {
+ let semantic_index = SemanticIndex::new(
+ fs,
+ db_file_path,
+ Arc::new(OpenAIEmbeddingProvider::new(
+ http_client,
+ cx.background_executor().clone(),
+ )),
+ language_registry,
+ cx.clone(),
+ )
+ .await?;
+
+ cx.update(|cx| cx.set_global(semantic_index.clone()))?;
+
+ anyhow::Ok(())
+ })
+ .detach();
+}
+
+#[derive(Copy, Clone, Debug)]
+pub enum SemanticIndexStatus {
+ NotAuthenticated,
+ NotIndexed,
+ Indexed,
+ Indexing {
+ remaining_files: usize,
+ rate_limit_expiry: Option<Instant>,
+ },
+}
+
+pub struct SemanticIndex {
+ fs: Arc<dyn Fs>,
+ db: VectorDatabase,
+ embedding_provider: Arc<dyn EmbeddingProvider>,
+ language_registry: Arc<LanguageRegistry>,
+ parsing_files_tx: channel::Sender<(Arc<HashMap<SpanDigest, Embedding>>, PendingFile)>,
+ _embedding_task: Task<()>,
+ _parsing_files_tasks: Vec<Task<()>>,
+ projects: HashMap<WeakModel<Project>, ProjectState>,
+}
+
+struct ProjectState {
+ worktrees: HashMap<WorktreeId, WorktreeState>,
+ pending_file_count_rx: watch::Receiver<usize>,
+ pending_file_count_tx: Arc<Mutex<watch::Sender<usize>>>,
+ pending_index: usize,
+ _subscription: gpui::Subscription,
+ _observe_pending_file_count: Task<()>,
+}
+
+enum WorktreeState {
+ Registering(RegisteringWorktreeState),
+ Registered(RegisteredWorktreeState),
+}
+
+impl WorktreeState {
+ fn is_registered(&self) -> bool {
+ matches!(self, Self::Registered(_))
+ }
+
+ fn paths_changed(
+ &mut self,
+ changes: Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>,
+ worktree: &Worktree,
+ ) {
+ let changed_paths = match self {
+ Self::Registering(state) => &mut state.changed_paths,
+ Self::Registered(state) => &mut state.changed_paths,
+ };
+
+ for (path, entry_id, change) in changes.iter() {
+ let Some(entry) = worktree.entry_for_id(*entry_id) else {
+ continue;
+ };
+ if entry.is_ignored || entry.is_symlink || entry.is_external || entry.is_dir() {
+ continue;
+ }
+ changed_paths.insert(
+ path.clone(),
+ ChangedPathInfo {
+ mtime: entry.mtime,
+ is_deleted: *change == PathChange::Removed,
+ },
+ );
+ }
+ }
+}
+
+struct RegisteringWorktreeState {
+ changed_paths: BTreeMap<Arc<Path>, ChangedPathInfo>,
+ done_rx: watch::Receiver<Option<()>>,
+ _registration: Task<()>,
+}
+
+impl RegisteringWorktreeState {
+ fn done(&self) -> impl Future<Output = ()> {
+ let mut done_rx = self.done_rx.clone();
+ async move {
+ while let Some(result) = done_rx.next().await {
+ if result.is_some() {
+ break;
+ }
+ }
+ }
+ }
+}
+
+struct RegisteredWorktreeState {
+ db_id: i64,
+ changed_paths: BTreeMap<Arc<Path>, ChangedPathInfo>,
+}
+
+struct ChangedPathInfo {
+ mtime: SystemTime,
+ is_deleted: bool,
+}
+
+#[derive(Clone)]
+pub struct JobHandle {
+    /// The outer `Arc` counts the clones of a `JobHandle`; when the last
+    /// clone for a given job is dropped, the pending-file counter is
+    /// decremented exactly once.
+ tx: Arc<Weak<Mutex<watch::Sender<usize>>>>,
+}
+
+impl JobHandle {
+ fn new(tx: &Arc<Mutex<watch::Sender<usize>>>) -> Self {
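+        // Creating a handle counts as one more pending file.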
+ *tx.lock().borrow_mut() += 1;
+ Self {
+ tx: Arc::new(Arc::downgrade(&tx)),
+ }
+ }
+}
+
+impl ProjectState {
+ fn new(subscription: gpui::Subscription, cx: &mut ModelContext<SemanticIndex>) -> Self {
+ let (pending_file_count_tx, pending_file_count_rx) = watch::channel_with(0);
+ let pending_file_count_tx = Arc::new(Mutex::new(pending_file_count_tx));
+ Self {
+ worktrees: Default::default(),
+ pending_file_count_rx: pending_file_count_rx.clone(),
+ pending_file_count_tx,
+ pending_index: 0,
+ _subscription: subscription,
+ _observe_pending_file_count: cx.spawn({
+ let mut pending_file_count_rx = pending_file_count_rx.clone();
+ |this, mut cx| async move {
+ while let Some(_) = pending_file_count_rx.next().await {
+ if this.update(&mut cx, |_, cx| cx.notify()).is_err() {
+ break;
+ }
+ }
+ }
+ }),
+ }
+ }
+
+ fn worktree_id_for_db_id(&self, id: i64) -> Option<WorktreeId> {
+ self.worktrees
+ .iter()
+ .find_map(|(worktree_id, worktree_state)| match worktree_state {
+ WorktreeState::Registered(state) if state.db_id == id => Some(*worktree_id),
+ _ => None,
+ })
+ }
+}
+
+#[derive(Clone)]
+pub struct PendingFile {
+ worktree_db_id: i64,
+ relative_path: Arc<Path>,
+ absolute_path: PathBuf,
+ language: Option<Arc<Language>>,
+ modified_time: SystemTime,
+ job_handle: JobHandle,
+}
+
+#[derive(Clone)]
+pub struct SearchResult {
+ pub buffer: Model<Buffer>,
+ pub range: Range<Anchor>,
+ pub similarity: OrderedFloat<f32>,
+}
+
+impl SemanticIndex {
+ pub fn global(cx: &mut AppContext) -> Option<Model<SemanticIndex>> {
+ if cx.has_global::<Model<Self>>() {
+ Some(cx.global::<Model<SemanticIndex>>().clone())
+ } else {
+ None
+ }
+ }
+
+ pub fn authenticate(&mut self, cx: &mut AppContext) -> bool {
+        if self.embedding_provider.has_credentials() {
+            return true;
+        }
+
+        self.embedding_provider.retrieve_credentials(cx);
+        self.embedding_provider.has_credentials()
+ }
+
+ pub fn is_authenticated(&self) -> bool {
+ self.embedding_provider.has_credentials()
+ }
+
+ pub fn enabled(cx: &AppContext) -> bool {
+ SemanticIndexSettings::get_global(cx).enabled
+ }
+
+ pub fn status(&self, project: &Model<Project>) -> SemanticIndexStatus {
+ if !self.is_authenticated() {
+ return SemanticIndexStatus::NotAuthenticated;
+ }
+
+ if let Some(project_state) = self.projects.get(&project.downgrade()) {
+ if project_state
+ .worktrees
+ .values()
+ .all(|worktree| worktree.is_registered())
+ && project_state.pending_index == 0
+ {
+ SemanticIndexStatus::Indexed
+ } else {
+ SemanticIndexStatus::Indexing {
+                    remaining_files: *project_state.pending_file_count_rx.borrow(),
+ rate_limit_expiry: self.embedding_provider.rate_limit_expiration(),
+ }
+ }
+ } else {
+ SemanticIndexStatus::NotIndexed
+ }
+ }
+
+ pub async fn new(
+ fs: Arc<dyn Fs>,
+ database_path: PathBuf,
+ embedding_provider: Arc<dyn EmbeddingProvider>,
+ language_registry: Arc<LanguageRegistry>,
+ mut cx: AsyncAppContext,
+ ) -> Result<Model<Self>> {
+ let t0 = Instant::now();
+ let database_path = Arc::from(database_path);
+ let db = VectorDatabase::new(fs.clone(), database_path, cx.background_executor().clone())
+ .await?;
+
+ log::trace!(
+ "db initialization took {:?} milliseconds",
+ t0.elapsed().as_millis()
+ );
+
+ cx.build_model(|cx| {
+ let t0 = Instant::now();
+ let embedding_queue =
+ EmbeddingQueue::new(embedding_provider.clone(), cx.background_executor().clone());
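+            // Write each file to the vector database as soon as all of its
+            // spans have been embedded.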
+ let _embedding_task = cx.background_executor().spawn({
+ let embedded_files = embedding_queue.finished_files();
+ let db = db.clone();
+ async move {
+ while let Ok(file) = embedded_files.recv().await {
+ db.insert_file(file.worktree_id, file.path, file.mtime, file.spans)
+ .await
+ .log_err();
+ }
+ }
+ });
+
+ // Parse files into embeddable spans.
+ let (parsing_files_tx, parsing_files_rx) =
+ channel::unbounded::<(Arc<HashMap<SpanDigest, Embedding>>, PendingFile)>();
+ let embedding_queue = Arc::new(Mutex::new(embedding_queue));
+ let mut _parsing_files_tasks = Vec::new();
+ for _ in 0..cx.background_executor().num_cpus() {
+ let fs = fs.clone();
+ let mut parsing_files_rx = parsing_files_rx.clone();
+ let embedding_provider = embedding_provider.clone();
+ let embedding_queue = embedding_queue.clone();
+ let background = cx.background_executor().clone();
+ _parsing_files_tasks.push(cx.background_executor().spawn(async move {
+ let mut retriever = CodeContextRetriever::new(embedding_provider.clone());
+ loop {
+ let mut timer = background.timer(EMBEDDING_QUEUE_FLUSH_TIMEOUT).fuse();
+ let mut next_file_to_parse = parsing_files_rx.next().fuse();
+ futures::select_biased! {
+ next_file_to_parse = next_file_to_parse => {
+ if let Some((embeddings_for_digest, pending_file)) = next_file_to_parse {
+ Self::parse_file(
+ &fs,
+ pending_file,
+ &mut retriever,
+ &embedding_queue,
+ &embeddings_for_digest,
+ )
+ .await
+ } else {
+ break;
+ }
+ },
+ _ = timer => {
+ embedding_queue.lock().flush();
+ }
+ }
+ }
+ }));
+ }
+
+ log::trace!(
+ "semantic index task initialization took {:?} milliseconds",
+ t0.elapsed().as_millis()
+ );
+ Self {
+ fs,
+ db,
+ embedding_provider,
+ language_registry,
+ parsing_files_tx,
+ _embedding_task,
+ _parsing_files_tasks,
+ projects: Default::default(),
+ }
+ })
+ }
+
+ async fn parse_file(
+ fs: &Arc<dyn Fs>,
+ pending_file: PendingFile,
+ retriever: &mut CodeContextRetriever,
+ embedding_queue: &Arc<Mutex<EmbeddingQueue>>,
+ embeddings_for_digest: &HashMap<SpanDigest, Embedding>,
+ ) {
+ let Some(language) = pending_file.language else {
+ return;
+ };
+
+ if let Some(content) = fs.load(&pending_file.absolute_path).await.log_err() {
+ if let Some(mut spans) = retriever
+ .parse_file_with_template(Some(&pending_file.relative_path), &content, language)
+ .log_err()
+ {
+ log::trace!(
+ "parsed path {:?}: {} spans",
+ pending_file.relative_path,
+ spans.len()
+ );
+
+ for span in &mut spans {
+ if let Some(embedding) = embeddings_for_digest.get(&span.digest) {
+ span.embedding = Some(embedding.to_owned());
+ }
+ }
+
+ embedding_queue.lock().push(FileToEmbed {
+ worktree_id: pending_file.worktree_db_id,
+ path: pending_file.relative_path,
+ mtime: pending_file.modified_time,
+ job_handle: pending_file.job_handle,
+ spans,
+ });
+ }
+ }
+ }
+
+ pub fn project_previously_indexed(
+ &mut self,
+ project: &Model<Project>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<bool>> {
+ let worktrees_indexed_previously = project
+ .read(cx)
+ .worktrees()
+ .map(|worktree| {
+ self.db
+ .worktree_previously_indexed(&worktree.read(cx).abs_path())
+ })
+ .collect::<Vec<_>>();
+ cx.spawn(|_, _cx| async move {
+ let worktree_indexed_previously =
+ futures::future::join_all(worktrees_indexed_previously).await;
+
+ Ok(worktree_indexed_previously
+ .iter()
+ .filter(|worktree| worktree.is_ok())
+ .all(|v| v.as_ref().log_err().is_some_and(|v| v.to_owned())))
+ })
+ }
+
+ fn project_entries_changed(
+ &mut self,
+ project: Model<Project>,
+ worktree_id: WorktreeId,
+ changes: Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let Some(worktree) = project.read(cx).worktree_for_id(worktree_id.clone(), cx) else {
+ return;
+ };
+ let project = project.downgrade();
+ let Some(project_state) = self.projects.get_mut(&project) else {
+ return;
+ };
+
+ let worktree = worktree.read(cx);
+ let worktree_state =
+ if let Some(worktree_state) = project_state.worktrees.get_mut(&worktree_id) {
+ worktree_state
+ } else {
+ return;
+ };
+ worktree_state.paths_changed(changes, worktree);
+ if let WorktreeState::Registered(_) = worktree_state {
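+            // Re-index in the background after a delay, so rapid edits are
+            // picked up in batches rather than one at a time.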
+ cx.spawn(|this, mut cx| async move {
+ cx.background_executor()
+ .timer(BACKGROUND_INDEXING_DELAY)
+ .await;
+ if let Some((this, project)) = this.upgrade().zip(project.upgrade()) {
+ this.update(&mut cx, |this, cx| {
+ this.index_project(project, cx).detach_and_log_err(cx)
+ })?;
+ }
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
+ }
+ }
+
+ fn register_worktree(
+ &mut self,
+ project: Model<Project>,
+ worktree: Model<Worktree>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let project = project.downgrade();
+ let project_state = if let Some(project_state) = self.projects.get_mut(&project) {
+ project_state
+ } else {
+ return;
+ };
+ let worktree = if let Some(worktree) = worktree.read(cx).as_local() {
+ worktree
+ } else {
+ return;
+ };
+ let worktree_abs_path = worktree.abs_path().clone();
+ let scan_complete = worktree.scan_complete();
+ let worktree_id = worktree.id();
+ let db = self.db.clone();
+ let language_registry = self.language_registry.clone();
+ let (mut done_tx, done_rx) = watch::channel();
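+        // Once the worktree's initial scan completes, find or create its row
+        // in the database and compute which paths changed since the last index.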
+ let registration = cx.spawn(|this, mut cx| {
+ async move {
+ let register = async {
+ scan_complete.await;
+ let db_id = db.find_or_create_worktree(worktree_abs_path).await?;
+ let mut file_mtimes = db.get_file_mtimes(db_id).await?;
+ let worktree = if let Some(project) = project.upgrade() {
+ project
+ .read_with(&cx, |project, cx| project.worktree_for_id(worktree_id, cx))
+ .ok()
+ .flatten()
+ .context("worktree not found")?
+ } else {
+ return anyhow::Ok(());
+ };
+ let worktree = worktree.read_with(&cx, |worktree, _| worktree.snapshot())?;
+ let mut changed_paths = cx
+ .background_executor()
+ .spawn(async move {
+ let mut changed_paths = BTreeMap::new();
+ for file in worktree.files(false, 0) {
+ let absolute_path = worktree.absolutize(&file.path);
+
+ if file.is_external || file.is_ignored || file.is_symlink {
+ continue;
+ }
+
+ if let Ok(language) = language_registry
+ .language_for_file(&absolute_path, None)
+ .await
+ {
+                            // Skip files whose language has no embedding support.
+ if !PARSEABLE_ENTIRE_FILE_TYPES
+ .contains(&language.name().as_ref())
+                                && language.name().as_ref() != "Markdown"
+ && language
+ .grammar()
+ .and_then(|grammar| grammar.embedding_config.as_ref())
+ .is_none()
+ {
+ continue;
+ }
+
+ let stored_mtime = file_mtimes.remove(&file.path.to_path_buf());
+ let already_stored = stored_mtime
+ .map_or(false, |existing_mtime| {
+ existing_mtime == file.mtime
+ });
+
+ if !already_stored {
+ changed_paths.insert(
+ file.path.clone(),
+ ChangedPathInfo {
+ mtime: file.mtime,
+ is_deleted: false,
+ },
+ );
+ }
+ }
+ }
+
+                        // Clean up database entries for files that are no longer in the worktree.
+ for (path, mtime) in file_mtimes {
+ changed_paths.insert(
+ path.into(),
+ ChangedPathInfo {
+ mtime,
+ is_deleted: true,
+ },
+ );
+ }
+
+ anyhow::Ok(changed_paths)
+ })
+ .await?;
+ this.update(&mut cx, |this, cx| {
+ let project_state = this
+ .projects
+ .get_mut(&project)
+ .context("project not registered")?;
+ let project = project.upgrade().context("project was dropped")?;
+
+ if let Some(WorktreeState::Registering(state)) =
+ project_state.worktrees.remove(&worktree_id)
+ {
+ changed_paths.extend(state.changed_paths);
+ }
+ project_state.worktrees.insert(
+ worktree_id,
+ WorktreeState::Registered(RegisteredWorktreeState {
+ db_id,
+ changed_paths,
+ }),
+ );
+ this.index_project(project, cx).detach_and_log_err(cx);
+
+ anyhow::Ok(())
+ })??;
+
+ anyhow::Ok(())
+ };
+
+ if register.await.log_err().is_none() {
+ // Stop tracking this worktree if the registration failed.
+ this.update(&mut cx, |this, _| {
+ this.projects.get_mut(&project).map(|project_state| {
+ project_state.worktrees.remove(&worktree_id);
+ });
+ })
+ .ok();
+ }
+
+ *done_tx.borrow_mut() = Some(());
+ }
+ });
+ project_state.worktrees.insert(
+ worktree_id,
+ WorktreeState::Registering(RegisteringWorktreeState {
+ changed_paths: Default::default(),
+ done_rx,
+ _registration: registration,
+ }),
+ );
+ }
+
+ fn project_worktrees_changed(&mut self, project: Model<Project>, cx: &mut ModelContext<Self>) {
+ let project_state = if let Some(project_state) = self.projects.get_mut(&project.downgrade())
+ {
+ project_state
+ } else {
+ return;
+ };
+
+ let mut worktrees = project
+ .read(cx)
+ .worktrees()
+ .filter(|worktree| worktree.read(cx).is_local())
+ .collect::<Vec<_>>();
+ let worktree_ids = worktrees
+ .iter()
+ .map(|worktree| worktree.read(cx).id())
+ .collect::<HashSet<_>>();
+
+ // Remove worktrees that are no longer present
+ project_state
+ .worktrees
+ .retain(|worktree_id, _| worktree_ids.contains(worktree_id));
+
+ // Register new worktrees
+ worktrees.retain(|worktree| {
+ let worktree_id = worktree.read(cx).id();
+ !project_state.worktrees.contains_key(&worktree_id)
+ });
+ for worktree in worktrees {
+ self.register_worktree(project.clone(), worktree, cx);
+ }
+ }
+
+ pub fn pending_file_count(&self, project: &Model<Project>) -> Option<watch::Receiver<usize>> {
+ Some(
+ self.projects
+ .get(&project.downgrade())?
+ .pending_file_count_rx
+ .clone(),
+ )
+ }
+
+ pub fn search_project(
+ &mut self,
+ project: Model<Project>,
+ query: String,
+ limit: usize,
+ includes: Vec<PathMatcher>,
+ excludes: Vec<PathMatcher>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<SearchResult>>> {
+ if query.is_empty() {
+ return Task::ready(Ok(Vec::new()));
+ }
+
+ let index = self.index_project(project.clone(), cx);
+ let embedding_provider = self.embedding_provider.clone();
+
+ cx.spawn(|this, mut cx| async move {
+ index.await?;
+ let t0 = Instant::now();
+
+ let query = embedding_provider
+ .embed_batch(vec![query])
+ .await?
+ .pop()
+ .context("could not embed query")?;
+ log::trace!("Embedding Search Query: {:?}ms", t0.elapsed().as_millis());
+
+ let search_start = Instant::now();
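+            // Search dirty open buffers and the on-disk index concurrently,
+            // then merge the two result sets below.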
+ let modified_buffer_results = this.update(&mut cx, |this, cx| {
+ this.search_modified_buffers(
+ &project,
+ query.clone(),
+ limit,
+ &includes,
+ &excludes,
+ cx,
+ )
+ })?;
+ let file_results = this.update(&mut cx, |this, cx| {
+ this.search_files(project, query, limit, includes, excludes, cx)
+ })?;
+ let (modified_buffer_results, file_results) =
+ futures::join!(modified_buffer_results, file_results);
+
+ // Weave together the results from modified buffers and files.
+ let mut results = Vec::new();
+ let mut modified_buffers = HashSet::default();
+ for result in modified_buffer_results.log_err().unwrap_or_default() {
+ modified_buffers.insert(result.buffer.clone());
+ results.push(result);
+ }
+ for result in file_results.log_err().unwrap_or_default() {
+ if !modified_buffers.contains(&result.buffer) {
+ results.push(result);
+ }
+ }
+ results.sort_by_key(|result| Reverse(result.similarity));
+ results.truncate(limit);
+ log::trace!("Semantic search took {:?}", search_start.elapsed());
+ Ok(results)
+ })
+ }
+
+ pub fn search_files(
+ &mut self,
+ project: Model<Project>,
+ query: Embedding,
+ limit: usize,
+ includes: Vec<PathMatcher>,
+ excludes: Vec<PathMatcher>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<SearchResult>>> {
+ let db_path = self.db.path().clone();
+ let fs = self.fs.clone();
+ cx.spawn(|this, mut cx| async move {
+ let database = VectorDatabase::new(
+ fs.clone(),
+ db_path.clone(),
+ cx.background_executor().clone(),
+ )
+ .await?;
+
+ let worktree_db_ids = this.read_with(&cx, |this, _| {
+ let project_state = this
+ .projects
+ .get(&project.downgrade())
+ .context("project was not indexed")?;
+ let worktree_db_ids = project_state
+ .worktrees
+ .values()
+ .filter_map(|worktree| {
+ if let WorktreeState::Registered(worktree) = worktree {
+ Some(worktree.db_id)
+ } else {
+ None
+ }
+ })
+ .collect::<Vec<i64>>();
+ anyhow::Ok(worktree_db_ids)
+ })??;
+
+ let file_ids = database
+ .retrieve_included_file_ids(&worktree_db_ids, &includes, &excludes)
+ .await?;
+
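+            // Split the candidate file ids into batches so the top-k similarity
+            // search can run on a separate database connection per batch.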
+            let batch_n = cx.background_executor().num_cpus();
+            let ids_len = file_ids.len();
+            let minimum_batch_size = 50;
+
+            let batch_size = (ids_len / batch_n).max(minimum_batch_size);
+
+ let mut batch_results = Vec::new();
+ for batch in file_ids.chunks(batch_size) {
+                let batch = batch.to_vec();
+ let fs = fs.clone();
+ let db_path = db_path.clone();
+ let query = query.clone();
+ if let Some(db) =
+ VectorDatabase::new(fs, db_path.clone(), cx.background_executor().clone())
+ .await
+ .log_err()
+ {
+ batch_results.push(async move {
+ db.top_k_search(&query, limit, batch.as_slice()).await
+ });
+ }
+ }
+
+ let batch_results = futures::future::join_all(batch_results).await;
+
+ let mut results = Vec::new();
+ for batch_result in batch_results {
+                if let Ok(batch_result) = batch_result {
+                    for (id, similarity) in batch_result {
+ let ix = match results
+ .binary_search_by_key(&Reverse(similarity), |(_, s)| Reverse(*s))
+ {
+ Ok(ix) => ix,
+ Err(ix) => ix,
+ };
+
+ results.insert(ix, (id, similarity));
+ results.truncate(limit);
+ }
+ }
+ }
+
+ let ids = results.iter().map(|(id, _)| *id).collect::<Vec<i64>>();
+ let scores = results
+ .into_iter()
+ .map(|(_, score)| score)
+ .collect::<Vec<_>>();
+ let spans = database.spans_for_ids(ids.as_slice()).await?;
+
+ let mut tasks = Vec::new();
+ let mut ranges = Vec::new();
+ let weak_project = project.downgrade();
+ project.update(&mut cx, |project, cx| {
+ let this = this.upgrade().context("index was dropped")?;
+ for (worktree_db_id, file_path, byte_range) in spans {
+ let project_state =
+ if let Some(state) = this.read(cx).projects.get(&weak_project) {
+ state
+ } else {
+ return Err(anyhow!("project not added"));
+ };
+ if let Some(worktree_id) = project_state.worktree_id_for_db_id(worktree_db_id) {
+ tasks.push(project.open_buffer((worktree_id, file_path), cx));
+ ranges.push(byte_range);
+ }
+ }
+
+ Ok(())
+ })??;
+
+ let buffers = futures::future::join_all(tasks).await;
+ Ok(buffers
+ .into_iter()
+ .zip(ranges)
+ .zip(scores)
+ .filter_map(|((buffer, range), similarity)| {
+ let buffer = buffer.log_err()?;
+ let range = buffer
+ .read_with(&cx, |buffer, _| {
+ let start = buffer.clip_offset(range.start, Bias::Left);
+ let end = buffer.clip_offset(range.end, Bias::Right);
+ buffer.anchor_before(start)..buffer.anchor_after(end)
+ })
+ .log_err()?;
+ Some(SearchResult {
+ buffer,
+ range,
+ similarity,
+ })
+ })
+ .collect())
+ })
+ }
+
+ fn search_modified_buffers(
+ &self,
+ project: &Model<Project>,
+ query: Embedding,
+ limit: usize,
+ includes: &[PathMatcher],
+ excludes: &[PathMatcher],
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<Vec<SearchResult>>> {
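+        // Dirty open buffers aren't reflected in the on-disk index yet, so
+        // embed and search their current contents directly.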
+ let modified_buffers = project
+ .read(cx)
+ .opened_buffers()
+ .into_iter()
+ .filter_map(|buffer_handle| {
+ let buffer = buffer_handle.read(cx);
+ let snapshot = buffer.snapshot();
+ let excluded = snapshot.resolve_file_path(cx, false).map_or(false, |path| {
+ excludes.iter().any(|matcher| matcher.is_match(&path))
+ });
+
+                let included = if includes.is_empty() {
+ true
+ } else {
+ snapshot.resolve_file_path(cx, false).map_or(false, |path| {
+ includes.iter().any(|matcher| matcher.is_match(&path))
+ })
+ };
+
+ if buffer.is_dirty() && !excluded && included {
+ Some((buffer_handle, snapshot))
+ } else {
+ None
+ }
+ })
+ .collect::<HashMap<_, _>>();
+
+ let embedding_provider = self.embedding_provider.clone();
+ let fs = self.fs.clone();
+ let db_path = self.db.path().clone();
+ let background = cx.background_executor().clone();
+ cx.background_executor().spawn(async move {
+ let db = VectorDatabase::new(fs, db_path.clone(), background).await?;
+ let mut results = Vec::<SearchResult>::new();
+
+ let mut retriever = CodeContextRetriever::new(embedding_provider.clone());
+ for (buffer, snapshot) in modified_buffers {
+ let language = snapshot
+ .language_at(0)
+ .cloned()
+ .unwrap_or_else(|| language::PLAIN_TEXT.clone());
+ let mut spans = retriever
+ .parse_file_with_template(None, &snapshot.text(), language)
+ .log_err()
+ .unwrap_or_default();
+ if Self::embed_spans(&mut spans, embedding_provider.as_ref(), &db)
+ .await
+ .log_err()
+ .is_some()
+ {
+ for span in spans {
+ let similarity = span.embedding.unwrap().similarity(&query);
+ let ix = match results
+ .binary_search_by_key(&Reverse(similarity), |result| {
+ Reverse(result.similarity)
+ }) {
+ Ok(ix) => ix,
+ Err(ix) => ix,
+ };
+
+ let range = {
+ let start = snapshot.clip_offset(span.range.start, Bias::Left);
+ let end = snapshot.clip_offset(span.range.end, Bias::Right);
+ snapshot.anchor_before(start)..snapshot.anchor_after(end)
+ };
+
+ results.insert(
+ ix,
+ SearchResult {
+ buffer: buffer.clone(),
+ range,
+ similarity,
+ },
+ );
+ results.truncate(limit);
+ }
+ }
+ }
+
+ Ok(results)
+ })
+ }
+
+ pub fn index_project(
+ &mut self,
+ project: Model<Project>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
+ if !self.is_authenticated() {
+ if !self.authenticate(cx) {
+ return Task::ready(Err(anyhow!("user is not authenticated")));
+ }
+ }
+
+ if !self.projects.contains_key(&project.downgrade()) {
+ let subscription = cx.subscribe(&project, |this, project, event, cx| match event {
+ project::Event::WorktreeAdded | project::Event::WorktreeRemoved(_) => {
+ this.project_worktrees_changed(project.clone(), cx);
+ }
+ project::Event::WorktreeUpdatedEntries(worktree_id, changes) => {
+ this.project_entries_changed(project, *worktree_id, changes.clone(), cx);
+ }
+ _ => {}
+ });
+ let project_state = ProjectState::new(subscription, cx);
+ self.projects.insert(project.downgrade(), project_state);
+ self.project_worktrees_changed(project.clone(), cx);
+ }
+ let project_state = self.projects.get_mut(&project.downgrade()).unwrap();
+ project_state.pending_index += 1;
+ cx.notify();
+
+ let mut pending_file_count_rx = project_state.pending_file_count_rx.clone();
+ let db = self.db.clone();
+ let language_registry = self.language_registry.clone();
+ let parsing_files_tx = self.parsing_files_tx.clone();
+ let worktree_registration = self.wait_for_worktree_registration(&project, cx);
+
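+        // After every worktree has finished registering, drain the recorded
+        // path changes into files to delete and files to (re)parse.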
+ cx.spawn(|this, mut cx| async move {
+ worktree_registration.await?;
+
+ let mut pending_files = Vec::new();
+ let mut files_to_delete = Vec::new();
+ this.update(&mut cx, |this, cx| {
+ let project_state = this
+ .projects
+ .get_mut(&project.downgrade())
+ .context("project was dropped")?;
+ let pending_file_count_tx = &project_state.pending_file_count_tx;
+
+ project_state
+ .worktrees
+ .retain(|worktree_id, worktree_state| {
+ let worktree = if let Some(worktree) =
+ project.read(cx).worktree_for_id(*worktree_id, cx)
+ {
+ worktree
+ } else {
+ return false;
+ };
+ let worktree_state =
+ if let WorktreeState::Registered(worktree_state) = worktree_state {
+ worktree_state
+ } else {
+ return true;
+ };
+
+ worktree_state.changed_paths.retain(|path, info| {
+ if info.is_deleted {
+ files_to_delete.push((worktree_state.db_id, path.clone()));
+ } else {
+ let absolute_path = worktree.read(cx).absolutize(path);
+ let job_handle = JobHandle::new(pending_file_count_tx);
+ pending_files.push(PendingFile {
+ absolute_path,
+ relative_path: path.clone(),
+ language: None,
+ job_handle,
+ modified_time: info.mtime,
+ worktree_db_id: worktree_state.db_id,
+ });
+ }
+
+ false
+ });
+ true
+ });
+
+ anyhow::Ok(())
+ })??;
+
+ cx.background_executor()
+ .spawn(async move {
+ for (worktree_db_id, path) in files_to_delete {
+ db.delete_file(worktree_db_id, path).await.log_err();
+ }
+
+ let embeddings_for_digest = {
+ let mut files = HashMap::default();
+ for pending_file in &pending_files {
+ files
+ .entry(pending_file.worktree_db_id)
+ .or_insert(Vec::new())
+ .push(pending_file.relative_path.clone());
+ }
+ Arc::new(
+ db.embeddings_for_files(files)
+ .await
+ .log_err()
+ .unwrap_or_default(),
+ )
+ };
+
+ for mut pending_file in pending_files {
+ if let Ok(language) = language_registry
+ .language_for_file(&pending_file.relative_path, None)
+ .await
+ {
+ if !PARSEABLE_ENTIRE_FILE_TYPES.contains(&language.name().as_ref())
+                            && language.name().as_ref() != "Markdown"
+ && language
+ .grammar()
+ .and_then(|grammar| grammar.embedding_config.as_ref())
+ .is_none()
+ {
+ continue;
+ }
+ pending_file.language = Some(language);
+ }
+ parsing_files_tx
+ .try_send((embeddings_for_digest.clone(), pending_file))
+ .ok();
+ }
+
+ // Wait until we're done indexing.
+ while let Some(count) = pending_file_count_rx.next().await {
+ if count == 0 {
+ break;
+ }
+ }
+ })
+ .await;
+
+ this.update(&mut cx, |this, cx| {
+ let project_state = this
+ .projects
+ .get_mut(&project.downgrade())
+ .context("project was dropped")?;
+ project_state.pending_index -= 1;
+ cx.notify();
+ anyhow::Ok(())
+ })??;
+
+ Ok(())
+ })
+ }
+
+ fn wait_for_worktree_registration(
+ &self,
+ project: &Model<Project>,
+ cx: &mut ModelContext<Self>,
+ ) -> Task<Result<()>> {
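+        // Resolve once no worktree of this project is still in the `Registering` state.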
+ let project = project.downgrade();
+ cx.spawn(|this, cx| async move {
+ loop {
+ let mut pending_worktrees = Vec::new();
+ this.upgrade()
+ .context("semantic index dropped")?
+ .read_with(&cx, |this, _| {
+ if let Some(project) = this.projects.get(&project) {
+ for worktree in project.worktrees.values() {
+ if let WorktreeState::Registering(worktree) = worktree {
+ pending_worktrees.push(worktree.done());
+ }
+ }
+ }
+ })?;
+
+ if pending_worktrees.is_empty() {
+ break;
+ } else {
+ future::join_all(pending_worktrees).await;
+ }
+ }
+ Ok(())
+ })
+ }
+
+ async fn embed_spans(
+ spans: &mut [Span],
+ embedding_provider: &dyn EmbeddingProvider,
+ db: &VectorDatabase,
+ ) -> Result<()> {
+ let mut batch = Vec::new();
+ let mut batch_tokens = 0;
+ let mut embeddings = Vec::new();
+
+ let digests = spans
+ .iter()
+ .map(|span| span.digest.clone())
+ .collect::<Vec<_>>();
+ let embeddings_for_digests = db
+ .embeddings_for_digests(digests)
+ .await
+ .log_err()
+ .unwrap_or_default();
+
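+        // Batch up spans that don't already have a stored embedding, flushing
+        // a batch whenever adding another span would exceed the provider's
+        // per-batch token limit.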
+ for span in &*spans {
+ if embeddings_for_digests.contains_key(&span.digest) {
+ continue;
+ };
+
+ if batch_tokens + span.token_count > embedding_provider.max_tokens_per_batch() {
+ let batch_embeddings = embedding_provider
+ .embed_batch(mem::take(&mut batch))
+ .await?;
+ embeddings.extend(batch_embeddings);
+ batch_tokens = 0;
+ }
+
+ batch_tokens += span.token_count;
+ batch.push(span.content.clone());
+ }
+
+ if !batch.is_empty() {
+ let batch_embeddings = embedding_provider
+ .embed_batch(mem::take(&mut batch))
+ .await?;
+
+ embeddings.extend(batch_embeddings);
+ }
+
+ let mut embeddings = embeddings.into_iter();
+ for span in spans {
+ let embedding = if let Some(embedding) = embeddings_for_digests.get(&span.digest) {
+ Some(embedding.clone())
+ } else {
+ embeddings.next()
+ };
+ let embedding = embedding.context("failed to embed spans")?;
+ span.embedding = Some(embedding);
+ }
+ Ok(())
+ }
+}
+
+impl Drop for JobHandle {
+ fn drop(&mut self) {
+ if let Some(inner) = Arc::get_mut(&mut self.tx) {
+            // This is the last remaining instance of this JobHandle, regardless
+            // of whether it was ever cloned.
+ if let Some(tx) = inner.upgrade() {
+ let mut tx = tx.lock();
+ *tx.borrow_mut() -= 1;
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+
+ use super::*;
+ #[test]
+ fn test_job_handle() {
+ let (job_count_tx, job_count_rx) = watch::channel_with(0);
+ let tx = Arc::new(Mutex::new(job_count_tx));
+ let job_handle = JobHandle::new(&tx);
+
+ assert_eq!(1, *job_count_rx.borrow());
+ let new_job_handle = job_handle.clone();
+ assert_eq!(1, *job_count_rx.borrow());
+ drop(job_handle);
+ assert_eq!(1, *job_count_rx.borrow());
+ drop(new_job_handle);
+ assert_eq!(0, *job_count_rx.borrow());
+ }
+}
@@ -0,0 +1,28 @@
+use anyhow;
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
+use settings::Settings;
+
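+/// Settings controlling the semantic index, read from the `"semantic_index"`
+/// section of the settings files, e.g. `"semantic_index": { "enabled": true }`.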
+#[derive(Deserialize, Debug)]
+pub struct SemanticIndexSettings {
+ pub enabled: bool,
+}
+
+#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
+pub struct SemanticIndexSettingsContent {
+ pub enabled: Option<bool>,
+}
+
+impl Settings for SemanticIndexSettings {
+ const KEY: Option<&'static str> = Some("semantic_index");
+
+ type FileContent = SemanticIndexSettingsContent;
+
+ fn load(
+ default_value: &Self::FileContent,
+ user_values: &[&Self::FileContent],
+ _: &mut gpui::AppContext,
+ ) -> anyhow::Result<Self> {
+ Self::load_via_json_merge(default_value, user_values)
+ }
+}
@@ -0,0 +1,1697 @@
+use crate::{
+ embedding_queue::EmbeddingQueue,
+ parsing::{subtract_ranges, CodeContextRetriever, Span, SpanDigest},
+ semantic_index_settings::SemanticIndexSettings,
+ FileToEmbed, JobHandle, SearchResult, SemanticIndex, EMBEDDING_QUEUE_FLUSH_TIMEOUT,
+};
+use ai::test::FakeEmbeddingProvider;
+
+use gpui::{Task, TestAppContext};
+use language::{Language, LanguageConfig, LanguageRegistry, ToOffset};
+use parking_lot::Mutex;
+use pretty_assertions::assert_eq;
+use project::{project_settings::ProjectSettings, FakeFs, Fs, Project};
+use rand::{rngs::StdRng, Rng};
+use serde_json::json;
+use settings::{Settings, SettingsStore};
+use std::{path::Path, sync::Arc, time::SystemTime};
+use unindent::Unindent;
+use util::{paths::PathMatcher, RandomCharIter};
+
+#[ctor::ctor]
+fn init_logger() {
+ if std::env::var("RUST_LOG").is_ok() {
+ env_logger::init();
+ }
+}
+
+#[gpui::test]
+async fn test_semantic_index(cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.background_executor.clone());
+ fs.insert_tree(
+ "/the-root",
+ json!({
+ "src": {
+ "file1.rs": "
+ fn aaa() {
+ println!(\"aaaaaaaaaaaa!\");
+ }
+
+ fn zzzzz() {
+ println!(\"SLEEPING\");
+ }
+ ".unindent(),
+ "file2.rs": "
+ fn bbb() {
+ println!(\"bbbbbbbbbbbbb!\");
+ }
+ struct pqpqpqp {}
+ ".unindent(),
+ "file3.toml": "
+ ZZZZZZZZZZZZZZZZZZ = 5
+ ".unindent(),
+ }
+ }),
+ )
+ .await;
+
+ let languages = Arc::new(LanguageRegistry::new(Task::ready(())));
+ let rust_language = rust_lang();
+ let toml_language = toml_lang();
+ languages.add(rust_language);
+ languages.add(toml_language);
+
+ let db_dir = tempdir::TempDir::new("vector-store").unwrap();
+ let db_path = db_dir.path().join("db.sqlite");
+
+ let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
+ let semantic_index = SemanticIndex::new(
+ fs.clone(),
+ db_path,
+ embedding_provider.clone(),
+ languages,
+ cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
+
+ let search_results = semantic_index.update(cx, |store, cx| {
+ store.search_project(
+ project.clone(),
+ "aaaaaabbbbzz".to_string(),
+ 5,
+ vec![],
+ vec![],
+ cx,
+ )
+ });
+ let pending_file_count =
+ semantic_index.read_with(cx, |index, _| index.pending_file_count(&project).unwrap());
+ cx.background_executor.run_until_parked();
+ assert_eq!(*pending_file_count.borrow(), 3);
+ cx.background_executor
+ .advance_clock(EMBEDDING_QUEUE_FLUSH_TIMEOUT);
+ assert_eq!(*pending_file_count.borrow(), 0);
+
+ let search_results = search_results.await.unwrap();
+ assert_search_results(
+ &search_results,
+ &[
+ (Path::new("src/file1.rs").into(), 0),
+ (Path::new("src/file2.rs").into(), 0),
+ (Path::new("src/file3.toml").into(), 0),
+ (Path::new("src/file1.rs").into(), 45),
+ (Path::new("src/file2.rs").into(), 45),
+ ],
+ cx,
+ );
+
+    // Test the include/exclude files functionality.
+ let include_files = vec![PathMatcher::new("*.rs").unwrap()];
+ let exclude_files = vec![PathMatcher::new("*.rs").unwrap()];
+ let rust_only_search_results = semantic_index
+ .update(cx, |store, cx| {
+ store.search_project(
+ project.clone(),
+ "aaaaaabbbbzz".to_string(),
+ 5,
+ include_files,
+ vec![],
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+
+ assert_search_results(
+ &rust_only_search_results,
+ &[
+ (Path::new("src/file1.rs").into(), 0),
+ (Path::new("src/file2.rs").into(), 0),
+ (Path::new("src/file1.rs").into(), 45),
+ (Path::new("src/file2.rs").into(), 45),
+ ],
+ cx,
+ );
+
+ let no_rust_search_results = semantic_index
+ .update(cx, |store, cx| {
+ store.search_project(
+ project.clone(),
+ "aaaaaabbbbzz".to_string(),
+ 5,
+ vec![],
+ exclude_files,
+ cx,
+ )
+ })
+ .await
+ .unwrap();
+
+ assert_search_results(
+ &no_rust_search_results,
+ &[(Path::new("src/file3.toml").into(), 0)],
+ cx,
+ );
+
+ fs.save(
+ "/the-root/src/file2.rs".as_ref(),
+ &"
+ fn dddd() { println!(\"ddddd!\"); }
+ struct pqpqpqp {}
+ "
+ .unindent()
+ .into(),
+ Default::default(),
+ )
+ .await
+ .unwrap();
+
+ cx.background_executor
+ .advance_clock(EMBEDDING_QUEUE_FLUSH_TIMEOUT);
+
+ let prev_embedding_count = embedding_provider.embedding_count();
+ let index = semantic_index.update(cx, |store, cx| store.index_project(project.clone(), cx));
+ cx.background_executor.run_until_parked();
+ assert_eq!(*pending_file_count.borrow(), 1);
+ cx.background_executor
+ .advance_clock(EMBEDDING_QUEUE_FLUSH_TIMEOUT);
+ assert_eq!(*pending_file_count.borrow(), 0);
+ index.await.unwrap();
+
+ assert_eq!(
+ embedding_provider.embedding_count() - prev_embedding_count,
+ 1
+ );
+}
+
+#[gpui::test(iterations = 10)]
+async fn test_embedding_batching(cx: &mut TestAppContext, mut rng: StdRng) {
+ let (outstanding_job_count, _) = postage::watch::channel_with(0);
+ let outstanding_job_count = Arc::new(Mutex::new(outstanding_job_count));
+
+ let files = (1..=3)
+ .map(|file_ix| FileToEmbed {
+ worktree_id: 5,
+ path: Path::new(&format!("path-{file_ix}")).into(),
+ mtime: SystemTime::now(),
+ spans: (0..rng.gen_range(4..22))
+ .map(|document_ix| {
+ let content_len = rng.gen_range(10..100);
+ let content = RandomCharIter::new(&mut rng)
+ .with_simple_text()
+ .take(content_len)
+ .collect::<String>();
+ let digest = SpanDigest::from(content.as_str());
+ Span {
+ range: 0..10,
+ embedding: None,
+ name: format!("document {document_ix}"),
+ content,
+ digest,
+ token_count: rng.gen_range(10..30),
+ }
+ })
+ .collect(),
+ job_handle: JobHandle::new(&outstanding_job_count),
+ })
+ .collect::<Vec<_>>();
+
+ let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
+
+ let mut queue = EmbeddingQueue::new(embedding_provider.clone(), cx.background_executor.clone());
+ for file in &files {
+ queue.push(file.clone());
+ }
+ queue.flush();
+
+ cx.background_executor.run_until_parked();
+ let finished_files = queue.finished_files();
+ let mut embedded_files: Vec<_> = files
+ .iter()
+ .map(|_| finished_files.try_recv().expect("no finished file"))
+ .collect();
+
+ let expected_files: Vec<_> = files
+ .iter()
+ .map(|file| {
+ let mut file = file.clone();
+ for doc in &mut file.spans {
+ doc.embedding = Some(embedding_provider.embed_sync(doc.content.as_ref()));
+ }
+ file
+ })
+ .collect();
+
+ embedded_files.sort_by_key(|f| f.path.clone());
+
+ assert_eq!(embedded_files, expected_files);
+}
+
+#[track_caller]
+fn assert_search_results(
+ actual: &[SearchResult],
+ expected: &[(Arc<Path>, usize)],
+ cx: &TestAppContext,
+) {
+ let actual = actual
+ .iter()
+ .map(|search_result| {
+ search_result.buffer.read_with(cx, |buffer, _cx| {
+ (
+ buffer.file().unwrap().path().clone(),
+ search_result.range.start.to_offset(buffer),
+ )
+ })
+ })
+ .collect::<Vec<_>>();
+ assert_eq!(actual, expected);
+}
+
+#[gpui::test]
+async fn test_code_context_retrieval_rust() {
+ let language = rust_lang();
+ let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
+ let mut retriever = CodeContextRetriever::new(embedding_provider);
+
+ let text = "
+ /// A doc comment
+ /// that spans multiple lines
+ #[gpui::test]
+ fn a() {
+ b
+ }
+
+ impl C for D {
+ }
+
+ impl E {
+ // This is also a preceding comment
+ pub fn function_1() -> Option<()> {
+ unimplemented!();
+ }
+
+ // This is a preceding comment
+ fn function_2() -> Result<()> {
+ unimplemented!();
+ }
+ }
+
+ #[derive(Clone)]
+ struct D {
+ name: String
+ }
+ "
+ .unindent();
+
+ let documents = retriever.parse_file(&text, language).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[
+ (
+ "
+ /// A doc comment
+ /// that spans multiple lines
+ #[gpui::test]
+ fn a() {
+ b
+ }"
+ .unindent(),
+ text.find("fn a").unwrap(),
+ ),
+ (
+ "
+ impl C for D {
+ }"
+ .unindent(),
+ text.find("impl C").unwrap(),
+ ),
+ (
+ "
+ impl E {
+ // This is also a preceding comment
+ pub fn function_1() -> Option<()> { /* ... */ }
+
+ // This is a preceding comment
+ fn function_2() -> Result<()> { /* ... */ }
+ }"
+ .unindent(),
+ text.find("impl E").unwrap(),
+ ),
+ (
+ "
+ // This is also a preceding comment
+ pub fn function_1() -> Option<()> {
+ unimplemented!();
+ }"
+ .unindent(),
+ text.find("pub fn function_1").unwrap(),
+ ),
+ (
+ "
+ // This is a preceding comment
+ fn function_2() -> Result<()> {
+ unimplemented!();
+ }"
+ .unindent(),
+ text.find("fn function_2").unwrap(),
+ ),
+ (
+ "
+ #[derive(Clone)]
+ struct D {
+ name: String
+ }"
+ .unindent(),
+ text.find("struct D").unwrap(),
+ ),
+ ],
+ );
+}
+
+#[gpui::test]
+async fn test_code_context_retrieval_json() {
+ let language = json_lang();
+ let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
+ let mut retriever = CodeContextRetriever::new(embedding_provider);
+
+ let text = r#"
+ {
+ "array": [1, 2, 3, 4],
+ "string": "abcdefg",
+ "nested_object": {
+ "array_2": [5, 6, 7, 8],
+ "string_2": "hijklmnop",
+ "boolean": true,
+ "none": null
+ }
+ }
+ "#
+ .unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[(
+ r#"
+ {
+ "array": [],
+ "string": "",
+ "nested_object": {
+ "array_2": [],
+ "string_2": "",
+ "boolean": true,
+ "none": null
+ }
+ }"#
+ .unindent(),
+ text.find("{").unwrap(),
+ )],
+ );
+
+ let text = r#"
+ [
+ {
+ "name": "somebody",
+ "age": 42
+ },
+ {
+ "name": "somebody else",
+ "age": 43
+ }
+ ]
+ "#
+ .unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[(
+ r#"
+ [{
+ "name": "",
+ "age": 42
+ }]"#
+ .unindent(),
+ text.find("[").unwrap(),
+ )],
+ );
+}
+
+fn assert_documents_eq(
+ documents: &[Span],
+ expected_contents_and_start_offsets: &[(String, usize)],
+) {
+ assert_eq!(
+ documents
+ .iter()
+ .map(|document| (document.content.clone(), document.range.start))
+ .collect::<Vec<_>>(),
+ expected_contents_and_start_offsets
+ );
+}
+
+#[gpui::test]
+async fn test_code_context_retrieval_javascript() {
+ let language = js_lang();
+ let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
+ let mut retriever = CodeContextRetriever::new(embedding_provider);
+
+ let text = "
+ /* globals importScripts, backend */
+ function _authorize() {}
+
+ /**
+ * Sometimes the frontend build is way faster than backend.
+ */
+ export async function authorizeBank() {
+ _authorize(pushModal, upgradingAccountId, {});
+ }
+
+ export class SettingsPage {
+ /* This is a test setting */
+ constructor(page) {
+ this.page = page;
+ }
+ }
+
+ /* This is a test comment */
+ class TestClass {}
+
+ /* Schema for editor_events in Clickhouse. */
+ export interface ClickhouseEditorEvent {
+ installation_id: string
+ operation: string
+ }
+ "
+ .unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[
+ (
+ "
+ /* globals importScripts, backend */
+ function _authorize() {}"
+ .unindent(),
+ 37,
+ ),
+ (
+ "
+ /**
+ * Sometimes the frontend build is way faster than backend.
+ */
+ export async function authorizeBank() {
+ _authorize(pushModal, upgradingAccountId, {});
+ }"
+ .unindent(),
+ 131,
+ ),
+ (
+ "
+ export class SettingsPage {
+ /* This is a test setting */
+ constructor(page) {
+ this.page = page;
+ }
+ }"
+ .unindent(),
+ 225,
+ ),
+ (
+ "
+ /* This is a test setting */
+ constructor(page) {
+ this.page = page;
+ }"
+ .unindent(),
+ 290,
+ ),
+ (
+ "
+ /* This is a test comment */
+ class TestClass {}"
+ .unindent(),
+ 374,
+ ),
+ (
+ "
+ /* Schema for editor_events in Clickhouse. */
+ export interface ClickhouseEditorEvent {
+ installation_id: string
+ operation: string
+ }"
+ .unindent(),
+ 440,
+ ),
+ ],
+ )
+}
+
+#[gpui::test]
+async fn test_code_context_retrieval_lua() {
+ let language = lua_lang();
+ let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
+ let mut retriever = CodeContextRetriever::new(embedding_provider);
+
+ let text = r#"
+ -- Creates a new class
+ -- @param baseclass The Baseclass of this class, or nil.
+ -- @return A new class reference.
+ function classes.class(baseclass)
+ -- Create the class definition and metatable.
+ local classdef = {}
+ -- Find the super class, either Object or user-defined.
+ baseclass = baseclass or classes.Object
+ -- If this class definition does not know of a function, it will 'look up' to the Baseclass via the __index of the metatable.
+ setmetatable(classdef, { __index = baseclass })
+ -- All class instances have a reference to the class object.
+ classdef.class = classdef
+ --- Recursivly allocates the inheritance tree of the instance.
+ -- @param mastertable The 'root' of the inheritance tree.
+ -- @return Returns the instance with the allocated inheritance tree.
+ function classdef.alloc(mastertable)
+ -- All class instances have a reference to a superclass object.
+ local instance = { super = baseclass.alloc(mastertable) }
+ -- Any functions this instance does not know of will 'look up' to the superclass definition.
+ setmetatable(instance, { __index = classdef, __newindex = mastertable })
+ return instance
+ end
+ end
+ "#.unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[
+ (r#"
+ -- Creates a new class
+ -- @param baseclass The Baseclass of this class, or nil.
+ -- @return A new class reference.
+ function classes.class(baseclass)
+ -- Create the class definition and metatable.
+ local classdef = {}
+ -- Find the super class, either Object or user-defined.
+ baseclass = baseclass or classes.Object
+ -- If this class definition does not know of a function, it will 'look up' to the Baseclass via the __index of the metatable.
+ setmetatable(classdef, { __index = baseclass })
+ -- All class instances have a reference to the class object.
+ classdef.class = classdef
+ --- Recursivly allocates the inheritance tree of the instance.
+ -- @param mastertable The 'root' of the inheritance tree.
+ -- @return Returns the instance with the allocated inheritance tree.
+ function classdef.alloc(mastertable)
+ --[ ... ]--
+ --[ ... ]--
+ end
+ end"#.unindent(),
+ 114),
+ (r#"
+ --- Recursivly allocates the inheritance tree of the instance.
+ -- @param mastertable The 'root' of the inheritance tree.
+ -- @return Returns the instance with the allocated inheritance tree.
+ function classdef.alloc(mastertable)
+ -- All class instances have a reference to a superclass object.
+ local instance = { super = baseclass.alloc(mastertable) }
+ -- Any functions this instance does not know of will 'look up' to the superclass definition.
+ setmetatable(instance, { __index = classdef, __newindex = mastertable })
+ return instance
+ end"#.unindent(), 809),
+ ]
+ );
+}
+
+#[gpui::test]
+async fn test_code_context_retrieval_elixir() {
+ let language = elixir_lang();
+ let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
+ let mut retriever = CodeContextRetriever::new(embedding_provider);
+
+ let text = r#"
+ defmodule File.Stream do
+ @moduledoc """
+ Defines a `File.Stream` struct returned by `File.stream!/3`.
+
+ The following fields are public:
+
+ * `path` - the file path
+ * `modes` - the file modes
+ * `raw` - a boolean indicating if bin functions should be used
+ * `line_or_bytes` - if reading should read lines or a given number of bytes
+ * `node` - the node the file belongs to
+
+ """
+
+ defstruct path: nil, modes: [], line_or_bytes: :line, raw: true, node: nil
+
+ @type t :: %__MODULE__{}
+
+ @doc false
+ def __build__(path, modes, line_or_bytes) do
+ raw = :lists.keyfind(:encoding, 1, modes) == false
+
+ modes =
+ case raw do
+ true ->
+ case :lists.keyfind(:read_ahead, 1, modes) do
+ {:read_ahead, false} -> [:raw | :lists.keydelete(:read_ahead, 1, modes)]
+ {:read_ahead, _} -> [:raw | modes]
+ false -> [:raw, :read_ahead | modes]
+ end
+
+ false ->
+ modes
+ end
+
+ %File.Stream{path: path, modes: modes, raw: raw, line_or_bytes: line_or_bytes, node: node()}
+
+ end"#
+ .unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[(
+ r#"
+ defmodule File.Stream do
+ @moduledoc """
+ Defines a `File.Stream` struct returned by `File.stream!/3`.
+
+ The following fields are public:
+
+ * `path` - the file path
+ * `modes` - the file modes
+ * `raw` - a boolean indicating if bin functions should be used
+ * `line_or_bytes` - if reading should read lines or a given number of bytes
+ * `node` - the node the file belongs to
+
+ """
+
+ defstruct path: nil, modes: [], line_or_bytes: :line, raw: true, node: nil
+
+ @type t :: %__MODULE__{}
+
+ @doc false
+ def __build__(path, modes, line_or_bytes) do
+ raw = :lists.keyfind(:encoding, 1, modes) == false
+
+ modes =
+ case raw do
+ true ->
+ case :lists.keyfind(:read_ahead, 1, modes) do
+ {:read_ahead, false} -> [:raw | :lists.keydelete(:read_ahead, 1, modes)]
+ {:read_ahead, _} -> [:raw | modes]
+ false -> [:raw, :read_ahead | modes]
+ end
+
+ false ->
+ modes
+ end
+
+ %File.Stream{path: path, modes: modes, raw: raw, line_or_bytes: line_or_bytes, node: node()}
+
+ end"#
+ .unindent(),
+ 0,
+ ),(r#"
+ @doc false
+ def __build__(path, modes, line_or_bytes) do
+ raw = :lists.keyfind(:encoding, 1, modes) == false
+
+ modes =
+ case raw do
+ true ->
+ case :lists.keyfind(:read_ahead, 1, modes) do
+ {:read_ahead, false} -> [:raw | :lists.keydelete(:read_ahead, 1, modes)]
+ {:read_ahead, _} -> [:raw | modes]
+ false -> [:raw, :read_ahead | modes]
+ end
+
+ false ->
+ modes
+ end
+
+ %File.Stream{path: path, modes: modes, raw: raw, line_or_bytes: line_or_bytes, node: node()}
+
+ end"#.unindent(), 574)],
+ );
+}
+
+#[gpui::test]
+async fn test_code_context_retrieval_cpp() {
+ let language = cpp_lang();
+ let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
+ let mut retriever = CodeContextRetriever::new(embedding_provider);
+
+ let text = "
+ /**
+ * @brief Main function
+ * @returns 0 on exit
+ */
+ int main() { return 0; }
+
+ /**
+ * This is a test comment
+ */
+ class MyClass { // The class
+ public: // Access specifier
+ int myNum; // Attribute (int variable)
+ string myString; // Attribute (string variable)
+ };
+
+ // This is a test comment
+ enum Color { red, green, blue };
+
+ /** This is a preceding block comment
+ * This is the second line
+ */
+ struct { // Structure declaration
+ int myNum; // Member (int variable)
+ string myString; // Member (string variable)
+ } myStructure;
+
+ /**
+ * @brief Matrix class.
+ */
+ template <typename T,
+ typename = typename std::enable_if<
+ std::is_integral<T>::value || std::is_floating_point<T>::value,
+ bool>::type>
+ class Matrix2 {
+ std::vector<std::vector<T>> _mat;
+
+ public:
+ /**
+ * @brief Constructor
+ * @tparam Integer ensuring integers are being evaluated and not other
+ * data types.
+ * @param size denoting the size of Matrix as size x size
+ */
+ template <typename Integer,
+ typename = typename std::enable_if<std::is_integral<Integer>::value,
+ Integer>::type>
+ explicit Matrix(const Integer size) {
+ for (size_t i = 0; i < size; ++i) {
+ _mat.emplace_back(std::vector<T>(size, 0));
+ }
+ }
+ }"
+ .unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[
+ (
+ "
+ /**
+ * @brief Main function
+ * @returns 0 on exit
+ */
+ int main() { return 0; }"
+ .unindent(),
+ 54,
+ ),
+ (
+ "
+ /**
+ * This is a test comment
+ */
+ class MyClass { // The class
+ public: // Access specifier
+ int myNum; // Attribute (int variable)
+ string myString; // Attribute (string variable)
+ }"
+ .unindent(),
+ 112,
+ ),
+ (
+ "
+ // This is a test comment
+ enum Color { red, green, blue }"
+ .unindent(),
+ 322,
+ ),
+ (
+ "
+ /** This is a preceding block comment
+ * This is the second line
+ */
+ struct { // Structure declaration
+ int myNum; // Member (int variable)
+ string myString; // Member (string variable)
+ } myStructure;"
+ .unindent(),
+ 425,
+ ),
+ (
+ "
+ /**
+ * @brief Matrix class.
+ */
+ template <typename T,
+ typename = typename std::enable_if<
+ std::is_integral<T>::value || std::is_floating_point<T>::value,
+ bool>::type>
+ class Matrix2 {
+ std::vector<std::vector<T>> _mat;
+
+ public:
+ /**
+ * @brief Constructor
+ * @tparam Integer ensuring integers are being evaluated and not other
+ * data types.
+ * @param size denoting the size of Matrix as size x size
+ */
+ template <typename Integer,
+ typename = typename std::enable_if<std::is_integral<Integer>::value,
+ Integer>::type>
+ explicit Matrix(const Integer size) {
+ for (size_t i = 0; i < size; ++i) {
+ _mat.emplace_back(std::vector<T>(size, 0));
+ }
+ }
+ }"
+ .unindent(),
+ 612,
+ ),
+ (
+ "
+ explicit Matrix(const Integer size) {
+ for (size_t i = 0; i < size; ++i) {
+ _mat.emplace_back(std::vector<T>(size, 0));
+ }
+ }"
+ .unindent(),
+ 1226,
+ ),
+ ],
+ );
+}
+
+#[gpui::test]
+async fn test_code_context_retrieval_ruby() {
+ let language = ruby_lang();
+ let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
+ let mut retriever = CodeContextRetriever::new(embedding_provider);
+
+ let text = r#"
+ # This concern is inspired by "sudo mode" on GitHub. It
+ # is a way to re-authenticate a user before allowing them
+ # to see or perform an action.
+ #
+ # Add `before_action :require_challenge!` to actions you
+ # want to protect.
+ #
+ # The user will be shown a page to enter the challenge (which
+ # is either the password, or just the username when no
+ # password exists). Upon passing, there is a grace period
+ # during which no challenge will be asked from the user.
+ #
+ # Accessing challenge-protected resources during the grace
+ # period will refresh the grace period.
+ module ChallengableConcern
+ extend ActiveSupport::Concern
+
+ CHALLENGE_TIMEOUT = 1.hour.freeze
+
+ def require_challenge!
+ return if skip_challenge?
+
+ if challenge_passed_recently?
+ session[:challenge_passed_at] = Time.now.utc
+ return
+ end
+
+ @challenge = Form::Challenge.new(return_to: request.url)
+
+ if params.key?(:form_challenge)
+ if challenge_passed?
+ session[:challenge_passed_at] = Time.now.utc
+ else
+ flash.now[:alert] = I18n.t('challenge.invalid_password')
+ render_challenge
+ end
+ else
+ render_challenge
+ end
+ end
+
+ def challenge_passed?
+ current_user.valid_password?(challenge_params[:current_password])
+ end
+ end
+
+ class Animal
+ include Comparable
+
+ attr_reader :legs
+
+ def initialize(name, legs)
+ @name, @legs = name, legs
+ end
+
+ def <=>(other)
+ legs <=> other.legs
+ end
+ end
+
+ # Singleton method for car object
+ def car.wheels
+ puts "There are four wheels"
+ end"#
+ .unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[
+ (
+ r#"
+ # This concern is inspired by "sudo mode" on GitHub. It
+ # is a way to re-authenticate a user before allowing them
+ # to see or perform an action.
+ #
+ # Add `before_action :require_challenge!` to actions you
+ # want to protect.
+ #
+ # The user will be shown a page to enter the challenge (which
+ # is either the password, or just the username when no
+ # password exists). Upon passing, there is a grace period
+ # during which no challenge will be asked from the user.
+ #
+ # Accessing challenge-protected resources during the grace
+ # period will refresh the grace period.
+ module ChallengableConcern
+ extend ActiveSupport::Concern
+
+ CHALLENGE_TIMEOUT = 1.hour.freeze
+
+ def require_challenge!
+ # ...
+ end
+
+ def challenge_passed?
+ # ...
+ end
+ end"#
+ .unindent(),
+ 558,
+ ),
+ (
+ r#"
+ def require_challenge!
+ return if skip_challenge?
+
+ if challenge_passed_recently?
+ session[:challenge_passed_at] = Time.now.utc
+ return
+ end
+
+ @challenge = Form::Challenge.new(return_to: request.url)
+
+ if params.key?(:form_challenge)
+ if challenge_passed?
+ session[:challenge_passed_at] = Time.now.utc
+ else
+ flash.now[:alert] = I18n.t('challenge.invalid_password')
+ render_challenge
+ end
+ else
+ render_challenge
+ end
+ end"#
+ .unindent(),
+ 663,
+ ),
+ (
+ r#"
+ def challenge_passed?
+ current_user.valid_password?(challenge_params[:current_password])
+ end"#
+ .unindent(),
+ 1254,
+ ),
+ (
+ r#"
+ class Animal
+ include Comparable
+
+ attr_reader :legs
+
+ def initialize(name, legs)
+ # ...
+ end
+
+ def <=>(other)
+ # ...
+ end
+ end"#
+ .unindent(),
+ 1363,
+ ),
+ (
+ r#"
+ def initialize(name, legs)
+ @name, @legs = name, legs
+ end"#
+ .unindent(),
+ 1427,
+ ),
+ (
+ r#"
+ def <=>(other)
+ legs <=> other.legs
+ end"#
+ .unindent(),
+ 1501,
+ ),
+ (
+ r#"
+ # Singleton method for car object
+ def car.wheels
+ puts "There are four wheels"
+ end"#
+ .unindent(),
+ 1591,
+ ),
+ ],
+ );
+}
+
+#[gpui::test]
+async fn test_code_context_retrieval_php() {
+ let language = php_lang();
+ let embedding_provider = Arc::new(FakeEmbeddingProvider::default());
+ let mut retriever = CodeContextRetriever::new(embedding_provider);
+
+ let text = r#"
+ <?php
+
+ namespace LevelUp\Experience\Concerns;
+
+ /*
+ This is a multiple-lines comment block
+ that spans over multiple
+ lines
+ */
+ function functionName() {
+ echo "Hello world!";
+ }
+
+ trait HasAchievements
+ {
+ /**
+ * @throws \Exception
+ */
+ public function grantAchievement(Achievement $achievement, $progress = null): void
+ {
+ if ($progress > 100) {
+ throw new Exception(message: 'Progress cannot be greater than 100');
+ }
+
+ if ($this->achievements()->find($achievement->id)) {
+ throw new Exception(message: 'User already has this Achievement');
+ }
+
+ $this->achievements()->attach($achievement, [
+ 'progress' => $progress ?? null,
+ ]);
+
+ $this->when(value: ($progress === null) || ($progress === 100), callback: fn (): ?array => event(new AchievementAwarded(achievement: $achievement, user: $this)));
+ }
+
+ public function achievements(): BelongsToMany
+ {
+ return $this->belongsToMany(related: Achievement::class)
+ ->withPivot(columns: 'progress')
+ ->where('is_secret', false)
+ ->using(AchievementUser::class);
+ }
+ }
+
+ interface Multiplier
+ {
+ public function qualifies(array $data): bool;
+
+ public function setMultiplier(): int;
+ }
+
+ enum AuditType: string
+ {
+ case Add = 'add';
+ case Remove = 'remove';
+ case Reset = 'reset';
+ case LevelUp = 'level_up';
+ }
+
+ ?>"#
+ .unindent();
+
+ let documents = retriever.parse_file(&text, language.clone()).unwrap();
+
+ assert_documents_eq(
+ &documents,
+ &[
+ (
+ r#"
+ /*
+ This is a multiple-lines comment block
+ that spans over multiple
+ lines
+ */
+ function functionName() {
+ echo "Hello world!";
+ }"#
+ .unindent(),
+ 123,
+ ),
+ (
+ r#"
+ trait HasAchievements
+ {
+ /**
+ * @throws \Exception
+ */
+ public function grantAchievement(Achievement $achievement, $progress = null): void
+ {/* ... */}
+
+ public function achievements(): BelongsToMany
+ {/* ... */}
+ }"#
+ .unindent(),
+ 177,
+ ),
+ (r#"
+ /**
+ * @throws \Exception
+ */
+ public function grantAchievement(Achievement $achievement, $progress = null): void
+ {
+ if ($progress > 100) {
+ throw new Exception(message: 'Progress cannot be greater than 100');
+ }
+
+ if ($this->achievements()->find($achievement->id)) {
+ throw new Exception(message: 'User already has this Achievement');
+ }
+
+ $this->achievements()->attach($achievement, [
+ 'progress' => $progress ?? null,
+ ]);
+
+ $this->when(value: ($progress === null) || ($progress === 100), callback: fn (): ?array => event(new AchievementAwarded(achievement: $achievement, user: $this)));
+ }"#.unindent(), 245),
+ (r#"
+ public function achievements(): BelongsToMany
+ {
+ return $this->belongsToMany(related: Achievement::class)
+ ->withPivot(columns: 'progress')
+ ->where('is_secret', false)
+ ->using(AchievementUser::class);
+ }"#.unindent(), 902),
+ (r#"
+ interface Multiplier
+ {
+ public function qualifies(array $data): bool;
+
+ public function setMultiplier(): int;
+ }"#.unindent(),
+ 1146),
+ (r#"
+ enum AuditType: string
+ {
+ case Add = 'add';
+ case Remove = 'remove';
+ case Reset = 'reset';
+ case LevelUp = 'level_up';
+ }"#.unindent(), 1265)
+ ],
+ );
+}
+
+fn js_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Javascript".into(),
+ path_suffixes: vec!["js".into()],
+ ..Default::default()
+ },
+ Some(tree_sitter_typescript::language_tsx()),
+ )
+ .with_embedding_query(
+ &r#"
+
+ (
+ (comment)* @context
+ .
+ [
+ (export_statement
+ (function_declaration
+ "async"? @name
+ "function" @name
+ name: (_) @name))
+ (function_declaration
+ "async"? @name
+ "function" @name
+ name: (_) @name)
+ ] @item
+ )
+
+ (
+ (comment)* @context
+ .
+ [
+ (export_statement
+ (class_declaration
+ "class" @name
+ name: (_) @name))
+ (class_declaration
+ "class" @name
+ name: (_) @name)
+ ] @item
+ )
+
+ (
+ (comment)* @context
+ .
+ [
+ (export_statement
+ (interface_declaration
+ "interface" @name
+ name: (_) @name))
+ (interface_declaration
+ "interface" @name
+ name: (_) @name)
+ ] @item
+ )
+
+ (
+ (comment)* @context
+ .
+ [
+ (export_statement
+ (enum_declaration
+ "enum" @name
+ name: (_) @name))
+ (enum_declaration
+ "enum" @name
+ name: (_) @name)
+ ] @item
+ )
+
+ (
+ (comment)* @context
+ .
+ (method_definition
+ [
+ "get"
+ "set"
+ "async"
+ "*"
+ "static"
+ ]* @name
+ name: (_) @name) @item
+ )
+
+ "#
+ .unindent(),
+ )
+ .unwrap(),
+ )
+}
+
+fn rust_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".into()],
+ collapsed_placeholder: " /* ... */ ".to_string(),
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )
+ .with_embedding_query(
+ r#"
+ (
+ [(line_comment) (attribute_item)]* @context
+ .
+ [
+ (struct_item
+ name: (_) @name)
+
+ (enum_item
+ name: (_) @name)
+
+ (impl_item
+ trait: (_)? @name
+ "for"? @name
+ type: (_) @name)
+
+ (trait_item
+ name: (_) @name)
+
+ (function_item
+ name: (_) @name
+ body: (block
+ "{" @keep
+ "}" @keep) @collapse)
+
+ (macro_definition
+ name: (_) @name)
+ ] @item
+ )
+
+ (attribute_item) @collapse
+ (use_declaration) @collapse
+ "#,
+ )
+ .unwrap(),
+ )
+}
+
+fn json_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "JSON".into(),
+ path_suffixes: vec!["json".into()],
+ ..Default::default()
+ },
+ Some(tree_sitter_json::language()),
+ )
+ .with_embedding_query(
+ r#"
+ (document) @item
+
+ (array
+ "[" @keep
+ .
+ (object)? @keep
+ "]" @keep) @collapse
+
+ (pair value: (string
+ "\"" @keep
+ "\"" @keep) @collapse)
+ "#,
+ )
+ .unwrap(),
+ )
+}
+
+fn toml_lang() -> Arc<Language> {
+ Arc::new(Language::new(
+ LanguageConfig {
+ name: "TOML".into(),
+ path_suffixes: vec!["toml".into()],
+ ..Default::default()
+ },
+ Some(tree_sitter_toml::language()),
+ ))
+}
+
+fn cpp_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "CPP".into(),
+ path_suffixes: vec!["cpp".into()],
+ ..Default::default()
+ },
+ Some(tree_sitter_cpp::language()),
+ )
+ .with_embedding_query(
+ r#"
+ (
+ (comment)* @context
+ .
+ (function_definition
+ (type_qualifier)? @name
+ type: (_)? @name
+ declarator: [
+ (function_declarator
+ declarator: (_) @name)
+ (pointer_declarator
+ "*" @name
+ declarator: (function_declarator
+ declarator: (_) @name))
+ (pointer_declarator
+ "*" @name
+ declarator: (pointer_declarator
+ "*" @name
+ declarator: (function_declarator
+ declarator: (_) @name)))
+ (reference_declarator
+ ["&" "&&"] @name
+ (function_declarator
+ declarator: (_) @name))
+ ]
+ (type_qualifier)? @name) @item
+ )
+
+ (
+ (comment)* @context
+ .
+ (template_declaration
+ (class_specifier
+ "class" @name
+ name: (_) @name)
+ ) @item
+ )
+
+ (
+ (comment)* @context
+ .
+ (class_specifier
+ "class" @name
+ name: (_) @name) @item
+ )
+
+ (
+ (comment)* @context
+ .
+ (enum_specifier
+ "enum" @name
+ name: (_) @name) @item
+ )
+
+ (
+ (comment)* @context
+ .
+ (declaration
+ type: (struct_specifier
+ "struct" @name)
+ declarator: (_) @name) @item
+ )
+
+ "#,
+ )
+ .unwrap(),
+ )
+}
+
+fn lua_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Lua".into(),
+ path_suffixes: vec!["lua".into()],
+ collapsed_placeholder: "--[ ... ]--".to_string(),
+ ..Default::default()
+ },
+ Some(tree_sitter_lua::language()),
+ )
+ .with_embedding_query(
+ r#"
+ (
+ (comment)* @context
+ .
+ (function_declaration
+ "function" @name
+ name: (_) @name
+ (comment)* @collapse
+ body: (block) @collapse
+ ) @item
+ )
+ "#,
+ )
+ .unwrap(),
+ )
+}
+
+fn php_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "PHP".into(),
+ path_suffixes: vec!["php".into()],
+ collapsed_placeholder: "/* ... */".into(),
+ ..Default::default()
+ },
+ Some(tree_sitter_php::language()),
+ )
+ .with_embedding_query(
+ r#"
+ (
+ (comment)* @context
+ .
+ [
+ (function_definition
+ "function" @name
+ name: (_) @name
+ body: (_
+ "{" @keep
+ "}" @keep) @collapse
+ )
+
+ (trait_declaration
+ "trait" @name
+ name: (_) @name)
+
+ (method_declaration
+ "function" @name
+ name: (_) @name
+ body: (_
+ "{" @keep
+ "}" @keep) @collapse
+ )
+
+ (interface_declaration
+ "interface" @name
+ name: (_) @name
+ )
+
+ (enum_declaration
+ "enum" @name
+ name: (_) @name
+ )
+
+ ] @item
+ )
+ "#,
+ )
+ .unwrap(),
+ )
+}
+
+fn ruby_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Ruby".into(),
+ path_suffixes: vec!["rb".into()],
+ collapsed_placeholder: "# ...".to_string(),
+ ..Default::default()
+ },
+ Some(tree_sitter_ruby::language()),
+ )
+ .with_embedding_query(
+ r#"
+ (
+ (comment)* @context
+ .
+ [
+ (module
+ "module" @name
+ name: (_) @name)
+ (method
+ "def" @name
+ name: (_) @name
+ body: (body_statement) @collapse)
+ (class
+ "class" @name
+ name: (_) @name)
+ (singleton_method
+ "def" @name
+ object: (_) @name
+ "." @name
+ name: (_) @name
+ body: (body_statement) @collapse)
+ ] @item
+ )
+ "#,
+ )
+ .unwrap(),
+ )
+}
+
+fn elixir_lang() -> Arc<Language> {
+ Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "Elixir".into(),
+ path_suffixes: vec!["rs".into()],
+ ..Default::default()
+ },
+ Some(tree_sitter_elixir::language()),
+ )
+ .with_embedding_query(
+ r#"
+ (
+ (unary_operator
+ operator: "@"
+ operand: (call
+ target: (identifier) @unary
+ (#match? @unary "^(doc)$"))
+ ) @context
+ .
+ (call
+ target: (identifier) @name
+ (arguments
+ [
+ (identifier) @name
+ (call
+ target: (identifier) @name)
+ (binary_operator
+ left: (call
+ target: (identifier) @name)
+ operator: "when")
+ ])
+ (#any-match? @name "^(def|defp|defdelegate|defguard|defguardp|defmacro|defmacrop|defn|defnp)$")) @item
+ )
+
+ (call
+ target: (identifier) @name
+ (arguments (alias) @name)
+ (#any-match? @name "^(defmodule|defprotocol)$")) @item
+ "#,
+ )
+ .unwrap(),
+ )
+}
+
+#[gpui::test]
+fn test_subtract_ranges() {
+ // subtract_ranges(collapsed_ranges, keep_ranges) removes each keep range from the collapsed ranges, splitting them where necessary.
+
+ assert_eq!(
+ subtract_ranges(&[0..5, 10..21], &[0..1, 4..5]),
+ vec![1..4, 10..21]
+ );
+
+ assert_eq!(subtract_ranges(&[0..5], &[1..2]), &[0..1, 2..5]);
+}
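// A minimal sketch of how `subtract_ranges` could satisfy the assertions
// above, assuming `keep_ranges` is sorted and non-overlapping; the crate's
// actual implementation may differ in detail.
//
//     use std::ops::Range;
//
//     fn subtract_ranges(
//         collapsed_ranges: &[Range<usize>],
//         keep_ranges: &[Range<usize>],
//     ) -> Vec<Range<usize>> {
//         let mut result = Vec::new();
//         for range in collapsed_ranges {
//             let mut start = range.start;
//             for keep in keep_ranges {
//                 // Keep ranges that end before or begin after this range are irrelevant.
//                 if keep.end <= start || keep.start >= range.end {
//                     continue;
//                 }
//                 if keep.start > start {
//                     result.push(start..keep.start);
//                 }
//                 start = start.max(keep.end);
//             }
//             if start < range.end {
//                 result.push(start..range.end);
//             }
//         }
//         result
//     }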
+
+fn init_test(cx: &mut TestAppContext) {
+ cx.update(|cx| {
+ let settings_store = SettingsStore::test(cx);
+ cx.set_global(settings_store);
+ SemanticIndexSettings::register(cx);
+ ProjectSettings::register(cx);
+ });
+}
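The embedding queries above all use the same small vocabulary of captures: `@item` for the span to index, `@name` for the tokens that label it, `@context` for leading comments, and `@collapse`/`@keep` for the parts replaced by each language's `collapsed_placeholder`. Those conventions are inferred from the queries and assertions in this file. As a rough, self-contained illustration of how such captures surface through the generic tree-sitter query API of this era (0.20.x), using a deliberately trimmed-down query rather than the retriever's own logic:

    use tree_sitter::{Parser, Query, QueryCursor};

    fn main() {
        // Trimmed-down query: capture each function item and its name.
        let query_source = "(function_item name: (_) @name) @item";
        let language = tree_sitter_rust::language();
        let query = Query::new(language, query_source).expect("query should compile");

        let mut parser = Parser::new();
        parser.set_language(language).unwrap();
        let source = "fn greet() { println!(\"hello\"); }";
        let tree = parser.parse(source, None).unwrap();

        let mut cursor = QueryCursor::new();
        for query_match in cursor.matches(&query, tree.root_node(), source.as_bytes()) {
            for capture in query_match.captures {
                let capture_name = &query.capture_names()[capture.index as usize];
                let text = capture.node.utf8_text(source.as_bytes()).unwrap();
                println!("@{capture_name}: {text}");
            }
        }
    }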
@@ -1132,6 +1132,7 @@ mod tests {
})
})
.await
+ .unwrap()
.unwrap();
(wt, entry)
@@ -299,11 +299,8 @@ impl TerminalView {
cx: &mut ViewContext<Self>,
) {
self.context_menu = Some(ContextMenu::build(cx, |menu, cx| {
- menu.action("Clear", Box::new(Clear), cx).action(
- "Close",
- Box::new(CloseActiveItem { save_intent: None }),
- cx,
- )
+ menu.action("Clear", Box::new(Clear))
+ .action("Close", Box::new(CloseActiveItem { save_intent: None }))
}));
dbg!(&position);
// todo!()
@@ -1173,6 +1170,7 @@ mod tests {
})
})
.await
+ .unwrap()
.unwrap();
(wt, entry)
@@ -2,14 +2,14 @@ use feature_flags::FeatureFlagAppExt;
use fs::Fs;
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
use gpui::{
- actions, AppContext, DismissEvent, EventEmitter, FocusableView, Render, SharedString, View,
- ViewContext, VisualContext, WeakView,
+ actions, AppContext, DismissEvent, Div, EventEmitter, FocusableView, Render, SharedString,
+ View, ViewContext, VisualContext, WeakView,
};
use picker::{Picker, PickerDelegate};
use settings::{update_settings_file, SettingsStore};
use std::sync::Arc;
use theme::{Theme, ThemeRegistry, ThemeSettings};
-use ui::{prelude::*, ListItem};
+use ui::{prelude::*, v_stack, ListItem};
use util::ResultExt;
use workspace::{ui::HighlightedLabel, Workspace};
@@ -65,10 +65,10 @@ impl FocusableView for ThemeSelector {
}
impl Render for ThemeSelector {
- type Element = View<Picker<ThemeSelectorDelegate>>;
+ type Element = Div;
fn render(&mut self, _cx: &mut ViewContext<Self>) -> Self::Element {
- self.picker.clone()
+ v_stack().min_w_96().child(self.picker.clone())
}
}
@@ -98,7 +98,7 @@ impl ThemeSelectorDelegate {
let original_theme = cx.theme().clone();
let staff_mode = cx.is_staff();
- let registry = cx.global::<Arc<ThemeRegistry>>();
+ let registry = cx.global::<ThemeRegistry>();
let theme_names = registry.list(staff_mode).collect::<Vec<_>>();
//todo!(theme sorting)
// theme_names.sort_unstable_by(|a, b| a.is_light.cmp(&b.is_light).then(a.name.cmp(&b.name)));
@@ -126,7 +126,7 @@ impl ThemeSelectorDelegate {
fn show_selected_theme(&mut self, cx: &mut ViewContext<Picker<ThemeSelectorDelegate>>) {
if let Some(mat) = self.matches.get(self.selected_index) {
- let registry = cx.global::<Arc<ThemeRegistry>>();
+ let registry = cx.global::<ThemeRegistry>();
match registry.get(&mat.string) {
Ok(theme) => {
Self::set_theme(theme, cx);
@@ -7,7 +7,7 @@ use gpui::{
IntoElement, Render, View, VisualContext,
};
use menu::{SelectFirst, SelectLast, SelectNext, SelectPrev};
-use std::rc::Rc;
+use std::{rc::Rc, time::Duration};
pub enum ContextMenuItem {
Separator,
@@ -16,7 +16,7 @@ pub enum ContextMenuItem {
label: SharedString,
icon: Option<Icon>,
handler: Rc<dyn Fn(&mut WindowContext)>,
- key_binding: Option<KeyBinding>,
+ action: Option<Box<dyn Action>>,
},
}
@@ -24,6 +24,7 @@ pub struct ContextMenu {
items: Vec<ContextMenuItem>,
focus_handle: FocusHandle,
selected_index: Option<usize>,
+ delayed: bool,
}
impl FocusableView for ContextMenu {
@@ -46,6 +47,7 @@ impl ContextMenu {
items: Default::default(),
focus_handle: cx.focus_handle(),
selected_index: None,
+ delayed: false,
},
cx,
)
@@ -70,36 +72,26 @@ impl ContextMenu {
self.items.push(ContextMenuItem::Entry {
label: label.into(),
handler: Rc::new(on_click),
- key_binding: None,
icon: None,
+ action: None,
});
self
}
- pub fn action(
- mut self,
- label: impl Into<SharedString>,
- action: Box<dyn Action>,
- cx: &mut WindowContext,
- ) -> Self {
+ pub fn action(mut self, label: impl Into<SharedString>, action: Box<dyn Action>) -> Self {
self.items.push(ContextMenuItem::Entry {
label: label.into(),
- key_binding: KeyBinding::for_action(&*action, cx),
+ action: Some(action.boxed_clone()),
handler: Rc::new(move |cx| cx.dispatch_action(action.boxed_clone())),
icon: None,
});
self
}
- pub fn link(
- mut self,
- label: impl Into<SharedString>,
- action: Box<dyn Action>,
- cx: &mut WindowContext,
- ) -> Self {
+ pub fn link(mut self, label: impl Into<SharedString>, action: Box<dyn Action>) -> Self {
self.items.push(ContextMenuItem::Entry {
label: label.into(),
- key_binding: KeyBinding::for_action(&*action, cx),
+ action: Some(action.boxed_clone()),
handler: Rc::new(move |cx| cx.dispatch_action(action.boxed_clone())),
icon: Some(Icon::Link),
});
@@ -161,6 +153,37 @@ impl ContextMenu {
self.select_last(&Default::default(), cx);
}
}
+
+ pub fn on_action_dispatch(&mut self, dispatched: &Box<dyn Action>, cx: &mut ViewContext<Self>) {
+ if let Some(ix) = self.items.iter().position(|item| {
+ if let ContextMenuItem::Entry {
+ action: Some(action),
+ ..
+ } = item
+ {
+ action.partial_eq(&**dispatched)
+ } else {
+ false
+ }
+ }) {
+ self.selected_index = Some(ix);
+ self.delayed = true;
+ cx.notify();
+ let action = dispatched.boxed_clone();
+ cx.spawn(|this, mut cx| async move {
+ cx.background_executor()
+ .timer(Duration::from_millis(50))
+ .await;
+ this.update(&mut cx, |this, cx| {
+ cx.dispatch_action(action);
+ this.cancel(&Default::default(), cx)
+ })
+ })
+ .detach_and_log_err(cx);
+ } else {
+ cx.propagate()
+ }
+ }
}
impl ContextMenuItem {
@@ -185,6 +208,22 @@ impl Render for ContextMenu {
.on_action(cx.listener(ContextMenu::select_prev))
.on_action(cx.listener(ContextMenu::confirm))
.on_action(cx.listener(ContextMenu::cancel))
+ .when(!self.delayed, |mut el| {
+ for item in self.items.iter() {
+ if let ContextMenuItem::Entry {
+ action: Some(action),
+ ..
+ } = item
+ {
+ el = el.on_boxed_action(
+ action,
+ cx.listener(ContextMenu::on_action_dispatch),
+ );
+ }
+ }
+ el
+ })
+ .on_blur(cx.listener(|this, _, cx| this.cancel(&Default::default(), cx)))
.flex_none()
.child(
List::new().children(self.items.iter().enumerate().map(
@@ -196,8 +235,8 @@ impl Render for ContextMenu {
ContextMenuItem::Entry {
label,
handler,
- key_binding,
icon,
+ action,
} => {
let handler = handler.clone();
let dismiss = cx.listener(|_, _, cx| cx.emit(DismissEvent));
@@ -218,11 +257,10 @@ impl Render for ContextMenu {
.w_full()
.justify_between()
.child(label_element)
- .children(
- key_binding
- .clone()
- .map(|binding| div().ml_1().child(binding)),
- ),
+ .children(action.as_ref().and_then(|action| {
+ KeyBinding::for_action(&**action, cx)
+ .map(|binding| div().ml_1().child(binding))
+ })),
)
.selected(Some(ix) == self.selected_index)
.on_click(move |event, cx| {
@@ -1,5 +1,5 @@
use crate::{h_stack, prelude::*, Icon, IconElement, IconSize};
-use gpui::{relative, rems, Action, Div, IntoElement, Keystroke};
+use gpui::{relative, rems, Action, Div, FocusHandle, IntoElement, Keystroke};
#[derive(IntoElement, Clone)]
pub struct KeyBinding {
@@ -49,12 +49,21 @@ impl RenderOnce for KeyBinding {
impl KeyBinding {
pub fn for_action(action: &dyn Action, cx: &mut WindowContext) -> Option<Self> {
- // todo! this last is arbitrary, we want to prefer users key bindings over defaults,
- // and vim over normal (in vim mode), etc.
let key_binding = cx.bindings_for_action(action).last().cloned()?;
Some(Self::new(key_binding))
}
+ // like for_action(), but lets you specify the context from which keybindings
+ // are matched.
+ pub fn for_action_in(
+ action: &dyn Action,
+ focus: &FocusHandle,
+ cx: &mut WindowContext,
+ ) -> Option<Self> {
+ let key_binding = cx.bindings_for_action_in(action, focus).last().cloned()?;
+ Some(Self::new(key_binding))
+ }
+
fn icon_for_key(keystroke: &Keystroke) -> Option<Icon> {
let mut icon: Option<Icon> = None;
@@ -1,7 +1,8 @@
use std::rc::Rc;
use gpui::{
- px, AnyElement, ClickEvent, Div, ImageSource, MouseButton, MouseDownEvent, Pixels, Stateful,
+ px, AnyElement, AnyView, ClickEvent, Div, ImageSource, MouseButton, MouseDownEvent, Pixels,
+ Stateful,
};
use smallvec::SmallVec;
@@ -21,6 +22,7 @@ pub struct ListItem {
inset: bool,
on_click: Option<Rc<dyn Fn(&ClickEvent, &mut WindowContext) + 'static>>,
on_toggle: Option<Rc<dyn Fn(&ClickEvent, &mut WindowContext) + 'static>>,
+ tooltip: Option<Box<dyn Fn(&mut WindowContext) -> AnyView + 'static>>,
on_secondary_mouse_down: Option<Rc<dyn Fn(&MouseDownEvent, &mut WindowContext) + 'static>>,
children: SmallVec<[AnyElement; 2]>,
}
@@ -38,6 +40,7 @@ impl ListItem {
on_click: None,
on_secondary_mouse_down: None,
on_toggle: None,
+ tooltip: None,
children: SmallVec::new(),
}
}
@@ -55,6 +58,11 @@ impl ListItem {
self
}
+ pub fn tooltip(mut self, tooltip: impl Fn(&mut WindowContext) -> AnyView + 'static) -> Self {
+ self.tooltip = Some(Box::new(tooltip));
+ self
+ }
+
pub fn inset(mut self, inset: bool) -> Self {
self.inset = inset;
self
@@ -149,6 +157,7 @@ impl RenderOnce for ListItem {
(on_mouse_down)(event, cx)
})
})
+ .when_some(self.tooltip, |this, tooltip| this.tooltip(tooltip))
.child(
div()
.when(self.inset, |this| this.px_2())
@@ -219,9 +219,11 @@ impl PathMatcher {
}
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
- other.as_ref().starts_with(&self.maybe_path)
- || self.glob.is_match(&other)
- || self.check_with_end_separator(other.as_ref())
+ let other_path = other.as_ref();
+ other_path.starts_with(&self.maybe_path)
+ || other_path.ends_with(&self.maybe_path)
+ || self.glob.is_match(other_path)
+ || self.check_with_end_separator(other_path)
}
fn check_with_end_separator(&self, path: &Path) -> bool {
@@ -418,4 +420,14 @@ mod tests {
"Path matcher {path_matcher} should match {path:?}"
);
}
+
+ #[test]
+ fn project_search() {
+ let path = Path::new("/Users/someonetoignore/work/zed/zed.dev/node_modules");
+ let path_matcher = PathMatcher::new("**/node_modules/**").unwrap();
+ assert!(
+ path_matcher.is_match(&path),
+ "Path matcher {path_matcher} should match {path:?}"
+ );
+ }
}
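A rough usage sketch of the relaxed matching, assuming only the `PathMatcher::new`/`is_match` API visible in the hunk above; the first assertion mirrors the new `project_search` test, and the second follows from ordinary glob semantics:

    let matcher = PathMatcher::new("**/node_modules/**").unwrap();

    // The directory itself, with no trailing separator, matches — this is the
    // case the new `project_search` test exercises.
    assert!(matcher.is_match("/Users/someonetoignore/work/zed/zed.dev/node_modules"));

    // Paths inside the directory already matched through the glob alone.
    assert!(matcher.is_match("/Users/someonetoignore/work/zed/zed.dev/node_modules/package.json"));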
@@ -481,18 +481,21 @@ impl Pane {
pub(crate) fn open_item(
&mut self,
- project_entry_id: ProjectEntryId,
+ project_entry_id: Option<ProjectEntryId>,
focus_item: bool,
cx: &mut ViewContext<Self>,
build_item: impl FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
) -> Box<dyn ItemHandle> {
let mut existing_item = None;
- for (index, item) in self.items.iter().enumerate() {
- if item.is_singleton(cx) && item.project_entry_ids(cx).as_slice() == [project_entry_id]
- {
- let item = item.boxed_clone();
- existing_item = Some((index, item));
- break;
+ if let Some(project_entry_id) = project_entry_id {
+ for (index, item) in self.items.iter().enumerate() {
+ if item.is_singleton(cx)
+ && item.project_entry_ids(cx).as_slice() == [project_entry_id]
+ {
+ let item = item.boxed_clone();
+ existing_item = Some((index, item));
+ break;
+ }
}
}
@@ -2129,13 +2129,13 @@ impl Workspace {
})
}
- pub(crate) fn load_path(
+ fn load_path(
&mut self,
path: ProjectPath,
cx: &mut ViewContext<Self>,
) -> Task<
Result<(
- ProjectEntryId,
+ Option<ProjectEntryId>,
impl 'static + FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
)>,
> {
@@ -20,6 +20,7 @@ test-support = [
[dependencies]
db = { path = "../db2", package = "db2" }
+call = { path = "../call2", package = "call2" }
client = { path = "../client2", package = "client2" }
collections = { path = "../collections" }
# context_menu = { path = "../context_menu" }
@@ -36,7 +37,6 @@ theme = { path = "../theme2", package = "theme2" }
util = { path = "../util" }
ui = { package = "ui2", path = "../ui2" }
-async-trait.workspace = true
async-recursion = "1.0.0"
itertools = "0.10"
bincode = "1.2.1"
@@ -538,18 +538,21 @@ impl Pane {
pub(crate) fn open_item(
&mut self,
- project_entry_id: ProjectEntryId,
+ project_entry_id: Option<ProjectEntryId>,
focus_item: bool,
cx: &mut ViewContext<Self>,
build_item: impl FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
) -> Box<dyn ItemHandle> {
let mut existing_item = None;
- for (index, item) in self.items.iter().enumerate() {
- if item.is_singleton(cx) && item.project_entry_ids(cx).as_slice() == [project_entry_id]
- {
- let item = item.boxed_clone();
- existing_item = Some((index, item));
- break;
+ if let Some(project_entry_id) = project_entry_id {
+ for (index, item) in self.items.iter().enumerate() {
+ if item.is_singleton(cx)
+ && item.project_entry_ids(cx).as_slice() == [project_entry_id]
+ {
+ let item = item.boxed_clone();
+ existing_item = Some((index, item));
+ break;
+ }
}
}
@@ -1545,22 +1548,17 @@ impl Pane {
right_click_menu(ix).trigger(tab).menu(|cx| {
ContextMenu::build(cx, |menu, cx| {
- menu.action(
- "Close",
- CloseActiveItem { save_intent: None }.boxed_clone(),
- cx,
- )
- .action("Close Others", CloseInactiveItems.boxed_clone(), cx)
- .separator()
- .action("Close Left", CloseItemsToTheLeft.boxed_clone(), cx)
- .action("Close Right", CloseItemsToTheRight.boxed_clone(), cx)
- .separator()
- .action("Close Clean", CloseCleanItems.boxed_clone(), cx)
- .action(
- "Close All",
- CloseAllItems { save_intent: None }.boxed_clone(),
- cx,
- )
+ menu.action("Close", CloseActiveItem { save_intent: None }.boxed_clone())
+ .action("Close Others", CloseInactiveItems.boxed_clone())
+ .separator()
+ .action("Close Left", CloseItemsToTheLeft.boxed_clone())
+ .action("Close Right", CloseItemsToTheRight.boxed_clone())
+ .separator()
+ .action("Close Clean", CloseCleanItems.boxed_clone())
+ .action(
+ "Close All",
+ CloseAllItems { save_intent: None }.boxed_clone(),
+ )
})
})
}
@@ -1653,13 +1651,12 @@ impl Pane {
.icon_size(IconSize::Small)
.on_click(cx.listener(|this, _, cx| {
let menu = ContextMenu::build(cx, |menu, cx| {
- menu.action("New File", NewFile.boxed_clone(), cx)
+ menu.action("New File", NewFile.boxed_clone())
.action(
"New Terminal",
NewCenterTerminal.boxed_clone(),
- cx,
)
- .action("New Search", NewSearch.boxed_clone(), cx)
+ .action("New Search", NewSearch.boxed_clone())
});
cx.subscribe(&menu, |this, _, event: &DismissEvent, cx| {
this.focus(cx);
@@ -1677,10 +1674,10 @@ impl Pane {
.icon_size(IconSize::Small)
.on_click(cx.listener(|this, _, cx| {
let menu = ContextMenu::build(cx, |menu, cx| {
- menu.action("Split Right", SplitRight.boxed_clone(), cx)
- .action("Split Left", SplitLeft.boxed_clone(), cx)
- .action("Split Up", SplitUp.boxed_clone(), cx)
- .action("Split Down", SplitDown.boxed_clone(), cx)
+ menu.action("Split Right", SplitRight.boxed_clone())
+ .action("Split Left", SplitLeft.boxed_clone())
+ .action("Split Up", SplitUp.boxed_clone())
+ .action("Split Down", SplitDown.boxed_clone())
});
cx.subscribe(&menu, |this, _, event: &DismissEvent, cx| {
this.focus(cx);
@@ -1,18 +1,20 @@
use crate::{AppState, FollowerState, Pane, Workspace};
use anyhow::{anyhow, bail, Result};
+use call::{ActiveCall, ParticipantLocation};
use collections::HashMap;
use db::sqlez::{
bindable::{Bind, Column, StaticColumnCount},
statement::Statement,
};
use gpui::{
- point, size, AnyWeakView, Bounds, Div, IntoElement, Model, Pixels, Point, View, ViewContext,
+ point, size, AnyWeakView, Bounds, Div, Entity as _, IntoElement, Model, Pixels, Point, View,
+ ViewContext,
};
use parking_lot::Mutex;
use project::Project;
use serde::Deserialize;
use std::sync::Arc;
-use ui::prelude::*;
+use ui::{prelude::*, Button};
const HANDLE_HITBOX_SIZE: f32 = 4.0;
const HORIZONTAL_MIN_SIZE: f32 = 80.;
@@ -126,6 +128,7 @@ impl PaneGroup {
&self,
project: &Model<Project>,
follower_states: &HashMap<View<Pane>, FollowerState>,
+ active_call: Option<&Model<ActiveCall>>,
active_pane: &View<Pane>,
zoomed: Option<&AnyWeakView>,
app_state: &Arc<AppState>,
@@ -135,6 +138,7 @@ impl PaneGroup {
project,
0,
follower_states,
+ active_call,
active_pane,
zoomed,
app_state,
@@ -196,6 +200,7 @@ impl Member {
project: &Model<Project>,
basis: usize,
follower_states: &HashMap<View<Pane>, FollowerState>,
+ active_call: Option<&Model<ActiveCall>>,
active_pane: &View<Pane>,
zoomed: Option<&AnyWeakView>,
app_state: &Arc<AppState>,
@@ -203,19 +208,89 @@ impl Member {
) -> impl IntoElement {
match self {
Member::Pane(pane) => {
- // todo!()
- // let pane_element = if Some(pane.into()) == zoomed {
- // None
- // } else {
- // Some(pane)
- // };
-
- div().size_full().child(pane.clone()).into_any()
-
- // Stack::new()
- // .with_child(pane_element.contained().with_border(leader_border))
- // .with_children(leader_status_box)
- // .into_any()
+ let leader = follower_states.get(pane).and_then(|state| {
+ let room = active_call?.read(cx).room()?.read(cx);
+ room.remote_participant_for_peer_id(state.leader_id)
+ });
+
+ let mut leader_border = None;
+ let mut leader_status_box = None;
+ if let Some(leader) = &leader {
+ let mut leader_color = cx
+ .theme()
+ .players()
+ .color_for_participant(leader.participant_index.0)
+ .cursor;
+ leader_color.fade_out(0.3);
+ leader_border = Some(leader_color);
+
+ leader_status_box = match leader.location {
+ ParticipantLocation::SharedProject {
+ project_id: leader_project_id,
+ } => {
+ if Some(leader_project_id) == project.read(cx).remote_id() {
+ None
+ } else {
+ let leader_user = leader.user.clone();
+ let leader_user_id = leader.user.id;
+ Some(
+ Button::new(
+ ("leader-status", pane.entity_id()),
+ format!(
+ "Follow {} to their active project",
+ leader_user.github_login,
+ ),
+ )
+ .on_click(cx.listener(
+ move |this, _, cx| {
+ crate::join_remote_project(
+ leader_project_id,
+ leader_user_id,
+ this.app_state().clone(),
+ cx,
+ )
+ .detach_and_log_err(cx);
+ },
+ )),
+ )
+ }
+ }
+ ParticipantLocation::UnsharedProject => Some(Button::new(
+ ("leader-status", pane.entity_id()),
+ format!(
+ "{} is viewing an unshared Zed project",
+ leader.user.github_login
+ ),
+ )),
+ ParticipantLocation::External => Some(Button::new(
+ ("leader-status", pane.entity_id()),
+ format!(
+ "{} is viewing a window outside of Zed",
+ leader.user.github_login
+ ),
+ )),
+ };
+ }
+
+ div()
+ .relative()
+ .size_full()
+ .child(pane.clone())
+ .when_some(leader_border, |this, color| {
+ this.border_2().border_color(color)
+ })
+ .when_some(leader_status_box, |this, status_box| {
+ this.child(
+ div()
+ .absolute()
+ .w_96()
+ .bottom_3()
+ .right_3()
+ .z_index(1)
+ .child(status_box),
+ )
+ })
+ .into_any()
// let el = div()
// .flex()
@@ -1,5 +1,9 @@
-use crate::participant::{Frame, RemoteVideoTrack};
+use crate::{
+ item::{Item, ItemEvent},
+ ItemNavHistory, WorkspaceId,
+};
use anyhow::Result;
+use call::participant::{Frame, RemoteVideoTrack};
use client::{proto::PeerId, User};
use futures::StreamExt;
use gpui::{
@@ -9,7 +13,6 @@ use gpui::{
};
use std::sync::{Arc, Weak};
use ui::{h_stack, Icon, IconElement};
-use workspace::{item::Item, ItemNavHistory, WorkspaceId};
pub enum Event {
Close,
@@ -56,7 +59,7 @@ impl SharedScreen {
}
impl EventEmitter<Event> for SharedScreen {}
-impl EventEmitter<workspace::item::ItemEvent> for SharedScreen {}
+impl EventEmitter<ItemEvent> for SharedScreen {}
impl FocusableView for SharedScreen {
fn focus_handle(&self, _: &AppContext) -> FocusHandle {
@@ -10,15 +10,16 @@ mod persistence;
pub mod searchable;
// todo!()
mod modal_layer;
+pub mod shared_screen;
mod status_bar;
mod toolbar;
mod workspace_settings;
use anyhow::{anyhow, Context as _, Result};
-use async_trait::async_trait;
+use call::ActiveCall;
use client::{
proto::{self, PeerId},
- Client, TypedEnvelope, User, UserStore,
+ Client, Status, TypedEnvelope, UserStore,
};
use collections::{hash_map, HashMap, HashSet};
use dock::{Dock, DockPosition, Panel, PanelButtons, PanelHandle};
@@ -28,11 +29,11 @@ use futures::{
Future, FutureExt, StreamExt,
};
use gpui::{
- actions, div, point, size, Action, AnyModel, AnyView, AnyWeakView, AppContext, AsyncAppContext,
- AsyncWindowContext, Bounds, Context, Div, Entity, EntityId, EventEmitter, FocusHandle,
- FocusableView, GlobalPixels, InteractiveElement, KeyContext, ManagedView, Model, ModelContext,
- ParentElement, PathPromptOptions, Point, PromptLevel, Render, Size, Styled, Subscription, Task,
- View, ViewContext, VisualContext, WeakModel, WeakView, WindowBounds, WindowContext,
+ actions, div, point, size, Action, AnyModel, AnyView, AnyWeakView, AnyWindowHandle, AppContext,
+ AsyncAppContext, AsyncWindowContext, Bounds, Context, Div, Entity, EntityId, EventEmitter,
+ FocusHandle, FocusableView, GlobalPixels, InteractiveElement, KeyContext, ManagedView, Model,
+ ModelContext, ParentElement, PathPromptOptions, Point, PromptLevel, Render, Size, Styled,
+ Subscription, Task, View, ViewContext, VisualContext, WeakView, WindowBounds, WindowContext,
WindowHandle, WindowOptions,
};
use item::{FollowableItem, FollowableItemHandle, Item, ItemHandle, ItemSettings, ProjectItem};
@@ -52,6 +53,7 @@ use postage::stream::Stream;
use project::{Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId};
use serde::Deserialize;
use settings::Settings;
+use shared_screen::SharedScreen;
use status_bar::StatusBar;
pub use status_bar::StatusItemView;
use std::{
@@ -209,6 +211,7 @@ pub fn init_settings(cx: &mut AppContext) {
pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
init_settings(cx);
notifications::init(cx);
+
// cx.add_global_action({
// let app_state = Arc::downgrade(&app_state);
// move |_: &Open, cx: &mut AppContext| {
@@ -302,7 +305,6 @@ pub struct AppState {
pub user_store: Model<UserStore>,
pub workspace_store: Model<WorkspaceStore>,
pub fs: Arc<dyn fs::Fs>,
- pub call_factory: CallFactory,
pub build_window_options:
fn(Option<WindowBounds>, Option<Uuid>, &mut AppContext) -> WindowOptions,
pub node_runtime: Arc<dyn NodeRuntime>,
@@ -321,69 +323,6 @@ struct Follower {
peer_id: PeerId,
}
-#[cfg(any(test, feature = "test-support"))]
-pub struct TestCallHandler;
-
-#[cfg(any(test, feature = "test-support"))]
-impl CallHandler for TestCallHandler {
- fn peer_state(
- &mut self,
- id: PeerId,
- project: &Model<Project>,
- cx: &mut ViewContext<Workspace>,
- ) -> Option<(bool, bool)> {
- None
- }
-
- fn shared_screen_for_peer(
- &self,
- peer_id: PeerId,
- pane: &View<Pane>,
- cx: &mut ViewContext<Workspace>,
- ) -> Option<Box<dyn ItemHandle>> {
- None
- }
-
- fn room_id(&self, cx: &AppContext) -> Option<u64> {
- None
- }
-
- fn hang_up(&self, cx: &mut AppContext) -> Task<Result<()>> {
- Task::ready(Err(anyhow!("TestCallHandler should not be hanging up")))
- }
-
- fn active_project(&self, cx: &AppContext) -> Option<WeakModel<Project>> {
- None
- }
-
- fn invite(
- &mut self,
- called_user_id: u64,
- initial_project: Option<Model<Project>>,
- cx: &mut AppContext,
- ) -> Task<Result<()>> {
- unimplemented!()
- }
-
- fn remote_participants(&self, cx: &AppContext) -> Option<Vec<(Arc<User>, PeerId)>> {
- None
- }
-
- fn is_muted(&self, cx: &AppContext) -> Option<bool> {
- None
- }
-
- fn toggle_mute(&self, cx: &mut AppContext) {}
-
- fn toggle_screen_share(&self, cx: &mut AppContext) {}
-
- fn toggle_deafen(&self, cx: &mut AppContext) {}
-
- fn is_deafened(&self, cx: &AppContext) -> Option<bool> {
- None
- }
-}
-
impl AppState {
#[cfg(any(test, feature = "test-support"))]
pub fn test(cx: &mut AppContext) -> Arc<Self> {
@@ -414,7 +353,6 @@ impl AppState {
workspace_store,
node_runtime: FakeNodeRuntime::new(),
build_window_options: |_, _, _| Default::default(),
- call_factory: |_| Box::new(TestCallHandler),
})
}
}
@@ -471,40 +409,6 @@ pub enum Event {
WorkspaceCreated(WeakView<Workspace>),
}
-#[async_trait(?Send)]
-pub trait CallHandler {
- fn peer_state(
- &mut self,
- id: PeerId,
- project: &Model<Project>,
- cx: &mut ViewContext<Workspace>,
- ) -> Option<(bool, bool)>;
- fn shared_screen_for_peer(
- &self,
- peer_id: PeerId,
- pane: &View<Pane>,
- cx: &mut ViewContext<Workspace>,
- ) -> Option<Box<dyn ItemHandle>>;
- fn room_id(&self, cx: &AppContext) -> Option<u64>;
- fn is_in_room(&self, cx: &mut ViewContext<Workspace>) -> bool {
- self.room_id(cx).is_some()
- }
- fn hang_up(&self, cx: &mut AppContext) -> Task<Result<()>>;
- fn active_project(&self, cx: &AppContext) -> Option<WeakModel<Project>>;
- fn invite(
- &mut self,
- called_user_id: u64,
- initial_project: Option<Model<Project>>,
- cx: &mut AppContext,
- ) -> Task<Result<()>>;
- fn remote_participants(&self, cx: &AppContext) -> Option<Vec<(Arc<User>, PeerId)>>;
- fn is_muted(&self, cx: &AppContext) -> Option<bool>;
- fn is_deafened(&self, cx: &AppContext) -> Option<bool>;
- fn toggle_mute(&self, cx: &mut AppContext);
- fn toggle_deafen(&self, cx: &mut AppContext);
- fn toggle_screen_share(&self, cx: &mut AppContext);
-}
-
pub struct Workspace {
window_self: WindowHandle<Self>,
weak_self: WeakView<Self>,
@@ -525,10 +429,10 @@ pub struct Workspace {
titlebar_item: Option<AnyView>,
notifications: Vec<(TypeId, usize, Box<dyn NotificationHandle>)>,
project: Model<Project>,
- call_handler: Box<dyn CallHandler>,
follower_states: HashMap<View<Pane>, FollowerState>,
last_leaders_by_pane: HashMap<WeakView<Pane>, PeerId>,
window_edited: bool,
+ active_call: Option<(Model<ActiveCall>, Vec<Subscription>)>,
leader_updates_tx: mpsc::UnboundedSender<(PeerId, proto::UpdateFollowers)>,
database_id: WorkspaceId,
app_state: Arc<AppState>,
@@ -556,7 +460,6 @@ struct FollowerState {
enum WorkspaceBounds {}
-type CallFactory = fn(&mut ViewContext<Workspace>) -> Box<dyn CallHandler>;
impl Workspace {
pub fn new(
workspace_id: WorkspaceId,
@@ -648,19 +551,9 @@ impl Workspace {
mpsc::unbounded::<(PeerId, proto::UpdateFollowers)>();
let _apply_leader_updates = cx.spawn(|this, mut cx| async move {
while let Some((leader_id, update)) = leader_updates_rx.next().await {
- let mut cx2 = cx.clone();
- let t = this.clone();
-
- Workspace::process_leader_update(&this, leader_id, update, &mut cx)
+ Self::process_leader_update(&this, leader_id, update, &mut cx)
.await
.log_err();
-
- // this.update(&mut cx, |this, cxx| {
- // this.call_handler
- // .process_leader_update(leader_id, update, cx2)
- // })?
- // .await
- // .log_err();
}
Ok(())
@@ -693,6 +586,14 @@ impl Workspace {
// drag_and_drop.register_container(weak_handle.clone());
// });
+ let mut active_call = None;
+ if cx.has_global::<Model<ActiveCall>>() {
+ let call = cx.global::<Model<ActiveCall>>().clone();
+ let mut subscriptions = Vec::new();
+ subscriptions.push(cx.subscribe(&call, Self::on_active_call_event));
+ active_call = Some((call, subscriptions));
+ }
+
let subscriptions = vec![
cx.observe_window_activation(Self::on_window_activation_changed),
cx.observe_window_bounds(move |_, cx| {
@@ -769,8 +670,7 @@ impl Workspace {
follower_states: Default::default(),
last_leaders_by_pane: Default::default(),
window_edited: false,
-
- call_handler: (app_state.call_factory)(cx),
+ active_call,
database_id: workspace_id,
app_state,
_observe_current_user,
@@ -1217,7 +1117,7 @@ impl Workspace {
cx: &mut ViewContext<Self>,
) -> Task<Result<bool>> {
//todo!(saveing)
-
+ let active_call = self.active_call().cloned();
let window = cx.window_handle();
cx.spawn(|this, mut cx| async move {
@@ -1228,27 +1128,27 @@ impl Workspace {
.count()
})?;
- if !quitting
- && workspace_count == 1
- && this
- .update(&mut cx, |this, cx| this.call_handler.is_in_room(cx))
- .log_err()
- .unwrap_or_default()
- {
- let answer = window.update(&mut cx, |_, cx| {
- cx.prompt(
- PromptLevel::Warning,
- "Do you want to leave the current call?",
- &["Close window and hang up", "Cancel"],
- )
- })?;
+ if let Some(active_call) = active_call {
+ if !quitting
+ && workspace_count == 1
+ && active_call.read_with(&cx, |call, _| call.room().is_some())?
+ {
+ let answer = window.update(&mut cx, |_, cx| {
+ cx.prompt(
+ PromptLevel::Warning,
+ "Do you want to leave the current call?",
+ &["Close window and hang up", "Cancel"],
+ )
+ })?;
- if answer.await.log_err() == Some(1) {
- return anyhow::Ok(false);
- } else {
- this.update(&mut cx, |this, cx| this.call_handler.hang_up(cx))?
- .await
- .log_err();
+ if answer.await.log_err() == Some(1) {
+ return anyhow::Ok(false);
+ } else {
+ active_call
+ .update(&mut cx, |call, cx| call.hang_up(cx))?
+ .await
+ .log_err();
+ }
}
}
@@ -1953,13 +1853,13 @@ impl Workspace {
})
}
- pub(crate) fn load_path(
+ fn load_path(
&mut self,
path: ProjectPath,
cx: &mut ViewContext<Self>,
) -> Task<
Result<(
- ProjectEntryId,
+ Option<ProjectEntryId>,
impl 'static + Send + FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
)>,
> {
@@ -2032,7 +1932,7 @@ impl Workspace {
pub fn open_shared_screen(&mut self, peer_id: PeerId, cx: &mut ViewContext<Self>) {
if let Some(shared_screen) = self.shared_screen_for_peer(peer_id, &self.active_pane, cx) {
self.active_pane.update(cx, |pane, cx| {
- pane.add_item(shared_screen, false, true, None, cx)
+ pane.add_item(Box::new(shared_screen), false, true, None, cx)
});
}
}
@@ -2370,60 +2270,60 @@ impl Workspace {
cx.notify();
}
- // fn start_following(
- // &mut self,
- // leader_id: PeerId,
- // cx: &mut ViewContext<Self>,
- // ) -> Option<Task<Result<()>>> {
- // let pane = self.active_pane().clone();
-
- // self.last_leaders_by_pane
- // .insert(pane.downgrade(), leader_id);
- // self.unfollow(&pane, cx);
- // self.follower_states.insert(
- // pane.clone(),
- // FollowerState {
- // leader_id,
- // active_view_id: None,
- // items_by_leader_view_id: Default::default(),
- // },
- // );
- // cx.notify();
-
- // let room_id = self.active_call()?.read(cx).room()?.read(cx).id();
- // let project_id = self.project.read(cx).remote_id();
- // let request = self.app_state.client.request(proto::Follow {
- // room_id,
- // project_id,
- // leader_id: Some(leader_id),
- // });
+ fn start_following(
+ &mut self,
+ leader_id: PeerId,
+ cx: &mut ViewContext<Self>,
+ ) -> Option<Task<Result<()>>> {
+ let pane = self.active_pane().clone();
+
+ self.last_leaders_by_pane
+ .insert(pane.downgrade(), leader_id);
+ self.unfollow(&pane, cx);
+ self.follower_states.insert(
+ pane.clone(),
+ FollowerState {
+ leader_id,
+ active_view_id: None,
+ items_by_leader_view_id: Default::default(),
+ },
+ );
+ cx.notify();
- // Some(cx.spawn(|this, mut cx| async move {
- // let response = request.await?;
- // this.update(&mut cx, |this, _| {
- // let state = this
- // .follower_states
- // .get_mut(&pane)
- // .ok_or_else(|| anyhow!("following interrupted"))?;
- // state.active_view_id = if let Some(active_view_id) = response.active_view_id {
- // Some(ViewId::from_proto(active_view_id)?)
- // } else {
- // None
- // };
- // Ok::<_, anyhow::Error>(())
- // })??;
- // Self::add_views_from_leader(
- // this.clone(),
- // leader_id,
- // vec![pane],
- // response.views,
- // &mut cx,
- // )
- // .await?;
- // this.update(&mut cx, |this, cx| this.leader_updated(leader_id, cx))?;
- // Ok(())
- // }))
- // }
+ let room_id = self.active_call()?.read(cx).room()?.read(cx).id();
+ let project_id = self.project.read(cx).remote_id();
+ let request = self.app_state.client.request(proto::Follow {
+ room_id,
+ project_id,
+ leader_id: Some(leader_id),
+ });
+
+ Some(cx.spawn(|this, mut cx| async move {
+ let response = request.await?;
+ this.update(&mut cx, |this, _| {
+ let state = this
+ .follower_states
+ .get_mut(&pane)
+ .ok_or_else(|| anyhow!("following interrupted"))?;
+ state.active_view_id = if let Some(active_view_id) = response.active_view_id {
+ Some(ViewId::from_proto(active_view_id)?)
+ } else {
+ None
+ };
+ Ok::<_, anyhow::Error>(())
+ })??;
+ Self::add_views_from_leader(
+ this.clone(),
+ leader_id,
+ vec![pane],
+ response.views,
+ &mut cx,
+ )
+ .await?;
+ this.update(&mut cx, |this, cx| this.leader_updated(leader_id, cx))?;
+ Ok(())
+ }))
+ }
// pub fn follow_next_collaborator(
// &mut self,
@@ -2462,67 +2362,67 @@ impl Workspace {
// self.follow(leader_id, cx)
// }
- // pub fn follow(
- // &mut self,
- // leader_id: PeerId,
- // cx: &mut ViewContext<Self>,
- // ) -> Option<Task<Result<()>>> {
- // let room = ActiveCall::global(cx).read(cx).room()?.read(cx);
- // let project = self.project.read(cx);
+ pub fn follow(
+ &mut self,
+ leader_id: PeerId,
+ cx: &mut ViewContext<Self>,
+ ) -> Option<Task<Result<()>>> {
+ let room = ActiveCall::global(cx).read(cx).room()?.read(cx);
+ let project = self.project.read(cx);
- // let Some(remote_participant) = room.remote_participant_for_peer_id(leader_id) else {
- // return None;
- // };
+ let Some(remote_participant) = room.remote_participant_for_peer_id(leader_id) else {
+ return None;
+ };
- // let other_project_id = match remote_participant.location {
- // call::ParticipantLocation::External => None,
- // call::ParticipantLocation::UnsharedProject => None,
- // call::ParticipantLocation::SharedProject { project_id } => {
- // if Some(project_id) == project.remote_id() {
- // None
- // } else {
- // Some(project_id)
- // }
- // }
- // };
+ let other_project_id = match remote_participant.location {
+ call::ParticipantLocation::External => None,
+ call::ParticipantLocation::UnsharedProject => None,
+ call::ParticipantLocation::SharedProject { project_id } => {
+ if Some(project_id) == project.remote_id() {
+ None
+ } else {
+ Some(project_id)
+ }
+ }
+ };
- // // if they are active in another project, follow there.
- // if let Some(project_id) = other_project_id {
- // let app_state = self.app_state.clone();
- // return Some(crate::join_remote_project(
- // project_id,
- // remote_participant.user.id,
- // app_state,
- // cx,
- // ));
- // }
+ // if they are active in another project, follow there.
+ if let Some(project_id) = other_project_id {
+ let app_state = self.app_state.clone();
+ return Some(crate::join_remote_project(
+ project_id,
+ remote_participant.user.id,
+ app_state,
+ cx,
+ ));
+ }
- // // if you're already following, find the right pane and focus it.
- // for (pane, state) in &self.follower_states {
- // if leader_id == state.leader_id {
- // cx.focus(pane);
- // return None;
- // }
- // }
+ // if you're already following, find the right pane and focus it.
+ for (pane, state) in &self.follower_states {
+ if leader_id == state.leader_id {
+ cx.focus_view(pane);
+ return None;
+ }
+ }
- // // Otherwise, follow.
- // self.start_following(leader_id, cx)
- // }
+ // Otherwise, follow.
+ self.start_following(leader_id, cx)
+ }
pub fn unfollow(&mut self, pane: &View<Pane>, cx: &mut ViewContext<Self>) -> Option<PeerId> {
- let follower_states = &mut self.follower_states;
- let state = follower_states.remove(pane)?;
+ let state = self.follower_states.remove(pane)?;
let leader_id = state.leader_id;
for (_, item) in state.items_by_leader_view_id {
item.set_leader_peer_id(None, cx);
}
- if follower_states
+ if self
+ .follower_states
.values()
.all(|state| state.leader_id != state.leader_id)
{
let project_id = self.project.read(cx).remote_id();
- let room_id = self.call_handler.room_id(cx)?;
+ let room_id = self.active_call()?.read(cx).room()?.read(cx).id();
self.app_state
.client
.send(proto::Unfollow {
@@ -2657,57 +2557,55 @@ impl Workspace {
}
}
- // // RPC handlers
+ // RPC handlers
fn handle_follow(
&mut self,
- _follower_project_id: Option<u64>,
- _cx: &mut ViewContext<Self>,
+ follower_project_id: Option<u64>,
+ cx: &mut ViewContext<Self>,
) -> proto::FollowResponse {
- todo!()
+ let client = &self.app_state.client;
+ let project_id = self.project.read(cx).remote_id();
- // let client = &self.app_state.client;
- // let project_id = self.project.read(cx).remote_id();
+ let active_view_id = self.active_item(cx).and_then(|i| {
+ Some(
+ i.to_followable_item_handle(cx)?
+ .remote_id(client, cx)?
+ .to_proto(),
+ )
+ });
- // let active_view_id = self.active_item(cx).and_then(|i| {
- // Some(
- // i.to_followable_item_handle(cx)?
- // .remote_id(client, cx)?
- // .to_proto(),
- // )
- // });
+ cx.notify();
- // cx.notify();
-
- // self.last_active_view_id = active_view_id.clone();
- // proto::FollowResponse {
- // active_view_id,
- // views: self
- // .panes()
- // .iter()
- // .flat_map(|pane| {
- // let leader_id = self.leader_for_pane(pane);
- // pane.read(cx).items().filter_map({
- // let cx = &cx;
- // move |item| {
- // let item = item.to_followable_item_handle(cx)?;
- // if (project_id.is_none() || project_id != follower_project_id)
- // && item.is_project_item(cx)
- // {
- // return None;
- // }
- // let id = item.remote_id(client, cx)?.to_proto();
- // let variant = item.to_state_proto(cx)?;
- // Some(proto::View {
- // id: Some(id),
- // leader_id,
- // variant: Some(variant),
- // })
- // }
- // })
- // })
- // .collect(),
- // }
+ self.last_active_view_id = active_view_id.clone();
+ proto::FollowResponse {
+ active_view_id,
+ views: self
+ .panes()
+ .iter()
+ .flat_map(|pane| {
+ let leader_id = self.leader_for_pane(pane);
+ pane.read(cx).items().filter_map({
+ let cx = &cx;
+ move |item| {
+ let item = item.to_followable_item_handle(cx)?;
+ if (project_id.is_none() || project_id != follower_project_id)
+ && item.is_project_item(cx)
+ {
+ return None;
+ }
+ let id = item.remote_id(client, cx)?.to_proto();
+ let variant = item.to_state_proto(cx)?;
+ Some(proto::View {
+ id: Some(id),
+ leader_id,
+ variant: Some(variant),
+ })
+ }
+ })
+ })
+ .collect(),
+ }
}
fn handle_update_followers(
@@ -2727,6 +2625,8 @@ impl Workspace {
update: proto::UpdateFollowers,
cx: &mut AsyncWindowContext,
) -> Result<()> {
+ dbg!("process_leader_update", &update);
+
match update.variant.ok_or_else(|| anyhow!("invalid update"))? {
proto::update_followers::Variant::UpdateActiveView(update_active_view) => {
this.update(cx, |this, _| {
@@ -2878,9 +2778,8 @@ impl Workspace {
} else {
None
};
- let room_id = self.call_handler.room_id(cx)?;
self.app_state().workspace_store.update(cx, |store, cx| {
- store.update_followers(project_id, room_id, update, cx)
+ store.update_followers(project_id, update, cx)
})
}
@@ -2888,12 +2787,31 @@ impl Workspace {
self.follower_states.get(pane).map(|state| state.leader_id)
}
- pub fn leader_updated(&mut self, leader_id: PeerId, cx: &mut ViewContext<Self>) -> Option<()> {
+ fn leader_updated(&mut self, leader_id: PeerId, cx: &mut ViewContext<Self>) -> Option<()> {
cx.notify();
- let (leader_in_this_project, leader_in_this_app) =
- self.call_handler.peer_state(leader_id, &self.project, cx)?;
+ let call = self.active_call()?;
+ let room = call.read(cx).room()?.read(cx);
+ let participant = room.remote_participant_for_peer_id(leader_id)?;
let mut items_to_activate = Vec::new();
+
+ let leader_in_this_app;
+ let leader_in_this_project;
+ match participant.location {
+ call::ParticipantLocation::SharedProject { project_id } => {
+ leader_in_this_app = true;
+ leader_in_this_project = Some(project_id) == self.project.read(cx).remote_id();
+ }
+ call::ParticipantLocation::UnsharedProject => {
+ leader_in_this_app = true;
+ leader_in_this_project = false;
+ }
+ call::ParticipantLocation::External => {
+ leader_in_this_app = false;
+ leader_in_this_project = false;
+ }
+ };
+
for (pane, state) in &self.follower_states {
if state.leader_id != leader_id {
continue;
@@ -2914,7 +2832,7 @@ impl Workspace {
}
if let Some(shared_screen) = self.shared_screen_for_peer(leader_id, pane, cx) {
- items_to_activate.push((pane.clone(), shared_screen));
+ items_to_activate.push((pane.clone(), Box::new(shared_screen)));
}
}
@@ -2923,8 +2841,8 @@ impl Workspace {
if let Some(index) = pane.update(cx, |pane, _| pane.index_for_item(item.as_ref())) {
pane.update(cx, |pane, cx| pane.activate_item(index, false, false, cx));
} else {
- pane.update(cx, |pane, mut cx| {
- pane.add_item(item.boxed_clone(), false, false, None, &mut cx)
+ pane.update(cx, |pane, cx| {
+ pane.add_item(item.boxed_clone(), false, false, None, cx)
});
}
@@ -2941,21 +2859,20 @@ impl Workspace {
peer_id: PeerId,
pane: &View<Pane>,
cx: &mut ViewContext<Self>,
- ) -> Option<Box<dyn ItemHandle>> {
- self.call_handler.shared_screen_for_peer(peer_id, pane, cx)
- // let call = self.active_call()?;
- // let room = call.read(cx).room()?.read(cx);
- // let participant = room.remote_participant_for_peer_id(peer_id)?;
- // let track = participant.video_tracks.values().next()?.clone();
- // let user = participant.user.clone();
-
- // for item in pane.read(cx).items_of_type::<SharedScreen>() {
- // if item.read(cx).peer_id == peer_id {
- // return Some(item);
- // }
- // }
+ ) -> Option<View<SharedScreen>> {
+ let call = self.active_call()?;
+ let room = call.read(cx).room()?.read(cx);
+ let participant = room.remote_participant_for_peer_id(peer_id)?;
+ let track = participant.video_tracks.values().next()?.clone();
+ let user = participant.user.clone();
+
+ for item in pane.read(cx).items_of_type::<SharedScreen>() {
+ if item.read(cx).peer_id == peer_id {
+ return Some(item);
+ }
+ }
- // Some(cx.build_view(|cx| SharedScreen::new(&track, peer_id, user.clone(), cx)))
+ Some(cx.build_view(|cx| SharedScreen::new(&track, peer_id, user.clone(), cx)))
}
pub fn on_window_activation_changed(&mut self, cx: &mut ViewContext<Self>) {
@@ -2984,6 +2901,25 @@ impl Workspace {
}
}
+ fn active_call(&self) -> Option<&Model<ActiveCall>> {
+ self.active_call.as_ref().map(|(call, _)| call)
+ }
+
+ fn on_active_call_event(
+ &mut self,
+ _: Model<ActiveCall>,
+ event: &call::room::Event,
+ cx: &mut ViewContext<Self>,
+ ) {
+ match event {
+ call::room::Event::ParticipantLocationChanged { participant_id }
+ | call::room::Event::RemoteVideoTracksChanged { participant_id } => {
+ self.leader_updated(*participant_id, cx);
+ }
+ _ => {}
+ }
+ }
+
pub fn database_id(&self) -> WorkspaceId {
self.database_id
}
@@ -3393,7 +3329,6 @@ impl Workspace {
fs: project.read(cx).fs().clone(),
build_window_options: |_, _, _| Default::default(),
node_runtime: FakeNodeRuntime::new(),
- call_factory: |_| Box::new(TestCallHandler),
});
let workspace = Self::new(0, project, app_state, cx);
workspace.active_pane.update(cx, |pane, cx| pane.focus(cx));
@@ -3472,10 +3407,6 @@ impl Workspace {
self.modal_layer
.update(cx, |modal_layer, cx| modal_layer.toggle_modal(cx, build))
}
-
- pub fn call_state(&mut self) -> &mut dyn CallHandler {
- &mut *self.call_handler
- }
}
fn window_bounds_env_override(cx: &AsyncAppContext) -> Option<WindowBounds> {
@@ -3676,6 +3607,7 @@ impl Render for Workspace {
.child(self.center.render(
&self.project,
&self.follower_states,
+ self.active_call(),
&self.active_pane,
self.zoomed.as_ref(),
&self.app_state,
@@ -3830,15 +3762,15 @@ impl Render for Workspace {
// }
impl WorkspaceStore {
- pub fn new(client: Arc<Client>, _cx: &mut ModelContext<Self>) -> Self {
+ pub fn new(client: Arc<Client>, cx: &mut ModelContext<Self>) -> Self {
Self {
workspaces: Default::default(),
followers: Default::default(),
- _subscriptions: vec![],
- // client.add_request_handler(cx.weak_model(), Self::handle_follow),
- // client.add_message_handler(cx.weak_model(), Self::handle_unfollow),
- // client.add_message_handler(cx.weak_model(), Self::handle_update_followers),
- // ],
+ _subscriptions: vec![
+ client.add_request_handler(cx.weak_model(), Self::handle_follow),
+ client.add_message_handler(cx.weak_model(), Self::handle_unfollow),
+ client.add_message_handler(cx.weak_model(), Self::handle_update_followers),
+ ],
client,
}
}
@@ -3846,10 +3778,14 @@ impl WorkspaceStore {
pub fn update_followers(
&self,
project_id: Option<u64>,
- room_id: u64,
update: proto::update_followers::Variant,
cx: &AppContext,
) -> Option<()> {
+ if !cx.has_global::<Model<ActiveCall>>() {
+ return None;
+ }
+
+ let room_id = ActiveCall::global(cx).read(cx).room()?.read(cx).id();
let follower_ids: Vec<_> = self
.followers
.iter()
@@ -3885,17 +3821,9 @@ impl WorkspaceStore {
project_id: envelope.payload.project_id,
peer_id: envelope.original_sender_id()?,
};
+ let active_project = ActiveCall::global(cx).read(cx).location().cloned();
+
let mut response = proto::FollowResponse::default();
- let active_project = this
- .workspaces
- .iter()
- .next()
- .and_then(|workspace| {
- workspace
- .read_with(cx, |this, cx| this.call_handler.active_project(cx))
- .log_err()
- })
- .flatten();
for workspace in &this.workspaces {
workspace
.update(cx, |workspace, cx| {
@@ -3947,11 +3875,13 @@ impl WorkspaceStore {
this: Model<Self>,
envelope: TypedEnvelope<proto::UpdateFollowers>,
_: Arc<Client>,
- mut cx: AsyncWindowContext,
+ mut cx: AsyncAppContext,
) -> Result<()> {
let leader_id = envelope.original_sender_id()?;
let update = envelope.payload;
+ dbg!("handle_upate_followers");
+
this.update(&mut cx, |this, cx| {
for workspace in &this.workspaces {
workspace.update(cx, |workspace, cx| {
@@ -4012,8 +3942,6 @@ impl std::fmt::Debug for OpenPaths {
}
}
-pub struct WorkspaceCreated(pub WeakView<Workspace>);
-
pub fn activate_workspace_for_project(
cx: &mut AppContext,
predicate: impl Fn(&Project, &AppContext) -> bool + Send + 'static,
@@ -4048,187 +3976,184 @@ pub async fn last_opened_workspace_paths() -> Option<WorkspaceLocation> {
DB.last_workspace().await.log_err().flatten()
}
-// async fn join_channel_internal(
-// channel_id: u64,
-// app_state: &Arc<AppState>,
-// requesting_window: Option<WindowHandle<Workspace>>,
-// active_call: &ModelHandle<ActiveCall>,
-// cx: &mut AsyncAppContext,
-// ) -> Result<bool> {
-// let (should_prompt, open_room) = active_call.read_with(cx, |active_call, cx| {
-// let Some(room) = active_call.room().map(|room| room.read(cx)) else {
-// return (false, None);
-// };
-
-// let already_in_channel = room.channel_id() == Some(channel_id);
-// let should_prompt = room.is_sharing_project()
-// && room.remote_participants().len() > 0
-// && !already_in_channel;
-// let open_room = if already_in_channel {
-// active_call.room().cloned()
-// } else {
-// None
-// };
-// (should_prompt, open_room)
-// });
-
-// if let Some(room) = open_room {
-// let task = room.update(cx, |room, cx| {
-// if let Some((project, host)) = room.most_active_project(cx) {
-// return Some(join_remote_project(project, host, app_state.clone(), cx));
-// }
-
-// None
-// });
-// if let Some(task) = task {
-// task.await?;
-// }
-// return anyhow::Ok(true);
-// }
+async fn join_channel_internal(
+ channel_id: u64,
+ app_state: &Arc<AppState>,
+ requesting_window: Option<WindowHandle<Workspace>>,
+ active_call: &Model<ActiveCall>,
+ cx: &mut AsyncAppContext,
+) -> Result<bool> {
+ let (should_prompt, open_room) = active_call.read_with(cx, |active_call, cx| {
+ let Some(room) = active_call.room().map(|room| room.read(cx)) else {
+ return (false, None);
+ };
-// if should_prompt {
-// if let Some(workspace) = requesting_window {
-// if let Some(window) = workspace.update(cx, |cx| cx.window()) {
-// let answer = window.prompt(
-// PromptLevel::Warning,
-// "Leaving this call will unshare your current project.\nDo you want to switch channels?",
-// &["Yes, Join Channel", "Cancel"],
-// cx,
-// );
-
-// if let Some(mut answer) = answer {
-// if answer.next().await == Some(1) {
-// return Ok(false);
-// }
-// }
-// } else {
-// return Ok(false); // unreachable!() hopefully
-// }
-// } else {
-// return Ok(false); // unreachable!() hopefully
-// }
-// }
+ let already_in_channel = room.channel_id() == Some(channel_id);
+ let should_prompt = room.is_sharing_project()
+ && room.remote_participants().len() > 0
+ && !already_in_channel;
+ let open_room = if already_in_channel {
+ active_call.room().cloned()
+ } else {
+ None
+ };
+ (should_prompt, open_room)
+ })?;
-// let client = cx.read(|cx| active_call.read(cx).client());
-
-// let mut client_status = client.status();
-
-// // this loop will terminate within client::CONNECTION_TIMEOUT seconds.
-// 'outer: loop {
-// let Some(status) = client_status.recv().await else {
-// return Err(anyhow!("error connecting"));
-// };
-
-// match status {
-// Status::Connecting
-// | Status::Authenticating
-// | Status::Reconnecting
-// | Status::Reauthenticating => continue,
-// Status::Connected { .. } => break 'outer,
-// Status::SignedOut => return Err(anyhow!("not signed in")),
-// Status::UpgradeRequired => return Err(anyhow!("zed is out of date")),
-// Status::ConnectionError | Status::ConnectionLost | Status::ReconnectionError { .. } => {
-// return Err(anyhow!("zed is offline"))
-// }
-// }
-// }
+ if let Some(room) = open_room {
+ let task = room.update(cx, |room, cx| {
+ if let Some((project, host)) = room.most_active_project(cx) {
+ return Some(join_remote_project(project, host, app_state.clone(), cx));
+ }
-// let room = active_call
-// .update(cx, |active_call, cx| {
-// active_call.join_channel(channel_id, cx)
-// })
-// .await?;
-
-// room.update(cx, |room, _| room.room_update_completed())
-// .await;
-
-// let task = room.update(cx, |room, cx| {
-// if let Some((project, host)) = room.most_active_project(cx) {
-// return Some(join_remote_project(project, host, app_state.clone(), cx));
-// }
-
-// None
-// });
-// if let Some(task) = task {
-// task.await?;
-// return anyhow::Ok(true);
-// }
-// anyhow::Ok(false)
-// }
+ None
+ })?;
+ if let Some(task) = task {
+ task.await?;
+ }
+ return anyhow::Ok(true);
+ }
-// pub fn join_channel(
-// channel_id: u64,
-// app_state: Arc<AppState>,
-// requesting_window: Option<WindowHandle<Workspace>>,
-// cx: &mut AppContext,
-// ) -> Task<Result<()>> {
-// let active_call = ActiveCall::global(cx);
-// cx.spawn(|mut cx| async move {
-// let result = join_channel_internal(
-// channel_id,
-// &app_state,
-// requesting_window,
-// &active_call,
-// &mut cx,
-// )
-// .await;
-
-// // join channel succeeded, and opened a window
-// if matches!(result, Ok(true)) {
-// return anyhow::Ok(());
-// }
-
-// if requesting_window.is_some() {
-// return anyhow::Ok(());
-// }
-
-// // find an existing workspace to focus and show call controls
-// let mut active_window = activate_any_workspace_window(&mut cx);
-// if active_window.is_none() {
-// // no open workspaces, make one to show the error in (blergh)
-// cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), requesting_window, cx))
-// .await;
-// }
-
-// active_window = activate_any_workspace_window(&mut cx);
-// if active_window.is_none() {
-// return result.map(|_| ()); // unreachable!() assuming new_local always opens a window
-// }
-
-// if let Err(err) = result {
-// let prompt = active_window.unwrap().prompt(
-// PromptLevel::Critical,
-// &format!("Failed to join channel: {}", err),
-// &["Ok"],
-// &mut cx,
-// );
-// if let Some(mut prompt) = prompt {
-// prompt.next().await;
-// } else {
-// return Err(err);
-// }
-// }
-
-// // return ok, we showed the error to the user.
-// return anyhow::Ok(());
-// })
-// }
+ if should_prompt {
+ if let Some(workspace) = requesting_window {
+ let answer = workspace.update(cx, |_, cx| {
+ cx.prompt(
+ PromptLevel::Warning,
+ "Leaving this call will unshare your current project.\nDo you want to switch channels?",
+ &["Yes, Join Channel", "Cancel"],
+ )
+ })?.await;
-// pub fn activate_any_workspace_window(cx: &mut AsyncAppContext) -> Option<AnyWindowHandle> {
-// for window in cx.windows() {
-// let found = window.update(cx, |cx| {
-// let is_workspace = cx.root_view().clone().downcast::<Workspace>().is_some();
-// if is_workspace {
-// cx.activate_window();
-// }
-// is_workspace
-// });
-// if found == Some(true) {
-// return Some(window);
-// }
-// }
-// None
-// }
+ if answer == Ok(1) {
+ return Ok(false);
+ }
+ } else {
+ return Ok(false); // unreachable!() hopefully
+ }
+ }
+
+ let client = cx.update(|cx| active_call.read(cx).client())?;
+
+ let mut client_status = client.status();
+
+ // this loop will terminate within client::CONNECTION_TIMEOUT seconds.
+ 'outer: loop {
+ let Some(status) = client_status.recv().await else {
+ return Err(anyhow!("error connecting"));
+ };
+
+ match status {
+ Status::Connecting
+ | Status::Authenticating
+ | Status::Reconnecting
+ | Status::Reauthenticating => continue,
+ Status::Connected { .. } => break 'outer,
+ Status::SignedOut => return Err(anyhow!("not signed in")),
+ Status::UpgradeRequired => return Err(anyhow!("zed is out of date")),
+ Status::ConnectionError | Status::ConnectionLost | Status::ReconnectionError { .. } => {
+ return Err(anyhow!("zed is offline"))
+ }
+ }
+ }
+
+ let room = active_call
+ .update(cx, |active_call, cx| {
+ active_call.join_channel(channel_id, cx)
+ })?
+ .await?;
+
+ let Some(room) = room else {
+ return anyhow::Ok(true);
+ };
+
+ room.update(cx, |room, _| room.room_update_completed())?
+ .await;
+
+ let task = room.update(cx, |room, cx| {
+ if let Some((project, host)) = room.most_active_project(cx) {
+ return Some(join_remote_project(project, host, app_state.clone(), cx));
+ }
+
+ None
+ })?;
+ if let Some(task) = task {
+ task.await?;
+ return anyhow::Ok(true);
+ }
+ anyhow::Ok(false)
+}
+
+pub fn join_channel(
+ channel_id: u64,
+ app_state: Arc<AppState>,
+ requesting_window: Option<WindowHandle<Workspace>>,
+ cx: &mut AppContext,
+) -> Task<Result<()>> {
+ let active_call = ActiveCall::global(cx);
+ cx.spawn(|mut cx| async move {
+ let result = join_channel_internal(
+ channel_id,
+ &app_state,
+ requesting_window,
+ &active_call,
+ &mut cx,
+ )
+ .await;
+
+ // join channel succeeded, and opened a window
+ if matches!(result, Ok(true)) {
+ return anyhow::Ok(());
+ }
+
+ if requesting_window.is_some() {
+ return anyhow::Ok(());
+ }
+
+ // find an existing workspace to focus and show call controls
+ let mut active_window = activate_any_workspace_window(&mut cx);
+ if active_window.is_none() {
+ // no open workspaces, make one to show the error in (blergh)
+ cx.update(|cx| Workspace::new_local(vec![], app_state.clone(), requesting_window, cx))?
+ .await?;
+ }
+
+ active_window = activate_any_workspace_window(&mut cx);
+ let Some(active_window) = active_window else {
+ return anyhow::Ok(());
+ };
+
+ if let Err(err) = result {
+ active_window
+ .update(&mut cx, |_, cx| {
+ cx.prompt(
+ PromptLevel::Critical,
+ &format!("Failed to join channel: {}", err),
+ &["Ok"],
+ )
+ })?
+ .await
+ .ok();
+ }
+
+ // return ok, we showed the error to the user.
+ return anyhow::Ok(());
+ })
+}
+
+pub fn activate_any_workspace_window(cx: &mut AsyncAppContext) -> Option<AnyWindowHandle> {
+ cx.update(|cx| {
+ for window in cx.windows() {
+ let is_workspace = window.downcast::<Workspace>().is_some();
+ if is_workspace {
+ window.update(cx, |_, cx| cx.activate_window()).ok();
+ return Some(window);
+ }
+ }
+ None
+ })
+ .ok()
+ .flatten()
+}
#[allow(clippy::type_complexity)]
pub fn open_paths(
@@ -615,8 +615,8 @@ fn open_local_settings_file(
.update(&mut cx, |project, cx| {
project.create_entry((tree_id, dir_path), true, cx)
})
- .ok_or_else(|| anyhow!("worktree was removed"))?
- .await?;
+ .await
+ .context("worktree was removed")?;
}
}
@@ -625,8 +625,8 @@ fn open_local_settings_file(
.update(&mut cx, |project, cx| {
project.create_entry((tree_id, file_path), false, cx)
})
- .ok_or_else(|| anyhow!("worktree was removed"))?
- .await?;
+ .await
+ .context("worktree was removed")?;
}
let editor = workspace
@@ -763,7 +763,7 @@ mod tests {
AppContext, AssetSource, Element, Entity, TestAppContext, View, ViewHandle,
};
use language::LanguageRegistry;
- use project::{Project, ProjectPath};
+ use project::{project_settings::ProjectSettings, Project, ProjectPath};
use serde_json::json;
use settings::{handle_settings_file_changes, watch_config_file, SettingsStore};
use std::{
@@ -1308,6 +1308,122 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_opening_excluded_paths(cx: &mut TestAppContext) {
+ let app_state = init_test(cx);
+ cx.update(|cx| {
+ cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+ project_settings.file_scan_exclusions =
+ Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]);
+ });
+ });
+ });
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ "/root",
+ json!({
+ ".gitignore": "ignored_dir\n",
+ ".git": {
+ "HEAD": "ref: refs/heads/main",
+ },
+ "regular_dir": {
+ "file": "regular file contents",
+ },
+ "ignored_dir": {
+ "ignored_subdir": {
+ "file": "ignored subfile contents",
+ },
+ "file": "ignored file contents",
+ },
+ "excluded_dir": {
+ "file": "excluded file contents",
+ },
+ }),
+ )
+ .await;
+
+ let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+ let window = cx.add_window(|cx| Workspace::test_new(project, cx));
+ let workspace = window.root(cx);
+
+ let initial_entries = cx.read(|cx| workspace.file_project_paths(cx));
+ let paths_to_open = [
+ Path::new("/root/excluded_dir/file").to_path_buf(),
+ Path::new("/root/.git/HEAD").to_path_buf(),
+ Path::new("/root/excluded_dir/ignored_subdir").to_path_buf(),
+ ];
+ let (opened_workspace, new_items) = cx
+ .update(|cx| workspace::open_paths(&paths_to_open, &app_state, None, cx))
+ .await
+ .unwrap();
+
+ assert_eq!(
+ opened_workspace.id(),
+ workspace.id(),
+ "Excluded files in subfolders of a workspace root should be opened in the workspace"
+ );
+ let mut opened_paths = cx.read(|cx| {
+ assert_eq!(
+ new_items.len(),
+ paths_to_open.len(),
+ "Expect to get the same number of opened items as submitted paths to open"
+ );
+ new_items
+ .iter()
+ .zip(paths_to_open.iter())
+ .map(|(i, path)| {
+ match i {
+ Some(Ok(i)) => {
+ Some(i.project_path(cx).map(|p| p.path.display().to_string()))
+ }
+ Some(Err(e)) => panic!("Excluded file {path:?} failed to open: {e:?}"),
+ None => None,
+ }
+ .flatten()
+ })
+ .collect::<Vec<_>>()
+ });
+ opened_paths.sort();
+ assert_eq!(
+ opened_paths,
+ vec![
+ None,
+ Some(".git/HEAD".to_string()),
+ Some("excluded_dir/file".to_string()),
+ ],
+ "Excluded files should get opened, excluded dir should not get opened"
+ );
+
+ let entries = cx.read(|cx| workspace.file_project_paths(cx));
+ assert_eq!(
+ initial_entries, entries,
+        "Workspace entries should not change after opening excluded files and directory paths"
+ );
+
+ cx.read(|cx| {
+ let pane = workspace.read(cx).active_pane().read(cx);
+ let mut opened_buffer_paths = pane
+ .items()
+ .map(|i| {
+ i.project_path(cx)
+ .expect("all excluded files that got open should have a path")
+ .path
+ .display()
+ .to_string()
+ })
+ .collect::<Vec<_>>();
+ opened_buffer_paths.sort();
+ assert_eq!(
+ opened_buffer_paths,
+ vec![".git/HEAD".to_string(), "excluded_dir/file".to_string()],
+ "Despite not being present in the worktrees, buffers for excluded files are opened and added to the pane"
+ );
+ });
+ }
+
#[gpui::test]
async fn test_save_conflicting_item(cx: &mut TestAppContext) {
let app_state = init_test(cx);
@@ -191,7 +191,6 @@ fn main() {
user_store: user_store.clone(),
fs,
build_window_options,
- call_factory: call::Call::new,
workspace_store,
node_runtime,
});
@@ -583,8 +583,8 @@ fn open_local_settings_file(
.update(&mut cx, |project, cx| {
project.create_entry((tree_id, dir_path), true, cx)
})?
- .ok_or_else(|| anyhow!("worktree was removed"))?
- .await?;
+ .await
+ .context("worktree was removed")?;
}
}
@@ -593,8 +593,8 @@ fn open_local_settings_file(
.update(&mut cx, |project, cx| {
project.create_entry((tree_id, file_path), false, cx)
})?
- .ok_or_else(|| anyhow!("worktree was removed"))?
- .await?;
+ .await
+ .context("worktree was removed")?;
}
let editor = workspace
@@ -719,3 +719,1846 @@ fn open_bundled_file(
})
.detach_and_log_err(cx);
}
+
+// todo!()
+// #[cfg(test)]
+// mod tests {
+// use super::*;
+// use assets::Assets;
+// use editor::{scroll::autoscroll::Autoscroll, DisplayPoint, Editor};
+// use fs::{FakeFs, Fs};
+// use gpui::{
+// actions, elements::Empty, executor::Deterministic, Action, AnyElement, AnyWindowHandle,
+// AppContext, AssetSource, Element, Entity, TestAppContext, View, ViewHandle,
+// };
+// use language::LanguageRegistry;
+// use project::{project_settings::ProjectSettings, Project, ProjectPath};
+// use serde_json::json;
+// use settings::{handle_settings_file_changes, watch_config_file, SettingsStore};
+// use std::{
+// collections::HashSet,
+// path::{Path, PathBuf},
+// };
+// use theme::{ThemeRegistry, ThemeSettings};
+// use workspace::{
+// item::{Item, ItemHandle},
+// open_new, open_paths, pane, NewFile, SaveIntent, SplitDirection, WorkspaceHandle,
+// };
+
+// #[gpui::test]
+// async fn test_open_paths_action(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// app_state
+// .fs
+// .as_fake()
+// .insert_tree(
+// "/root",
+// json!({
+// "a": {
+// "aa": null,
+// "ab": null,
+// },
+// "b": {
+// "ba": null,
+// "bb": null,
+// },
+// "c": {
+// "ca": null,
+// "cb": null,
+// },
+// "d": {
+// "da": null,
+// "db": null,
+// },
+// }),
+// )
+// .await;
+
+// cx.update(|cx| {
+// open_paths(
+// &[PathBuf::from("/root/a"), PathBuf::from("/root/b")],
+// &app_state,
+// None,
+// cx,
+// )
+// })
+// .await
+// .unwrap();
+// assert_eq!(cx.windows().len(), 1);
+
+// cx.update(|cx| open_paths(&[PathBuf::from("/root/a")], &app_state, None, cx))
+// .await
+// .unwrap();
+// assert_eq!(cx.windows().len(), 1);
+// let workspace_1 = cx.windows()[0].downcast::<Workspace>().unwrap().root(cx);
+// workspace_1.update(cx, |workspace, cx| {
+// assert_eq!(workspace.worktrees(cx).count(), 2);
+// assert!(workspace.left_dock().read(cx).is_open());
+// assert!(workspace.active_pane().is_focused(cx));
+// });
+
+// cx.update(|cx| {
+// open_paths(
+// &[PathBuf::from("/root/b"), PathBuf::from("/root/c")],
+// &app_state,
+// None,
+// cx,
+// )
+// })
+// .await
+// .unwrap();
+// assert_eq!(cx.windows().len(), 2);
+
+// // Replace existing windows
+// let window = cx.windows()[0].downcast::<Workspace>().unwrap();
+// cx.update(|cx| {
+// open_paths(
+// &[PathBuf::from("/root/c"), PathBuf::from("/root/d")],
+// &app_state,
+// Some(window),
+// cx,
+// )
+// })
+// .await
+// .unwrap();
+// assert_eq!(cx.windows().len(), 2);
+// let workspace_1 = cx.windows()[0].downcast::<Workspace>().unwrap().root(cx);
+// workspace_1.update(cx, |workspace, cx| {
+// assert_eq!(
+// workspace
+// .worktrees(cx)
+// .map(|w| w.read(cx).abs_path())
+// .collect::<Vec<_>>(),
+// &[Path::new("/root/c").into(), Path::new("/root/d").into()]
+// );
+// assert!(workspace.left_dock().read(cx).is_open());
+// assert!(workspace.active_pane().is_focused(cx));
+// });
+// }
+
+// #[gpui::test]
+// async fn test_window_edit_state(executor: Arc<Deterministic>, cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// app_state
+// .fs
+// .as_fake()
+// .insert_tree("/root", json!({"a": "hey"}))
+// .await;
+
+// cx.update(|cx| open_paths(&[PathBuf::from("/root/a")], &app_state, None, cx))
+// .await
+// .unwrap();
+// assert_eq!(cx.windows().len(), 1);
+
+// // When opening the workspace, the window is not in an edited state.
+// let window = cx.windows()[0].downcast::<Workspace>().unwrap();
+// let workspace = window.root(cx);
+// let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
+// let editor = workspace.read_with(cx, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap()
+// });
+// assert!(!window.is_edited(cx));
+
+// // Editing a buffer marks the window as edited.
+// editor.update(cx, |editor, cx| editor.insert("EDIT", cx));
+// assert!(window.is_edited(cx));
+
+// // Undoing the edit clears the window's edited state.
+// editor.update(cx, |editor, cx| editor.undo(&Default::default(), cx));
+// assert!(!window.is_edited(cx));
+
+// // Redoing the edit marks the window as edited again.
+// editor.update(cx, |editor, cx| editor.redo(&Default::default(), cx));
+// assert!(window.is_edited(cx));
+
+// // Closing the item clears the window's edited state.
+// let close = pane.update(cx, |pane, cx| {
+// drop(editor);
+// pane.close_active_item(&Default::default(), cx).unwrap()
+// });
+// executor.run_until_parked();
+
+// window.simulate_prompt_answer(1, cx);
+// close.await.unwrap();
+// assert!(!window.is_edited(cx));
+
+// // Opening the buffer again doesn't impact the window's edited state.
+// cx.update(|cx| open_paths(&[PathBuf::from("/root/a")], &app_state, None, cx))
+// .await
+// .unwrap();
+// let editor = workspace.read_with(cx, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap()
+// });
+// assert!(!window.is_edited(cx));
+
+// // Editing the buffer marks the window as edited.
+// editor.update(cx, |editor, cx| editor.insert("EDIT", cx));
+// assert!(window.is_edited(cx));
+
+// // Ensure closing the window via the mouse gets preempted due to the
+// // buffer having unsaved changes.
+// assert!(!window.simulate_close(cx));
+// executor.run_until_parked();
+// assert_eq!(cx.windows().len(), 1);
+
+// // The window is successfully closed after the user dismisses the prompt.
+// window.simulate_prompt_answer(1, cx);
+// executor.run_until_parked();
+// assert_eq!(cx.windows().len(), 0);
+// }
+
+// #[gpui::test]
+// async fn test_new_empty_workspace(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// cx.update(|cx| {
+// open_new(&app_state, cx, |workspace, cx| {
+// Editor::new_file(workspace, &Default::default(), cx)
+// })
+// })
+// .await;
+
+// let window = cx
+// .windows()
+// .first()
+// .unwrap()
+// .downcast::<Workspace>()
+// .unwrap();
+// let workspace = window.root(cx);
+
+// let editor = workspace.update(cx, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<editor::Editor>()
+// .unwrap()
+// });
+
+// editor.update(cx, |editor, cx| {
+// assert!(editor.text(cx).is_empty());
+// assert!(!editor.is_dirty(cx));
+// });
+
+// let save_task = workspace.update(cx, |workspace, cx| {
+// workspace.save_active_item(SaveIntent::Save, cx)
+// });
+// app_state.fs.create_dir(Path::new("/root")).await.unwrap();
+// cx.foreground().run_until_parked();
+// cx.simulate_new_path_selection(|_| Some(PathBuf::from("/root/the-new-name")));
+// save_task.await.unwrap();
+// editor.read_with(cx, |editor, cx| {
+// assert!(!editor.is_dirty(cx));
+// assert_eq!(editor.title(cx), "the-new-name");
+// });
+// }
+
+// #[gpui::test]
+// async fn test_open_entry(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// app_state
+// .fs
+// .as_fake()
+// .insert_tree(
+// "/root",
+// json!({
+// "a": {
+// "file1": "contents 1",
+// "file2": "contents 2",
+// "file3": "contents 3",
+// },
+// }),
+// )
+// .await;
+
+// let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+// let window = cx.add_window(|cx| Workspace::test_new(project, cx));
+// let workspace = window.root(cx);
+
+// let entries = cx.read(|cx| workspace.file_project_paths(cx));
+// let file1 = entries[0].clone();
+// let file2 = entries[1].clone();
+// let file3 = entries[2].clone();
+
+// // Open the first entry
+// let entry_1 = workspace
+// .update(cx, |w, cx| w.open_path(file1.clone(), None, true, cx))
+// .await
+// .unwrap();
+// cx.read(|cx| {
+// let pane = workspace.read(cx).active_pane().read(cx);
+// assert_eq!(
+// pane.active_item().unwrap().project_path(cx),
+// Some(file1.clone())
+// );
+// assert_eq!(pane.items_len(), 1);
+// });
+
+// // Open the second entry
+// workspace
+// .update(cx, |w, cx| w.open_path(file2.clone(), None, true, cx))
+// .await
+// .unwrap();
+// cx.read(|cx| {
+// let pane = workspace.read(cx).active_pane().read(cx);
+// assert_eq!(
+// pane.active_item().unwrap().project_path(cx),
+// Some(file2.clone())
+// );
+// assert_eq!(pane.items_len(), 2);
+// });
+
+// // Open the first entry again. The existing pane item is activated.
+// let entry_1b = workspace
+// .update(cx, |w, cx| w.open_path(file1.clone(), None, true, cx))
+// .await
+// .unwrap();
+// assert_eq!(entry_1.id(), entry_1b.id());
+
+// cx.read(|cx| {
+// let pane = workspace.read(cx).active_pane().read(cx);
+// assert_eq!(
+// pane.active_item().unwrap().project_path(cx),
+// Some(file1.clone())
+// );
+// assert_eq!(pane.items_len(), 2);
+// });
+
+// // Split the pane with the first entry, then open the second entry again.
+// workspace
+// .update(cx, |w, cx| {
+// w.split_and_clone(w.active_pane().clone(), SplitDirection::Right, cx);
+// w.open_path(file2.clone(), None, true, cx)
+// })
+// .await
+// .unwrap();
+
+// workspace.read_with(cx, |w, cx| {
+// assert_eq!(
+// w.active_pane()
+// .read(cx)
+// .active_item()
+// .unwrap()
+// .project_path(cx),
+// Some(file2.clone())
+// );
+// });
+
+// // Open the third entry twice concurrently. Only one pane item is added.
+// let (t1, t2) = workspace.update(cx, |w, cx| {
+// (
+// w.open_path(file3.clone(), None, true, cx),
+// w.open_path(file3.clone(), None, true, cx),
+// )
+// });
+// t1.await.unwrap();
+// t2.await.unwrap();
+// cx.read(|cx| {
+// let pane = workspace.read(cx).active_pane().read(cx);
+// assert_eq!(
+// pane.active_item().unwrap().project_path(cx),
+// Some(file3.clone())
+// );
+// let pane_entries = pane
+// .items()
+// .map(|i| i.project_path(cx).unwrap())
+// .collect::<Vec<_>>();
+// assert_eq!(pane_entries, &[file1, file2, file3]);
+// });
+// }
+
+// #[gpui::test]
+// async fn test_open_paths(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+
+// app_state
+// .fs
+// .as_fake()
+// .insert_tree(
+// "/",
+// json!({
+// "dir1": {
+// "a.txt": ""
+// },
+// "dir2": {
+// "b.txt": ""
+// },
+// "dir3": {
+// "c.txt": ""
+// },
+// "d.txt": ""
+// }),
+// )
+// .await;
+
+// cx.update(|cx| open_paths(&[PathBuf::from("/dir1/")], &app_state, None, cx))
+// .await
+// .unwrap();
+// assert_eq!(cx.windows().len(), 1);
+// let workspace = cx.windows()[0].downcast::<Workspace>().unwrap().root(cx);
+
+// #[track_caller]
+// fn assert_project_panel_selection(
+// workspace: &Workspace,
+// expected_worktree_path: &Path,
+// expected_entry_path: &Path,
+// cx: &AppContext,
+// ) {
+// let project_panel = [
+// workspace.left_dock().read(cx).panel::<ProjectPanel>(),
+// workspace.right_dock().read(cx).panel::<ProjectPanel>(),
+// workspace.bottom_dock().read(cx).panel::<ProjectPanel>(),
+// ]
+// .into_iter()
+// .find_map(std::convert::identity)
+// .expect("found no project panels")
+// .read(cx);
+// let (selected_worktree, selected_entry) = project_panel
+// .selected_entry(cx)
+// .expect("project panel should have a selected entry");
+// assert_eq!(
+// selected_worktree.abs_path().as_ref(),
+// expected_worktree_path,
+// "Unexpected project panel selected worktree path"
+// );
+// assert_eq!(
+// selected_entry.path.as_ref(),
+// expected_entry_path,
+// "Unexpected project panel selected entry path"
+// );
+// }
+
+// // Open a file within an existing worktree.
+// workspace
+// .update(cx, |view, cx| {
+// view.open_paths(vec!["/dir1/a.txt".into()], true, cx)
+// })
+// .await;
+// cx.read(|cx| {
+// let workspace = workspace.read(cx);
+// assert_project_panel_selection(workspace, Path::new("/dir1"), Path::new("a.txt"), cx);
+// assert_eq!(
+// workspace
+// .active_pane()
+// .read(cx)
+// .active_item()
+// .unwrap()
+// .as_any()
+// .downcast_ref::<Editor>()
+// .unwrap()
+// .read(cx)
+// .title(cx),
+// "a.txt"
+// );
+// });
+
+// // Open a file outside of any existing worktree.
+// workspace
+// .update(cx, |view, cx| {
+// view.open_paths(vec!["/dir2/b.txt".into()], true, cx)
+// })
+// .await;
+// cx.read(|cx| {
+// let workspace = workspace.read(cx);
+// assert_project_panel_selection(workspace, Path::new("/dir2/b.txt"), Path::new(""), cx);
+// let worktree_roots = workspace
+// .worktrees(cx)
+// .map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref())
+// .collect::<HashSet<_>>();
+// assert_eq!(
+// worktree_roots,
+// vec!["/dir1", "/dir2/b.txt"]
+// .into_iter()
+// .map(Path::new)
+// .collect(),
+// );
+// assert_eq!(
+// workspace
+// .active_pane()
+// .read(cx)
+// .active_item()
+// .unwrap()
+// .as_any()
+// .downcast_ref::<Editor>()
+// .unwrap()
+// .read(cx)
+// .title(cx),
+// "b.txt"
+// );
+// });
+
+// // Ensure opening a directory and one of its children only adds one worktree.
+// workspace
+// .update(cx, |view, cx| {
+// view.open_paths(vec!["/dir3".into(), "/dir3/c.txt".into()], true, cx)
+// })
+// .await;
+// cx.read(|cx| {
+// let workspace = workspace.read(cx);
+// assert_project_panel_selection(workspace, Path::new("/dir3"), Path::new("c.txt"), cx);
+// let worktree_roots = workspace
+// .worktrees(cx)
+// .map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref())
+// .collect::<HashSet<_>>();
+// assert_eq!(
+// worktree_roots,
+// vec!["/dir1", "/dir2/b.txt", "/dir3"]
+// .into_iter()
+// .map(Path::new)
+// .collect(),
+// );
+// assert_eq!(
+// workspace
+// .active_pane()
+// .read(cx)
+// .active_item()
+// .unwrap()
+// .as_any()
+// .downcast_ref::<Editor>()
+// .unwrap()
+// .read(cx)
+// .title(cx),
+// "c.txt"
+// );
+// });
+
+// // Ensure that invisibly opening a file outside an existing worktree adds a new, invisible worktree.
+// workspace
+// .update(cx, |view, cx| {
+// view.open_paths(vec!["/d.txt".into()], false, cx)
+// })
+// .await;
+// cx.read(|cx| {
+// let workspace = workspace.read(cx);
+// assert_project_panel_selection(workspace, Path::new("/d.txt"), Path::new(""), cx);
+// let worktree_roots = workspace
+// .worktrees(cx)
+// .map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref())
+// .collect::<HashSet<_>>();
+// assert_eq!(
+// worktree_roots,
+// vec!["/dir1", "/dir2/b.txt", "/dir3", "/d.txt"]
+// .into_iter()
+// .map(Path::new)
+// .collect(),
+// );
+
+// let visible_worktree_roots = workspace
+// .visible_worktrees(cx)
+// .map(|w| w.read(cx).as_local().unwrap().abs_path().as_ref())
+// .collect::<HashSet<_>>();
+// assert_eq!(
+// visible_worktree_roots,
+// vec!["/dir1", "/dir2/b.txt", "/dir3"]
+// .into_iter()
+// .map(Path::new)
+// .collect(),
+// );
+
+// assert_eq!(
+// workspace
+// .active_pane()
+// .read(cx)
+// .active_item()
+// .unwrap()
+// .as_any()
+// .downcast_ref::<Editor>()
+// .unwrap()
+// .read(cx)
+// .title(cx),
+// "d.txt"
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_opening_excluded_paths(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// cx.update(|cx| {
+// cx.update_global::<SettingsStore, _, _>(|store, cx| {
+// store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+// project_settings.file_scan_exclusions =
+// Some(vec!["excluded_dir".to_string(), "**/.git".to_string()]);
+// });
+// });
+// });
+// app_state
+// .fs
+// .as_fake()
+// .insert_tree(
+// "/root",
+// json!({
+// ".gitignore": "ignored_dir\n",
+// ".git": {
+// "HEAD": "ref: refs/heads/main",
+// },
+// "regular_dir": {
+// "file": "regular file contents",
+// },
+// "ignored_dir": {
+// "ignored_subdir": {
+// "file": "ignored subfile contents",
+// },
+// "file": "ignored file contents",
+// },
+// "excluded_dir": {
+// "file": "excluded file contents",
+// },
+// }),
+// )
+// .await;
+
+// let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+// let window = cx.add_window(|cx| Workspace::test_new(project, cx));
+// let workspace = window.root(cx);
+
+// let initial_entries = cx.read(|cx| workspace.file_project_paths(cx));
+// let paths_to_open = [
+// Path::new("/root/excluded_dir/file").to_path_buf(),
+// Path::new("/root/.git/HEAD").to_path_buf(),
+// Path::new("/root/excluded_dir/ignored_subdir").to_path_buf(),
+// ];
+// let (opened_workspace, new_items) = cx
+// .update(|cx| workspace::open_paths(&paths_to_open, &app_state, None, cx))
+// .await
+// .unwrap();
+
+// assert_eq!(
+// opened_workspace.id(),
+// workspace.id(),
+// "Excluded files in subfolders of a workspace root should be opened in the workspace"
+// );
+// let mut opened_paths = cx.read(|cx| {
+// assert_eq!(
+// new_items.len(),
+// paths_to_open.len(),
+// "Expect to get the same number of opened items as submitted paths to open"
+// );
+// new_items
+// .iter()
+// .zip(paths_to_open.iter())
+// .map(|(i, path)| {
+// match i {
+// Some(Ok(i)) => {
+// Some(i.project_path(cx).map(|p| p.path.display().to_string()))
+// }
+// Some(Err(e)) => panic!("Excluded file {path:?} failed to open: {e:?}"),
+// None => None,
+// }
+// .flatten()
+// })
+// .collect::<Vec<_>>()
+// });
+// opened_paths.sort();
+// assert_eq!(
+// opened_paths,
+// vec![
+// None,
+// Some(".git/HEAD".to_string()),
+// Some("excluded_dir/file".to_string()),
+// ],
+// "Excluded files should get opened, excluded dir should not get opened"
+// );
+
+// let entries = cx.read(|cx| workspace.file_project_paths(cx));
+// assert_eq!(
+// initial_entries, entries,
+// "Workspace entries should not change after opening excluded files and directory paths"
+// );
+
+// cx.read(|cx| {
+// let pane = workspace.read(cx).active_pane().read(cx);
+// let mut opened_buffer_paths = pane
+// .items()
+// .map(|i| {
+// i.project_path(cx)
+// .expect("all excluded files that got open should have a path")
+// .path
+// .display()
+// .to_string()
+// })
+// .collect::<Vec<_>>();
+// opened_buffer_paths.sort();
+// assert_eq!(
+// opened_buffer_paths,
+// vec![".git/HEAD".to_string(), "excluded_dir/file".to_string()],
+// "Despite not being present in the worktrees, buffers for excluded files are opened and added to the pane"
+// );
+// });
+// }
+
+// #[gpui::test]
+// async fn test_save_conflicting_item(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// app_state
+// .fs
+// .as_fake()
+// .insert_tree("/root", json!({ "a.txt": "" }))
+// .await;
+
+// let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+// let window = cx.add_window(|cx| Workspace::test_new(project, cx));
+// let workspace = window.root(cx);
+
+// // Open a file within an existing worktree.
+// workspace
+// .update(cx, |view, cx| {
+// view.open_paths(vec![PathBuf::from("/root/a.txt")], true, cx)
+// })
+// .await;
+// let editor = cx.read(|cx| {
+// let pane = workspace.read(cx).active_pane().read(cx);
+// let item = pane.active_item().unwrap();
+// item.downcast::<Editor>().unwrap()
+// });
+
+// editor.update(cx, |editor, cx| editor.handle_input("x", cx));
+// app_state
+// .fs
+// .as_fake()
+// .insert_file("/root/a.txt", "changed".to_string())
+// .await;
+// editor
+// .condition(cx, |editor, cx| editor.has_conflict(cx))
+// .await;
+// cx.read(|cx| assert!(editor.is_dirty(cx)));
+
+// let save_task = workspace.update(cx, |workspace, cx| {
+// workspace.save_active_item(SaveIntent::Save, cx)
+// });
+// cx.foreground().run_until_parked();
+// window.simulate_prompt_answer(0, cx);
+// save_task.await.unwrap();
+// editor.read_with(cx, |editor, cx| {
+// assert!(!editor.is_dirty(cx));
+// assert!(!editor.has_conflict(cx));
+// });
+// }
+
+// #[gpui::test]
+// async fn test_open_and_save_new_file(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// app_state.fs.create_dir(Path::new("/root")).await.unwrap();
+
+// let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+// project.update(cx, |project, _| project.languages().add(rust_lang()));
+// let window = cx.add_window(|cx| Workspace::test_new(project, cx));
+// let workspace = window.root(cx);
+// let worktree = cx.read(|cx| workspace.read(cx).worktrees(cx).next().unwrap());
+
+// // Create a new untitled buffer
+// cx.dispatch_action(window.into(), NewFile);
+// let editor = workspace.read_with(cx, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap()
+// });
+
+// editor.update(cx, |editor, cx| {
+// assert!(!editor.is_dirty(cx));
+// assert_eq!(editor.title(cx), "untitled");
+// assert!(Arc::ptr_eq(
+// &editor.language_at(0, cx).unwrap(),
+// &languages::PLAIN_TEXT
+// ));
+// editor.handle_input("hi", cx);
+// assert!(editor.is_dirty(cx));
+// });
+
+// // Save the buffer. This prompts for a filename.
+// let save_task = workspace.update(cx, |workspace, cx| {
+// workspace.save_active_item(SaveIntent::Save, cx)
+// });
+// cx.foreground().run_until_parked();
+// cx.simulate_new_path_selection(|parent_dir| {
+// assert_eq!(parent_dir, Path::new("/root"));
+// Some(parent_dir.join("the-new-name.rs"))
+// });
+// cx.read(|cx| {
+// assert!(editor.is_dirty(cx));
+// assert_eq!(editor.read(cx).title(cx), "untitled");
+// });
+
+// // When the save completes, the buffer's title is updated and the language is assigned based
+// // on the path.
+// save_task.await.unwrap();
+// editor.read_with(cx, |editor, cx| {
+// assert!(!editor.is_dirty(cx));
+// assert_eq!(editor.title(cx), "the-new-name.rs");
+// assert_eq!(editor.language_at(0, cx).unwrap().name().as_ref(), "Rust");
+// });
+
+// // Edit the file and save it again. This time, there is no filename prompt.
+// editor.update(cx, |editor, cx| {
+// editor.handle_input(" there", cx);
+// assert!(editor.is_dirty(cx));
+// });
+// let save_task = workspace.update(cx, |workspace, cx| {
+// workspace.save_active_item(SaveIntent::Save, cx)
+// });
+// save_task.await.unwrap();
+// assert!(!cx.did_prompt_for_new_path());
+// editor.read_with(cx, |editor, cx| {
+// assert!(!editor.is_dirty(cx));
+// assert_eq!(editor.title(cx), "the-new-name.rs")
+// });
+
+// // Open the same newly-created file in another pane item. The new editor should reuse
+// // the same buffer.
+// cx.dispatch_action(window.into(), NewFile);
+// workspace
+// .update(cx, |workspace, cx| {
+// workspace.split_and_clone(
+// workspace.active_pane().clone(),
+// SplitDirection::Right,
+// cx,
+// );
+// workspace.open_path((worktree.read(cx).id(), "the-new-name.rs"), None, true, cx)
+// })
+// .await
+// .unwrap();
+// let editor2 = workspace.update(cx, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap()
+// });
+// cx.read(|cx| {
+// assert_eq!(
+// editor2.read(cx).buffer().read(cx).as_singleton().unwrap(),
+// editor.read(cx).buffer().read(cx).as_singleton().unwrap()
+// );
+// })
+// }
+
+// #[gpui::test]
+// async fn test_setting_language_when_saving_as_single_file_worktree(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// app_state.fs.create_dir(Path::new("/root")).await.unwrap();
+
+// let project = Project::test(app_state.fs.clone(), [], cx).await;
+// project.update(cx, |project, _| project.languages().add(rust_lang()));
+// let window = cx.add_window(|cx| Workspace::test_new(project, cx));
+// let workspace = window.root(cx);
+
+// // Create a new untitled buffer
+// cx.dispatch_action(window.into(), NewFile);
+// let editor = workspace.read_with(cx, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap()
+// });
+
+// editor.update(cx, |editor, cx| {
+// assert!(Arc::ptr_eq(
+// &editor.language_at(0, cx).unwrap(),
+// &languages::PLAIN_TEXT
+// ));
+// editor.handle_input("hi", cx);
+// assert!(editor.is_dirty(cx));
+// });
+
+// // Save the buffer. This prompts for a filename.
+// let save_task = workspace.update(cx, |workspace, cx| {
+// workspace.save_active_item(SaveIntent::Save, cx)
+// });
+// cx.foreground().run_until_parked();
+// cx.simulate_new_path_selection(|_| Some(PathBuf::from("/root/the-new-name.rs")));
+// save_task.await.unwrap();
+// // The buffer is not dirty anymore and the language is assigned based on the path.
+// editor.read_with(cx, |editor, cx| {
+// assert!(!editor.is_dirty(cx));
+// assert_eq!(editor.language_at(0, cx).unwrap().name().as_ref(), "Rust")
+// });
+// }
+
+// #[gpui::test]
+// async fn test_pane_actions(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// app_state
+// .fs
+// .as_fake()
+// .insert_tree(
+// "/root",
+// json!({
+// "a": {
+// "file1": "contents 1",
+// "file2": "contents 2",
+// "file3": "contents 3",
+// },
+// }),
+// )
+// .await;
+
+// let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+// let window = cx.add_window(|cx| Workspace::test_new(project, cx));
+// let workspace = window.root(cx);
+
+// let entries = cx.read(|cx| workspace.file_project_paths(cx));
+// let file1 = entries[0].clone();
+
+// let pane_1 = cx.read(|cx| workspace.read(cx).active_pane().clone());
+
+// workspace
+// .update(cx, |w, cx| w.open_path(file1.clone(), None, true, cx))
+// .await
+// .unwrap();
+
+// let (editor_1, buffer) = pane_1.update(cx, |pane_1, cx| {
+// let editor = pane_1.active_item().unwrap().downcast::<Editor>().unwrap();
+// assert_eq!(editor.project_path(cx), Some(file1.clone()));
+// let buffer = editor.update(cx, |editor, cx| {
+// editor.insert("dirt", cx);
+// editor.buffer().downgrade()
+// });
+// (editor.downgrade(), buffer)
+// });
+
+// cx.dispatch_action(window.into(), pane::SplitRight);
+// let editor_2 = cx.update(|cx| {
+// let pane_2 = workspace.read(cx).active_pane().clone();
+// assert_ne!(pane_1, pane_2);
+
+// let pane2_item = pane_2.read(cx).active_item().unwrap();
+// assert_eq!(pane2_item.project_path(cx), Some(file1.clone()));
+
+// pane2_item.downcast::<Editor>().unwrap().downgrade()
+// });
+// cx.dispatch_action(
+// window.into(),
+// workspace::CloseActiveItem { save_intent: None },
+// );
+
+// cx.foreground().run_until_parked();
+// workspace.read_with(cx, |workspace, _| {
+// assert_eq!(workspace.panes().len(), 1);
+// assert_eq!(workspace.active_pane(), &pane_1);
+// });
+
+// cx.dispatch_action(
+// window.into(),
+// workspace::CloseActiveItem { save_intent: None },
+// );
+// cx.foreground().run_until_parked();
+// window.simulate_prompt_answer(1, cx);
+// cx.foreground().run_until_parked();
+
+// workspace.read_with(cx, |workspace, cx| {
+// assert_eq!(workspace.panes().len(), 1);
+// assert!(workspace.active_item(cx).is_none());
+// });
+
+// cx.assert_dropped(editor_1);
+// cx.assert_dropped(editor_2);
+// cx.assert_dropped(buffer);
+// }
+
+// #[gpui::test]
+// async fn test_navigation(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// app_state
+// .fs
+// .as_fake()
+// .insert_tree(
+// "/root",
+// json!({
+// "a": {
+// "file1": "contents 1\n".repeat(20),
+// "file2": "contents 2\n".repeat(20),
+// "file3": "contents 3\n".repeat(20),
+// },
+// }),
+// )
+// .await;
+
+// let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+// let workspace = cx
+// .add_window(|cx| Workspace::test_new(project.clone(), cx))
+// .root(cx);
+// let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
+
+// let entries = cx.read(|cx| workspace.file_project_paths(cx));
+// let file1 = entries[0].clone();
+// let file2 = entries[1].clone();
+// let file3 = entries[2].clone();
+
+// let editor1 = workspace
+// .update(cx, |w, cx| w.open_path(file1.clone(), None, true, cx))
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+// editor1.update(cx, |editor, cx| {
+// editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
+// s.select_display_ranges([DisplayPoint::new(10, 0)..DisplayPoint::new(10, 0)])
+// });
+// });
+// let editor2 = workspace
+// .update(cx, |w, cx| w.open_path(file2.clone(), None, true, cx))
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+// let editor3 = workspace
+// .update(cx, |w, cx| w.open_path(file3.clone(), None, true, cx))
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+
+// editor3
+// .update(cx, |editor, cx| {
+// editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
+// s.select_display_ranges([DisplayPoint::new(12, 0)..DisplayPoint::new(12, 0)])
+// });
+// editor.newline(&Default::default(), cx);
+// editor.newline(&Default::default(), cx);
+// editor.move_down(&Default::default(), cx);
+// editor.move_down(&Default::default(), cx);
+// editor.save(project.clone(), cx)
+// })
+// .await
+// .unwrap();
+// editor3.update(cx, |editor, cx| {
+// editor.set_scroll_position(vec2f(0., 12.5), cx)
+// });
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file3.clone(), DisplayPoint::new(16, 0), 12.5)
+// );
+
+// workspace
+// .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file3.clone(), DisplayPoint::new(0, 0), 0.)
+// );
+
+// workspace
+// .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file2.clone(), DisplayPoint::new(0, 0), 0.)
+// );
+
+// workspace
+// .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file1.clone(), DisplayPoint::new(10, 0), 0.)
+// );
+
+// workspace
+// .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file1.clone(), DisplayPoint::new(0, 0), 0.)
+// );
+
+// // Go back one more time and ensure we don't navigate past the first item in the history.
+// workspace
+// .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file1.clone(), DisplayPoint::new(0, 0), 0.)
+// );
+
+// workspace
+// .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file1.clone(), DisplayPoint::new(10, 0), 0.)
+// );
+
+// workspace
+// .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file2.clone(), DisplayPoint::new(0, 0), 0.)
+// );
+
+// // Go forward to an item that has been closed, ensuring it gets re-opened at the same
+// // location.
+// pane.update(cx, |pane, cx| {
+// let editor3_id = editor3.id();
+// drop(editor3);
+// pane.close_item_by_id(editor3_id, SaveIntent::Close, cx)
+// })
+// .await
+// .unwrap();
+// workspace
+// .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file3.clone(), DisplayPoint::new(0, 0), 0.)
+// );
+
+// workspace
+// .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file3.clone(), DisplayPoint::new(16, 0), 12.5)
+// );
+
+// workspace
+// .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file3.clone(), DisplayPoint::new(0, 0), 0.)
+// );
+
+// // Go back to an item that has been closed and removed from disk, ensuring it gets skipped.
+// pane.update(cx, |pane, cx| {
+// let editor2_id = editor2.id();
+// drop(editor2);
+// pane.close_item_by_id(editor2_id, SaveIntent::Close, cx)
+// })
+// .await
+// .unwrap();
+// app_state
+// .fs
+// .remove_file(Path::new("/root/a/file2"), Default::default())
+// .await
+// .unwrap();
+// cx.foreground().run_until_parked();
+
+// workspace
+// .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file1.clone(), DisplayPoint::new(10, 0), 0.)
+// );
+// workspace
+// .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file3.clone(), DisplayPoint::new(0, 0), 0.)
+// );
+
+// // Modify file to collapse multiple nav history entries into the same location.
+// // Ensure we don't visit the same location twice when navigating.
+// editor1.update(cx, |editor, cx| {
+// editor.change_selections(None, cx, |s| {
+// s.select_display_ranges([DisplayPoint::new(15, 0)..DisplayPoint::new(15, 0)])
+// })
+// });
+
+// for _ in 0..5 {
+// editor1.update(cx, |editor, cx| {
+// editor.change_selections(None, cx, |s| {
+// s.select_display_ranges([DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)])
+// });
+// });
+// editor1.update(cx, |editor, cx| {
+// editor.change_selections(None, cx, |s| {
+// s.select_display_ranges([DisplayPoint::new(13, 0)..DisplayPoint::new(13, 0)])
+// })
+// });
+// }
+
+// editor1.update(cx, |editor, cx| {
+// editor.transact(cx, |editor, cx| {
+// editor.change_selections(None, cx, |s| {
+// s.select_display_ranges([DisplayPoint::new(2, 0)..DisplayPoint::new(14, 0)])
+// });
+// editor.insert("", cx);
+// })
+// });
+
+// editor1.update(cx, |editor, cx| {
+// editor.change_selections(None, cx, |s| {
+// s.select_display_ranges([DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)])
+// })
+// });
+// workspace
+// .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file1.clone(), DisplayPoint::new(2, 0), 0.)
+// );
+// workspace
+// .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
+// .await
+// .unwrap();
+// assert_eq!(
+// active_location(&workspace, cx),
+// (file1.clone(), DisplayPoint::new(3, 0), 0.)
+// );
+
+// fn active_location(
+// workspace: &ViewHandle<Workspace>,
+// cx: &mut TestAppContext,
+// ) -> (ProjectPath, DisplayPoint, f32) {
+// workspace.update(cx, |workspace, cx| {
+// let item = workspace.active_item(cx).unwrap();
+// let editor = item.downcast::<Editor>().unwrap();
+// let (selections, scroll_position) = editor.update(cx, |editor, cx| {
+// (
+// editor.selections.display_ranges(cx),
+// editor.scroll_position(cx),
+// )
+// });
+// (
+// item.project_path(cx).unwrap(),
+// selections[0].start,
+// scroll_position.y(),
+// )
+// })
+// }
+// }
+
+// #[gpui::test]
+// async fn test_reopening_closed_items(cx: &mut TestAppContext) {
+// let app_state = init_test(cx);
+// app_state
+// .fs
+// .as_fake()
+// .insert_tree(
+// "/root",
+// json!({
+// "a": {
+// "file1": "",
+// "file2": "",
+// "file3": "",
+// "file4": "",
+// },
+// }),
+// )
+// .await;
+
+// let project = Project::test(app_state.fs.clone(), ["/root".as_ref()], cx).await;
+// let workspace = cx
+// .add_window(|cx| Workspace::test_new(project, cx))
+// .root(cx);
+// let pane = workspace.read_with(cx, |workspace, _| workspace.active_pane().clone());
+
+// let entries = cx.read(|cx| workspace.file_project_paths(cx));
+// let file1 = entries[0].clone();
+// let file2 = entries[1].clone();
+// let file3 = entries[2].clone();
+// let file4 = entries[3].clone();
+
+// let file1_item_id = workspace
+// .update(cx, |w, cx| w.open_path(file1.clone(), None, true, cx))
+// .await
+// .unwrap()
+// .id();
+// let file2_item_id = workspace
+// .update(cx, |w, cx| w.open_path(file2.clone(), None, true, cx))
+// .await
+// .unwrap()
+// .id();
+// let file3_item_id = workspace
+// .update(cx, |w, cx| w.open_path(file3.clone(), None, true, cx))
+// .await
+// .unwrap()
+// .id();
+// let file4_item_id = workspace
+// .update(cx, |w, cx| w.open_path(file4.clone(), None, true, cx))
+// .await
+// .unwrap()
+// .id();
+// assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
+
+// // Close all the pane items in some arbitrary order.
+// pane.update(cx, |pane, cx| {
+// pane.close_item_by_id(file1_item_id, SaveIntent::Close, cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
+
+// pane.update(cx, |pane, cx| {
+// pane.close_item_by_id(file4_item_id, SaveIntent::Close, cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
+
+// pane.update(cx, |pane, cx| {
+// pane.close_item_by_id(file2_item_id, SaveIntent::Close, cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
+
+// pane.update(cx, |pane, cx| {
+// pane.close_item_by_id(file3_item_id, SaveIntent::Close, cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), None);
+
+// // Reopen all the closed items, ensuring they are reopened in the same order
+// // in which they were closed.
+// workspace
+// .update(cx, Workspace::reopen_closed_item)
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
+
+// workspace
+// .update(cx, Workspace::reopen_closed_item)
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file2.clone()));
+
+// workspace
+// .update(cx, Workspace::reopen_closed_item)
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
+
+// workspace
+// .update(cx, Workspace::reopen_closed_item)
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file1.clone()));
+
+// // Reopening past the last closed item is a no-op.
+// workspace
+// .update(cx, Workspace::reopen_closed_item)
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file1.clone()));
+
+// // Reopening closed items doesn't interfere with navigation history.
+// workspace
+// .update(cx, |workspace, cx| {
+// workspace.go_back(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
+
+// workspace
+// .update(cx, |workspace, cx| {
+// workspace.go_back(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file2.clone()));
+
+// workspace
+// .update(cx, |workspace, cx| {
+// workspace.go_back(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
+
+// workspace
+// .update(cx, |workspace, cx| {
+// workspace.go_back(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
+
+// workspace
+// .update(cx, |workspace, cx| {
+// workspace.go_back(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
+
+// workspace
+// .update(cx, |workspace, cx| {
+// workspace.go_back(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file2.clone()));
+
+// workspace
+// .update(cx, |workspace, cx| {
+// workspace.go_back(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file1.clone()));
+
+// workspace
+// .update(cx, |workspace, cx| {
+// workspace.go_back(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(active_path(&workspace, cx), Some(file1.clone()));
+
+// fn active_path(
+// workspace: &ViewHandle<Workspace>,
+// cx: &TestAppContext,
+// ) -> Option<ProjectPath> {
+// workspace.read_with(cx, |workspace, cx| {
+// let item = workspace.active_item(cx)?;
+// item.project_path(cx)
+// })
+// }
+// }
+
+// #[gpui::test]
+// async fn test_base_keymap(cx: &mut gpui::TestAppContext) {
+// struct TestView;
+
+// impl Entity for TestView {
+// type Event = ();
+// }
+
+// impl View for TestView {
+// fn ui_name() -> &'static str {
+// "TestView"
+// }
+
+// fn render(&mut self, _: &mut ViewContext<Self>) -> AnyElement<Self> {
+// Empty::new().into_any()
+// }
+// }
+
+// let executor = cx.background();
+// let fs = FakeFs::new(executor.clone());
+
+// actions!(test, [A, B]);
+// // From the Atom keymap
+// actions!(workspace, [ActivatePreviousPane]);
+// // From the JetBrains keymap
+// actions!(pane, [ActivatePrevItem]);
+
+// fs.save(
+// "/settings.json".as_ref(),
+// &r#"
+// {
+// "base_keymap": "Atom"
+// }
+// "#
+// .into(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+
+// fs.save(
+// "/keymap.json".as_ref(),
+// &r#"
+// [
+// {
+// "bindings": {
+// "backspace": "test::A"
+// }
+// }
+// ]
+// "#
+// .into(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+
+// cx.update(|cx| {
+// cx.set_global(SettingsStore::test(cx));
+// theme::init(Assets, cx);
+// welcome::init(cx);
+
+// cx.add_global_action(|_: &A, _cx| {});
+// cx.add_global_action(|_: &B, _cx| {});
+// cx.add_global_action(|_: &ActivatePreviousPane, _cx| {});
+// cx.add_global_action(|_: &ActivatePrevItem, _cx| {});
+
+// let settings_rx = watch_config_file(
+// executor.clone(),
+// fs.clone(),
+// PathBuf::from("/settings.json"),
+// );
+// let keymap_rx =
+// watch_config_file(executor.clone(), fs.clone(), PathBuf::from("/keymap.json"));
+
+// handle_keymap_file_changes(keymap_rx, cx);
+// handle_settings_file_changes(settings_rx, cx);
+// });
+
+// cx.foreground().run_until_parked();
+
+// let window = cx.add_window(|_| TestView);
+
+// // Test that the base keymap is loaded at all
+// assert_key_bindings_for(
+// window.into(),
+// cx,
+// vec![("backspace", &A), ("k", &ActivatePreviousPane)],
+// line!(),
+// );
+
+// // Test modifying the user's keymap, while retaining the base keymap
+// fs.save(
+// "/keymap.json".as_ref(),
+// &r#"
+// [
+// {
+// "bindings": {
+// "backspace": "test::B"
+// }
+// }
+// ]
+// "#
+// .into(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+
+// cx.foreground().run_until_parked();
+
+// assert_key_bindings_for(
+// window.into(),
+// cx,
+// vec![("backspace", &B), ("k", &ActivatePreviousPane)],
+// line!(),
+// );
+
+// // Test modifying the base keymap, while retaining the user's keymap
+// fs.save(
+// "/settings.json".as_ref(),
+// &r#"
+// {
+// "base_keymap": "JetBrains"
+// }
+// "#
+// .into(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+
+// cx.foreground().run_until_parked();
+
+// assert_key_bindings_for(
+// window.into(),
+// cx,
+// vec![("backspace", &B), ("[", &ActivatePrevItem)],
+// line!(),
+// );
+
+// #[track_caller]
+// fn assert_key_bindings_for<'a>(
+// window: AnyWindowHandle,
+// cx: &TestAppContext,
+// actions: Vec<(&'static str, &'a dyn Action)>,
+// line: u32,
+// ) {
+// for (key, action) in actions {
+// // assert that...
+// assert!(
+// cx.available_actions(window, 0)
+// .into_iter()
+// .any(|(_, bound_action, b)| {
+// // action names match...
+// bound_action.name() == action.name()
+// && bound_action.namespace() == action.namespace()
+// // and key strokes contain the given key
+// && b.iter()
+// .any(|binding| binding.keystrokes().iter().any(|k| k.key == key))
+// }),
+// "On line {}, failed to find {} with key binding {}",
+// line,
+// action.name(),
+// key
+// );
+// }
+// }
+// }
+
+// #[gpui::test]
+// async fn test_disabled_keymap_binding(cx: &mut gpui::TestAppContext) {
+// struct TestView;
+
+// impl Entity for TestView {
+// type Event = ();
+// }
+
+// impl View for TestView {
+// fn ui_name() -> &'static str {
+// "TestView"
+// }
+
+// fn render(&mut self, _: &mut ViewContext<Self>) -> AnyElement<Self> {
+// Empty::new().into_any()
+// }
+// }
+
+// let executor = cx.background();
+// let fs = FakeFs::new(executor.clone());
+
+// actions!(test, [A, B]);
+// // From the Atom keymap
+// actions!(workspace, [ActivatePreviousPane]);
+// // From the JetBrains keymap
+// actions!(pane, [ActivatePrevItem]);
+
+// fs.save(
+// "/settings.json".as_ref(),
+// &r#"
+// {
+// "base_keymap": "Atom"
+// }
+// "#
+// .into(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+
+// fs.save(
+// "/keymap.json".as_ref(),
+// &r#"
+// [
+// {
+// "bindings": {
+// "backspace": "test::A"
+// }
+// }
+// ]
+// "#
+// .into(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+
+// cx.update(|cx| {
+// cx.set_global(SettingsStore::test(cx));
+// theme::init(Assets, cx);
+// welcome::init(cx);
+
+// cx.add_global_action(|_: &A, _cx| {});
+// cx.add_global_action(|_: &B, _cx| {});
+// cx.add_global_action(|_: &ActivatePreviousPane, _cx| {});
+// cx.add_global_action(|_: &ActivatePrevItem, _cx| {});
+
+// let settings_rx = watch_config_file(
+// executor.clone(),
+// fs.clone(),
+// PathBuf::from("/settings.json"),
+// );
+// let keymap_rx =
+// watch_config_file(executor.clone(), fs.clone(), PathBuf::from("/keymap.json"));
+
+// handle_keymap_file_changes(keymap_rx, cx);
+// handle_settings_file_changes(settings_rx, cx);
+// });
+
+// cx.foreground().run_until_parked();
+
+// let window = cx.add_window(|_| TestView);
+
+// // Test that the base keymap is loaded at all
+// assert_key_bindings_for(
+// window.into(),
+// cx,
+// vec![("backspace", &A), ("k", &ActivatePreviousPane)],
+// line!(),
+// );
+
+// // Test disabling the key binding for the base keymap
+// fs.save(
+// "/keymap.json".as_ref(),
+// &r#"
+// [
+// {
+// "bindings": {
+// "backspace": null
+// }
+// }
+// ]
+// "#
+// .into(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+
+// cx.foreground().run_until_parked();
+
+// assert_key_bindings_for(
+// window.into(),
+// cx,
+// vec![("k", &ActivatePreviousPane)],
+// line!(),
+// );
+
+// // Test modifying the base keymap, while retaining the user's keymap
+// fs.save(
+// "/settings.json".as_ref(),
+// &r#"
+// {
+// "base_keymap": "JetBrains"
+// }
+// "#
+// .into(),
+// Default::default(),
+// )
+// .await
+// .unwrap();
+
+// cx.foreground().run_until_parked();
+
+// assert_key_bindings_for(window.into(), cx, vec![("[", &ActivatePrevItem)], line!());
+
+// #[track_caller]
+// fn assert_key_bindings_for<'a>(
+// window: AnyWindowHandle,
+// cx: &TestAppContext,
+// actions: Vec<(&'static str, &'a dyn Action)>,
+// line: u32,
+// ) {
+// for (key, action) in actions {
+// // assert that...
+// assert!(
+// cx.available_actions(window, 0)
+// .into_iter()
+// .any(|(_, bound_action, b)| {
+// // action names match...
+// bound_action.name() == action.name()
+// && bound_action.namespace() == action.namespace()
+// // and key strokes contain the given key
+// && b.iter()
+// .any(|binding| binding.keystrokes().iter().any(|k| k.key == key))
+// }),
+// "On line {}, failed to find {} with key binding {}",
+// line,
+// action.name(),
+// key
+// );
+// }
+// }
+// }
+
+// #[gpui::test]
+// fn test_bundled_settings_and_themes(cx: &mut AppContext) {
+// cx.platform()
+// .fonts()
+// .add_fonts(&[
+// Assets
+// .load("fonts/zed-sans/zed-sans-extended.ttf")
+// .unwrap()
+// .to_vec()
+// .into(),
+// Assets
+// .load("fonts/zed-mono/zed-mono-extended.ttf")
+// .unwrap()
+// .to_vec()
+// .into(),
+// Assets
+// .load("fonts/plex/IBMPlexSans-Regular.ttf")
+// .unwrap()
+// .to_vec()
+// .into(),
+// ])
+// .unwrap();
+// let themes = ThemeRegistry::new(Assets, cx.font_cache().clone());
+// let mut settings = SettingsStore::default();
+// settings
+// .set_default_settings(&settings::default_settings(), cx)
+// .unwrap();
+// cx.set_global(settings);
+// theme::init(Assets, cx);
+
+// let mut has_default_theme = false;
+// for theme_name in themes.list(false).map(|meta| meta.name) {
+// let theme = themes.get(&theme_name).unwrap();
+// assert_eq!(theme.meta.name, theme_name);
+// if theme.meta.name == settings::get::<ThemeSettings>(cx).theme.meta.name {
+// has_default_theme = true;
+// }
+// }
+// assert!(has_default_theme);
+// }
+
+// #[gpui::test]
+// fn test_bundled_languages(cx: &mut AppContext) {
+// cx.set_global(SettingsStore::test(cx));
+// let mut languages = LanguageRegistry::test();
+// languages.set_executor(cx.background().clone());
+// let languages = Arc::new(languages);
+// let node_runtime = node_runtime::FakeNodeRuntime::new();
+// languages::init(languages.clone(), node_runtime, cx);
+// for name in languages.language_names() {
+// languages.language_for_name(&name);
+// }
+// cx.foreground().run_until_parked();
+// }
+
+// fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
+// cx.foreground().forbid_parking();
+// cx.update(|cx| {
+// let mut app_state = AppState::test(cx);
+// let state = Arc::get_mut(&mut app_state).unwrap();
+// state.initialize_workspace = initialize_workspace;
+// state.build_window_options = build_window_options;
+// theme::init((), cx);
+// audio::init((), cx);
+// channel::init(&app_state.client, app_state.user_store.clone(), cx);
+// call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
+// notifications::init(app_state.client.clone(), app_state.user_store.clone(), cx);
+// workspace::init(app_state.clone(), cx);
+// Project::init_settings(cx);
+// language::init(cx);
+// editor::init(cx);
+// project_panel::init_settings(cx);
+// collab_ui::init(&app_state, cx);
+// pane::init(cx);
+// project_panel::init((), cx);
+// terminal_view::init(cx);
+// assistant::init(cx);
+// app_state
+// })
+// }
+
+// fn rust_lang() -> Arc<language::Language> {
+// Arc::new(language::Language::new(
+// language::LanguageConfig {
+// name: "Rust".into(),
+// path_suffixes: vec!["rs".to_string()],
+// ..Default::default()
+// },
+// Some(tree_sitter_rust::language()),
+// ))
+// }
+// }