Detailed changes
@@ -3749,9 +3749,9 @@ dependencies = [
[[package]]
name = "lsp-types"
-version = "0.91.1"
+version = "0.94.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2368312c59425dd133cb9a327afee65be0a633a8ce471d248e2202a48f8f68ae"
+checksum = "0b63735a13a1f9cd4f4835223d828ed9c2e35c8c5e61837774399f558b6a1237"
dependencies = [
"bitflags",
"serde",
@@ -1,18 +1,6 @@
-You are #zed, a language model representing the collective understanding of an open source project called Zed. When a new human visits you, they'll send you their profile. You'll respond with an introduction tailored to their situation. For example, a new user might see something like this:
-
-Welcome to Zed! Zed is an innovative, open-source platform designed to enhance team communication and collaboration. At the heart of Zed are *contexts*, which create a dynamic digital representation of shared mental models. Contexts offer personalized starting points and the flexibility to edit and explore, enabling teams to align knowledge, streamline communication, and improve overall performance.
-
-As the #zed model, I'm happy to answer any questions. In fact, I will improve as a result of you doing so!
-
-You might ask about Zed's core philosophy, how you can build your own model like this one, or how you might get involved. Zed's open source!
-
-> [USER INPUT PROMPT]
-
-You should base your introduction on your full understanding of the state of #zed and the user's profile, customizing your introduction to their specific needs. Don't welcome them to Zed if they've been using Zed for 2 days. If they're returning after a while, welcome them back.
-
-User input begins on a line starting with >.
-Your output begins on a line starting with <.
-
+User input begins on a line starting with /.
Don't apologize ever.
Never say "I apologize".
-Use simple language and don't flatter the users. Spend your tokens on valuable information.
+Use simple language and don't flatter the users.
+Keep it short.
+Risk being rude.
@@ -10,7 +10,7 @@ use gpui::{
WindowContext,
};
use isahc::{http::StatusCode, Request, RequestExt};
-use language::{language_settings::SoftWrap, Buffer, Language, LanguageRegistry};
+use language::{language_settings::SoftWrap, Buffer, LanguageRegistry};
use std::{io, sync::Arc};
use util::{post_inc, ResultExt, TryFutureExt};
use workspace::{
@@ -36,7 +36,7 @@ pub enum AssistantPanelEvent {
pub struct AssistantPanel {
width: Option<f32>,
pane: ViewHandle<Pane>,
- workspace: WeakViewHandle<Workspace>,
+ languages: Arc<LanguageRegistry>,
_subscriptions: Vec<Subscription>,
}
@@ -52,6 +52,7 @@ impl AssistantPanel {
let pane = cx.add_view(|cx| {
let mut pane = Pane::new(
workspace.weak_handle(),
+ workspace.project().clone(),
workspace.app_state().background_actions,
Default::default(),
cx,
@@ -98,7 +99,7 @@ impl AssistantPanel {
Self {
pane,
- workspace: workspace.weak_handle(),
+ languages: workspace.app_state().languages.clone(),
width: None,
_subscriptions: subscriptions,
}
@@ -177,28 +178,11 @@ impl Panel for AssistantPanel {
fn set_active(&mut self, active: bool, cx: &mut ViewContext<Self>) {
if active && self.pane.read(cx).items_len() == 0 {
- let workspace = self.workspace.clone();
- let pane = self.pane.clone();
let focus = self.has_focus(cx);
- cx.spawn(|_, mut cx| async move {
- let markdown = workspace
- .read_with(&cx, |workspace, _| {
- workspace
- .app_state()
- .languages
- .language_for_name("Markdown")
- })?
- .await?;
- workspace.update(&mut cx, |workspace, cx| {
- let editor = Box::new(cx.add_view(|cx| {
- AssistantEditor::new(markdown, workspace.app_state().languages.clone(), cx)
- }));
- Pane::add_item(workspace, &pane, editor, true, focus, None, cx);
- })?;
-
- anyhow::Ok(())
- })
- .detach_and_log_err(cx);
+ let editor = cx.add_view(|cx| AssistantEditor::new(self.languages.clone(), cx));
+ self.pane.update(cx, |pane, cx| {
+ pane.add_item(Box::new(editor), true, focus, None, cx)
+ });
}
}
@@ -238,7 +222,6 @@ struct Assistant {
messages_by_id: HashMap<ExcerptId, Message>,
completion_count: usize,
pending_completions: Vec<PendingCompletion>,
- markdown: Arc<Language>,
language_registry: Arc<LanguageRegistry>,
}
@@ -247,18 +230,13 @@ impl Entity for Assistant {
}
impl Assistant {
- fn new(
- markdown: Arc<Language>,
- language_registry: Arc<LanguageRegistry>,
- cx: &mut ModelContext<Self>,
- ) -> Self {
+ fn new(language_registry: Arc<LanguageRegistry>, cx: &mut ModelContext<Self>) -> Self {
let mut this = Self {
buffer: cx.add_model(|_| MultiBuffer::new(0)),
messages: Default::default(),
messages_by_id: Default::default(),
completion_count: Default::default(),
pending_completions: Default::default(),
- markdown,
language_registry,
};
this.push_message(Role::User, cx);
@@ -323,7 +301,18 @@ impl Assistant {
fn push_message(&mut self, role: Role, cx: &mut ModelContext<Self>) -> Message {
let content = cx.add_model(|cx| {
let mut buffer = Buffer::new(0, "", cx);
- buffer.set_language(Some(self.markdown.clone()), cx);
+ let markdown = self.language_registry.language_for_name("Markdown");
+ cx.spawn_weak(|buffer, mut cx| async move {
+ let markdown = markdown.await?;
+ let buffer = buffer
+ .upgrade(&cx)
+ .ok_or_else(|| anyhow!("buffer was dropped"))?;
+ buffer.update(&mut cx, |buffer, cx| {
+ buffer.set_language(Some(markdown), cx)
+ });
+ anyhow::Ok(())
+ })
+ .detach_and_log_err(cx);
buffer.set_language_registry(self.language_registry.clone());
buffer
});
@@ -363,12 +352,8 @@ struct AssistantEditor {
}
impl AssistantEditor {
- fn new(
- markdown: Arc<Language>,
- language_registry: Arc<LanguageRegistry>,
- cx: &mut ViewContext<Self>,
- ) -> Self {
- let assistant = cx.add_model(|cx| Assistant::new(markdown, language_registry, cx));
+ fn new(language_registry: Arc<LanguageRegistry>, cx: &mut ViewContext<Self>) -> Self {
+ let assistant = cx.add_model(|cx| Assistant::new(language_registry, cx));
let editor = cx.add_view(|cx| {
let mut editor = Editor::for_multibuffer(assistant.read(cx).buffer.clone(), None, cx);
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
@@ -5010,19 +5010,21 @@ async fn test_project_symbols(
.unwrap();
let fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server.handle_request::<lsp::request::WorkspaceSymbol, _, _>(|_, _| async move {
- #[allow(deprecated)]
- Ok(Some(vec![lsp::SymbolInformation {
- name: "TWO".into(),
- location: lsp::Location {
- uri: lsp::Url::from_file_path("/code/crate-2/two.rs").unwrap(),
- range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
+ fake_language_server.handle_request::<lsp::WorkspaceSymbolRequest, _, _>(|_, _| async move {
+ Ok(Some(lsp::WorkspaceSymbolResponse::Flat(vec![
+ #[allow(deprecated)]
+ lsp::SymbolInformation {
+ name: "TWO".into(),
+ location: lsp::Location {
+ uri: lsp::Url::from_file_path("/code/crate-2/two.rs").unwrap(),
+ range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
+ },
+ kind: lsp::SymbolKind::CONSTANT,
+ tags: None,
+ container_name: None,
+ deprecated: None,
},
- kind: lsp::SymbolKind::CONSTANT,
- tags: None,
- container_name: None,
- deprecated: None,
- }]))
+ ])))
});
// Request the definition of a symbol as the guest.
@@ -6606,7 +6608,7 @@ async fn test_basic_following(
// When client A navigates back and forth, client B does so as well.
workspace_a
.update(cx_a, |workspace, cx| {
- workspace::Pane::go_back(workspace, None, cx)
+ workspace.go_back(workspace.active_pane().downgrade(), cx)
})
.await
.unwrap();
@@ -6617,7 +6619,7 @@ async fn test_basic_following(
workspace_a
.update(cx_a, |workspace, cx| {
- workspace::Pane::go_back(workspace, None, cx)
+ workspace.go_back(workspace.active_pane().downgrade(), cx)
})
.await
.unwrap();
@@ -6628,7 +6630,7 @@ async fn test_basic_following(
workspace_a
.update(cx_a, |workspace, cx| {
- workspace::Pane::go_forward(workspace, None, cx)
+ workspace.go_forward(workspace.active_pane().downgrade(), cx)
})
.await
.unwrap();
@@ -4938,12 +4938,12 @@ impl Editor {
}
fn push_to_nav_history(
- &self,
+ &mut self,
cursor_anchor: Anchor,
new_position: Option<Point>,
cx: &mut ViewContext<Self>,
) {
- if let Some(nav_history) = &self.nav_history {
+ if let Some(nav_history) = self.nav_history.as_mut() {
let buffer = self.buffer.read(cx).read(cx);
let cursor_position = cursor_anchor.to_point(&buffer);
let scroll_state = self.scroll_manager.anchor();
@@ -6,7 +6,7 @@ use gpui::{
use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, Project, ProjectPath, WorktreeId};
use std::{
- path::Path,
+ path::{Path, PathBuf},
sync::{
atomic::{self, AtomicBool},
Arc,
@@ -25,11 +25,57 @@ pub struct FileFinderDelegate {
latest_search_id: usize,
latest_search_did_cancel: bool,
latest_search_query: Option<PathLikeWithPosition<FileSearchQuery>>,
- currently_opened_path: Option<ProjectPath>,
- matches: Vec<PathMatch>,
- selected: Option<(usize, Arc<Path>)>,
+ currently_opened_path: Option<FoundPath>,
+ matches: Matches,
+ selected_index: Option<usize>,
cancel_flag: Arc<AtomicBool>,
- history_items: Vec<ProjectPath>,
+ history_items: Vec<FoundPath>,
+}
+
+#[derive(Debug)]
+enum Matches {
+ History(Vec<FoundPath>),
+ Search(Vec<PathMatch>),
+}
+
+#[derive(Debug)]
+enum Match<'a> {
+ History(&'a FoundPath),
+ Search(&'a PathMatch),
+}
+
+impl Matches {
+ fn len(&self) -> usize {
+ match self {
+ Self::History(items) => items.len(),
+ Self::Search(items) => items.len(),
+ }
+ }
+
+ fn get(&self, index: usize) -> Option<Match<'_>> {
+ match self {
+ Self::History(items) => items.get(index).map(Match::History),
+ Self::Search(items) => items.get(index).map(Match::Search),
+ }
+ }
+}
+
+impl Default for Matches {
+ fn default() -> Self {
+ Self::History(Vec::new())
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+struct FoundPath {
+ project: ProjectPath,
+ absolute: Option<PathBuf>,
+}
+
+impl FoundPath {
+ fn new(project: ProjectPath, absolute: Option<PathBuf>) -> Self {
+ Self { project, absolute }
+ }
}
actions!(file_finder, [Toggle]);
@@ -43,10 +89,41 @@ const MAX_RECENT_SELECTIONS: usize = 20;
fn toggle_file_finder(workspace: &mut Workspace, _: &Toggle, cx: &mut ViewContext<Workspace>) {
workspace.toggle_modal(cx, |workspace, cx| {
- let history_items = workspace.recent_navigation_history(Some(MAX_RECENT_SELECTIONS), cx);
+ let project = workspace.project().read(cx);
+
let currently_opened_path = workspace
.active_item(cx)
- .and_then(|item| item.project_path(cx));
+ .and_then(|item| item.project_path(cx))
+ .map(|project_path| {
+ let abs_path = project
+ .worktree_for_id(project_path.worktree_id, cx)
+ .map(|worktree| worktree.read(cx).abs_path().join(&project_path.path));
+ FoundPath::new(project_path, abs_path)
+ });
+
+ // if exists, bubble the currently opened path to the top
+ let history_items = currently_opened_path
+ .clone()
+ .into_iter()
+ .chain(
+ workspace
+ .recent_navigation_history(Some(MAX_RECENT_SELECTIONS), cx)
+ .into_iter()
+ .filter(|(history_path, _)| {
+ Some(history_path)
+ != currently_opened_path
+ .as_ref()
+ .map(|found_path| &found_path.project)
+ })
+ .filter(|(_, history_abs_path)| {
+ history_abs_path.as_ref()
+ != currently_opened_path
+ .as_ref()
+ .and_then(|found_path| found_path.absolute.as_ref())
+ })
+ .map(|(history_path, abs_path)| FoundPath::new(history_path, abs_path)),
+ )
+ .collect();
let project = workspace.project().clone();
let workspace = cx.handle().downgrade();
@@ -87,37 +164,11 @@ impl FileSearchQuery {
}
impl FileFinderDelegate {
- fn labels_for_match(&self, path_match: &PathMatch) -> (String, Vec<usize>, String, Vec<usize>) {
- let path = &path_match.path;
- let path_string = path.to_string_lossy();
- let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join("");
- let path_positions = path_match.positions.clone();
-
- let file_name = path.file_name().map_or_else(
- || path_match.path_prefix.to_string(),
- |file_name| file_name.to_string_lossy().to_string(),
- );
- let file_name_start = path_match.path_prefix.chars().count() + path_string.chars().count()
- - file_name.chars().count();
- let file_name_positions = path_positions
- .iter()
- .filter_map(|pos| {
- if pos >= &file_name_start {
- Some(pos - file_name_start)
- } else {
- None
- }
- })
- .collect();
-
- (file_name, file_name_positions, full_path, path_positions)
- }
-
- pub fn new(
+ fn new(
workspace: WeakViewHandle<Workspace>,
project: ModelHandle<Project>,
- currently_opened_path: Option<ProjectPath>,
- history_items: Vec<ProjectPath>,
+ currently_opened_path: Option<FoundPath>,
+ history_items: Vec<FoundPath>,
cx: &mut ViewContext<FileFinder>,
) -> Self {
cx.observe(&project, |picker, _, cx| {
@@ -132,8 +183,8 @@ impl FileFinderDelegate {
latest_search_did_cancel: false,
latest_search_query: None,
currently_opened_path,
- matches: Vec::new(),
- selected: None,
+ matches: Matches::default(),
+ selected_index: None,
cancel_flag: Arc::new(AtomicBool::new(false)),
history_items,
}
@@ -147,7 +198,7 @@ impl FileFinderDelegate {
let relative_to = self
.currently_opened_path
.as_ref()
- .map(|project_path| Arc::clone(&project_path.path));
+ .map(|found_path| Arc::clone(&found_path.project.path));
let worktrees = self
.project
.read(cx)
@@ -188,13 +239,13 @@ impl FileFinderDelegate {
.update(&mut cx, |picker, cx| {
picker
.delegate_mut()
- .set_matches(search_id, did_cancel, query, matches, cx)
+ .set_search_matches(search_id, did_cancel, query, matches, cx)
})
.log_err();
})
}
- fn set_matches(
+ fn set_search_matches(
&mut self,
search_id: usize,
did_cancel: bool,
@@ -211,15 +262,126 @@ impl FileFinderDelegate {
.as_ref()
.map(|query| query.path_like.path_query())
{
- util::extend_sorted(&mut self.matches, matches.into_iter(), 100, |a, b| b.cmp(a));
+ match &mut self.matches {
+ Matches::History(_) => self.matches = Matches::Search(matches),
+ Matches::Search(search_matches) => {
+ util::extend_sorted(search_matches, matches.into_iter(), 100, |a, b| {
+ b.cmp(a)
+ })
+ }
+ }
} else {
- self.matches = matches;
+ self.matches = Matches::Search(matches);
}
self.latest_search_query = Some(query);
self.latest_search_did_cancel = did_cancel;
cx.notify();
}
}
+
+ fn labels_for_match(
+ &self,
+ path_match: Match,
+ cx: &AppContext,
+ ix: usize,
+ ) -> (String, Vec<usize>, String, Vec<usize>) {
+ let (file_name, file_name_positions, full_path, full_path_positions) = match path_match {
+ Match::History(found_path) => {
+ let worktree_id = found_path.project.worktree_id;
+ let project_relative_path = &found_path.project.path;
+ let has_worktree = self
+ .project
+ .read(cx)
+ .worktree_for_id(worktree_id, cx)
+ .is_some();
+
+ if !has_worktree {
+ if let Some(absolute_path) = &found_path.absolute {
+ return (
+ absolute_path
+ .file_name()
+ .map_or_else(
+ || project_relative_path.to_string_lossy(),
+ |file_name| file_name.to_string_lossy(),
+ )
+ .to_string(),
+ Vec::new(),
+ absolute_path.to_string_lossy().to_string(),
+ Vec::new(),
+ );
+ }
+ }
+
+ let mut path = Arc::clone(project_relative_path);
+ if project_relative_path.as_ref() == Path::new("") {
+ if let Some(absolute_path) = &found_path.absolute {
+ path = Arc::from(absolute_path.as_path());
+ }
+ }
+ self.labels_for_path_match(&PathMatch {
+ score: ix as f64,
+ positions: Vec::new(),
+ worktree_id: worktree_id.to_usize(),
+ path,
+ path_prefix: "".into(),
+ distance_to_relative_ancestor: usize::MAX,
+ })
+ }
+ Match::Search(path_match) => self.labels_for_path_match(path_match),
+ };
+
+ if file_name_positions.is_empty() {
+ if let Some(user_home_path) = std::env::var("HOME").ok() {
+ let user_home_path = user_home_path.trim();
+ if !user_home_path.is_empty() {
+ if (&full_path).starts_with(user_home_path) {
+ return (
+ file_name,
+ file_name_positions,
+ full_path.replace(user_home_path, "~"),
+ full_path_positions,
+ );
+ }
+ }
+ }
+ }
+
+ (
+ file_name,
+ file_name_positions,
+ full_path,
+ full_path_positions,
+ )
+ }
+
+ fn labels_for_path_match(
+ &self,
+ path_match: &PathMatch,
+ ) -> (String, Vec<usize>, String, Vec<usize>) {
+ let path = &path_match.path;
+ let path_string = path.to_string_lossy();
+ let full_path = [path_match.path_prefix.as_ref(), path_string.as_ref()].join("");
+ let path_positions = path_match.positions.clone();
+
+ let file_name = path.file_name().map_or_else(
+ || path_match.path_prefix.to_string(),
+ |file_name| file_name.to_string_lossy().to_string(),
+ );
+ let file_name_start = path_match.path_prefix.chars().count() + path_string.chars().count()
+ - file_name.chars().count();
+ let file_name_positions = path_positions
+ .iter()
+ .filter_map(|pos| {
+ if pos >= &file_name_start {
+ Some(pos - file_name_start)
+ } else {
+ None
+ }
+ })
+ .collect();
+
+ (file_name, file_name_positions, full_path, path_positions)
+ }
}
impl PickerDelegate for FileFinderDelegate {
@@ -232,45 +394,35 @@ impl PickerDelegate for FileFinderDelegate {
}
fn selected_index(&self) -> usize {
- if let Some(selected) = self.selected.as_ref() {
- for (ix, path_match) in self.matches.iter().enumerate() {
- if (path_match.worktree_id, path_match.path.as_ref())
- == (selected.0, selected.1.as_ref())
- {
- return ix;
- }
- }
- }
- 0
+ self.selected_index.unwrap_or(0)
}
fn set_selected_index(&mut self, ix: usize, cx: &mut ViewContext<FileFinder>) {
- let mat = &self.matches[ix];
- self.selected = Some((mat.worktree_id, mat.path.clone()));
+ self.selected_index = Some(ix);
cx.notify();
}
fn update_matches(&mut self, raw_query: String, cx: &mut ViewContext<FileFinder>) -> Task<()> {
if raw_query.is_empty() {
+ let project = self.project.read(cx);
self.latest_search_id = post_inc(&mut self.search_count);
- self.matches.clear();
-
- self.matches = self
- .currently_opened_path
- .iter() // if exists, bubble the currently opened path to the top
- .chain(self.history_items.iter().filter(|history_item| {
- Some(*history_item) != self.currently_opened_path.as_ref()
- }))
- .enumerate()
- .map(|(i, history_item)| PathMatch {
- score: i as f64,
- positions: Vec::new(),
- worktree_id: history_item.worktree_id.to_usize(),
- path: Arc::clone(&history_item.path),
- path_prefix: "".into(),
- distance_to_relative_ancestor: usize::MAX,
- })
- .collect();
+ self.matches = Matches::History(
+ self.history_items
+ .iter()
+ .filter(|history_item| {
+ project
+ .worktree_for_id(history_item.project.worktree_id, cx)
+ .is_some()
+ || (project.is_local()
+ && history_item
+ .absolute
+ .as_ref()
+ .filter(|abs_path| abs_path.exists())
+ .is_some())
+ })
+ .cloned()
+ .collect(),
+ );
cx.notify();
Task::ready(())
} else {
@@ -293,16 +445,52 @@ impl PickerDelegate for FileFinderDelegate {
fn confirm(&mut self, cx: &mut ViewContext<FileFinder>) {
if let Some(m) = self.matches.get(self.selected_index()) {
if let Some(workspace) = self.workspace.upgrade(cx) {
- let project_path = ProjectPath {
- worktree_id: WorktreeId::from_usize(m.worktree_id),
- path: m.path.clone(),
- };
- let open_task = workspace.update(cx, |workspace, cx| {
- workspace.open_path(project_path.clone(), None, true, cx)
+ let open_task = workspace.update(cx, |workspace, cx| match m {
+ Match::History(history_match) => {
+ let worktree_id = history_match.project.worktree_id;
+ if workspace
+ .project()
+ .read(cx)
+ .worktree_for_id(worktree_id, cx)
+ .is_some()
+ {
+ workspace.open_path(
+ ProjectPath {
+ worktree_id,
+ path: Arc::clone(&history_match.project.path),
+ },
+ None,
+ true,
+ cx,
+ )
+ } else {
+ match history_match.absolute.as_ref() {
+ Some(abs_path) => {
+ workspace.open_abs_path(abs_path.to_path_buf(), false, cx)
+ }
+ None => workspace.open_path(
+ ProjectPath {
+ worktree_id,
+ path: Arc::clone(&history_match.project.path),
+ },
+ None,
+ true,
+ cx,
+ ),
+ }
+ }
+ }
+ Match::Search(m) => workspace.open_path(
+ ProjectPath {
+ worktree_id: WorktreeId::from_usize(m.worktree_id),
+ path: m.path.clone(),
+ },
+ None,
+ true,
+ cx,
+ ),
});
- let workspace = workspace.downgrade();
-
let row = self
.latest_search_query
.as_ref()
@@ -333,6 +521,7 @@ impl PickerDelegate for FileFinderDelegate {
}
}
workspace
+ .downgrade()
.update(&mut cx, |workspace, cx| workspace.dismiss_modal(cx))
.log_err();
@@ -352,11 +541,14 @@ impl PickerDelegate for FileFinderDelegate {
selected: bool,
cx: &AppContext,
) -> AnyElement<Picker<Self>> {
- let path_match = &self.matches[ix];
+ let path_match = self
+ .matches
+ .get(ix)
+ .expect("Invalid matches state: no element for index {ix}");
let theme = theme::current(cx);
let style = theme.picker.item.style_for(mouse_state, selected);
let (file_name, file_name_positions, full_path, full_path_positions) =
- self.labels_for_match(path_match);
+ self.labels_for_match(path_match, cx, ix);
Flex::column()
.with_child(
Label::new(file_name, style.label.clone()).with_highlights(file_name_positions),
@@ -373,7 +565,7 @@ impl PickerDelegate for FileFinderDelegate {
#[cfg(test)]
mod tests {
- use std::{assert_eq, collections::HashMap, time::Duration};
+ use std::{assert_eq, collections::HashMap, path::Path, time::Duration};
use super::*;
use editor::Editor;
@@ -649,12 +841,16 @@ mod tests {
finder.update(cx, |finder, cx| {
let delegate = finder.delegate_mut();
- let matches = delegate.matches.clone();
+ let matches = match &delegate.matches {
+ Matches::Search(path_matches) => path_matches,
+ _ => panic!("Search matches expected"),
+ }
+ .clone();
// Simulate a search being cancelled after the time limit,
// returning only a subset of the matches that would have been found.
drop(delegate.spawn_search(query.clone(), cx));
- delegate.set_matches(
+ delegate.set_search_matches(
delegate.latest_search_id,
true, // did-cancel
query.clone(),
@@ -664,7 +860,7 @@ mod tests {
// Simulate another cancellation.
drop(delegate.spawn_search(query.clone(), cx));
- delegate.set_matches(
+ delegate.set_search_matches(
delegate.latest_search_id,
true, // did-cancel
query.clone(),
@@ -672,7 +868,12 @@ mod tests {
cx,
);
- assert_eq!(delegate.matches, matches[0..4])
+ match &delegate.matches {
+ Matches::Search(new_matches) => {
+ assert_eq!(new_matches.as_slice(), &matches[0..4])
+ }
+ _ => panic!("Search matches expected"),
+ };
});
}
@@ -772,10 +973,14 @@ mod tests {
cx.read(|cx| {
let finder = finder.read(cx);
let delegate = finder.delegate();
- assert_eq!(delegate.matches.len(), 1);
+ let matches = match &delegate.matches {
+ Matches::Search(path_matches) => path_matches,
+ _ => panic!("Search matches expected"),
+ };
+ assert_eq!(matches.len(), 1);
let (file_name, file_name_positions, full_path, full_path_positions) =
- delegate.labels_for_match(&delegate.matches[0]);
+ delegate.labels_for_path_match(&matches[0]);
assert_eq!(file_name, "the-file");
assert_eq!(file_name_positions, &[0, 1, 4]);
assert_eq!(full_path, "the-file");
@@ -876,10 +1081,10 @@ mod tests {
// When workspace has an active item, sort items which are closer to that item
// first when they have the same name. In this case, b.txt is closer to dir2's a.txt
// so that one should be sorted earlier
- let b_path = Some(ProjectPath {
+ let b_path = Some(dummy_found_path(ProjectPath {
worktree_id,
path: Arc::from(Path::new("/root/dir2/b.txt")),
- });
+ }));
let (_, finder) = cx.add_window(|cx| {
Picker::new(
FileFinderDelegate::new(
@@ -901,8 +1106,12 @@ mod tests {
finder.read_with(cx, |f, _| {
let delegate = f.delegate();
- assert_eq!(delegate.matches[0].path.as_ref(), Path::new("dir2/a.txt"));
- assert_eq!(delegate.matches[1].path.as_ref(), Path::new("dir1/a.txt"));
+ let matches = match &delegate.matches {
+ Matches::Search(path_matches) => path_matches,
+ _ => panic!("Search matches expected"),
+ };
+ assert_eq!(matches[0].path.as_ref(), Path::new("dir2/a.txt"));
+ assert_eq!(matches[1].path.as_ref(), Path::new("dir1/a.txt"));
});
}
@@ -1012,10 +1221,13 @@ mod tests {
.await;
assert_eq!(
history_after_first,
- vec![ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("test/first.rs")),
- }],
+ vec![FoundPath::new(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("test/first.rs")),
+ },
+ Some(PathBuf::from("/src/test/first.rs"))
+ )],
"Should show 1st opened item in the history when opening the 2nd item"
);
@@ -1032,14 +1244,20 @@ mod tests {
assert_eq!(
history_after_second,
vec![
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("test/second.rs")),
- },
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("test/first.rs")),
- },
+ FoundPath::new(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("test/second.rs")),
+ },
+ Some(PathBuf::from("/src/test/second.rs"))
+ ),
+ FoundPath::new(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("test/first.rs")),
+ },
+ Some(PathBuf::from("/src/test/first.rs"))
+ ),
],
"Should show 1st and 2nd opened items in the history when opening the 3rd item. \
2nd item should be the first in the history, as the last opened."
@@ -1058,18 +1276,27 @@ mod tests {
assert_eq!(
history_after_third,
vec![
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("test/third.rs")),
- },
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("test/second.rs")),
- },
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("test/first.rs")),
- },
+ FoundPath::new(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("test/third.rs")),
+ },
+ Some(PathBuf::from("/src/test/third.rs"))
+ ),
+ FoundPath::new(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("test/second.rs")),
+ },
+ Some(PathBuf::from("/src/test/second.rs"))
+ ),
+ FoundPath::new(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("test/first.rs")),
+ },
+ Some(PathBuf::from("/src/test/first.rs"))
+ ),
],
"Should show 1st, 2nd and 3rd opened items in the history when opening the 2nd item again. \
3rd item should be the first in the history, as the last opened."
@@ -1088,24 +1315,162 @@ mod tests {
assert_eq!(
history_after_second_again,
vec![
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("test/second.rs")),
- },
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("test/third.rs")),
- },
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("test/first.rs")),
- },
+ FoundPath::new(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("test/second.rs")),
+ },
+ Some(PathBuf::from("/src/test/second.rs"))
+ ),
+ FoundPath::new(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("test/third.rs")),
+ },
+ Some(PathBuf::from("/src/test/third.rs"))
+ ),
+ FoundPath::new(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("test/first.rs")),
+ },
+ Some(PathBuf::from("/src/test/first.rs"))
+ ),
],
"Should show 1st, 2nd and 3rd opened items in the history when opening the 3rd item again. \
2nd item, as the last opened, 3rd item should go next as it was opened right before."
);
}
+ #[gpui::test]
+ async fn test_external_files_history(
+ deterministic: Arc<gpui::executor::Deterministic>,
+ cx: &mut gpui::TestAppContext,
+ ) {
+ let app_state = init_test(cx);
+
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ "/src",
+ json!({
+ "test": {
+ "first.rs": "// First Rust file",
+ "second.rs": "// Second Rust file",
+ }
+ }),
+ )
+ .await;
+
+ app_state
+ .fs
+ .as_fake()
+ .insert_tree(
+ "/external-src",
+ json!({
+ "test": {
+ "third.rs": "// Third Rust file",
+ "fourth.rs": "// Fourth Rust file",
+ }
+ }),
+ )
+ .await;
+
+ let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
+ cx.update(|cx| {
+ project.update(cx, |project, cx| {
+ project.find_or_create_local_worktree("/external-src", false, cx)
+ })
+ })
+ .detach();
+ deterministic.run_until_parked();
+
+ let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
+ let worktree_id = cx.read(|cx| {
+ let worktrees = workspace.read(cx).worktrees(cx).collect::<Vec<_>>();
+ assert_eq!(worktrees.len(), 1,);
+
+ WorktreeId::from_usize(worktrees[0].id())
+ });
+ workspace
+ .update(cx, |workspace, cx| {
+ workspace.open_abs_path(PathBuf::from("/external-src/test/third.rs"), false, cx)
+ })
+ .detach();
+ deterministic.run_until_parked();
+ let external_worktree_id = cx.read(|cx| {
+ let worktrees = workspace.read(cx).worktrees(cx).collect::<Vec<_>>();
+ assert_eq!(
+ worktrees.len(),
+ 2,
+ "External file should get opened in a new worktree"
+ );
+
+ WorktreeId::from_usize(
+ worktrees
+ .into_iter()
+ .find(|worktree| worktree.id() != worktree_id.to_usize())
+ .expect("New worktree should have a different id")
+ .id(),
+ )
+ });
+ close_active_item(&workspace, &deterministic, cx).await;
+
+ let initial_history_items = open_close_queried_buffer(
+ "sec",
+ 1,
+ "second.rs",
+ window_id,
+ &workspace,
+ &deterministic,
+ cx,
+ )
+ .await;
+ assert_eq!(
+ initial_history_items,
+ vec![FoundPath::new(
+ ProjectPath {
+ worktree_id: external_worktree_id,
+ path: Arc::from(Path::new("")),
+ },
+ Some(PathBuf::from("/external-src/test/third.rs"))
+ )],
+ "Should show external file with its full path in the history after it was open"
+ );
+
+ let updated_history_items = open_close_queried_buffer(
+ "fir",
+ 1,
+ "first.rs",
+ window_id,
+ &workspace,
+ &deterministic,
+ cx,
+ )
+ .await;
+ assert_eq!(
+ updated_history_items,
+ vec![
+ FoundPath::new(
+ ProjectPath {
+ worktree_id,
+ path: Arc::from(Path::new("test/second.rs")),
+ },
+ Some(PathBuf::from("/src/test/second.rs"))
+ ),
+ FoundPath::new(
+ ProjectPath {
+ worktree_id: external_worktree_id,
+ path: Arc::from(Path::new("")),
+ },
+ Some(PathBuf::from("/external-src/test/third.rs"))
+ ),
+ ],
+ "Should keep external file with history updates",
+ );
+ }
+
async fn open_close_queried_buffer(
input: &str,
expected_matches: usize,
@@ -1114,7 +1479,7 @@ mod tests {
workspace: &ViewHandle<Workspace>,
deterministic: &gpui::executor::Deterministic,
cx: &mut gpui::TestAppContext,
- ) -> Vec<ProjectPath> {
+ ) -> Vec<FoundPath> {
cx.dispatch_action(window_id, Toggle);
let finder = cx.read(|cx| workspace.read(cx).modal::<FileFinder>().unwrap());
finder
@@ -1152,6 +1517,16 @@ mod tests {
);
});
+ close_active_item(workspace, deterministic, cx).await;
+
+ history_items
+ }
+
+ async fn close_active_item(
+ workspace: &ViewHandle<Workspace>,
+ deterministic: &gpui::executor::Deterministic,
+ cx: &mut TestAppContext,
+ ) {
let mut original_items = HashMap::new();
cx.read(|cx| {
for pane in workspace.read(cx).panes() {
@@ -1161,6 +1536,8 @@ mod tests {
assert!(insertion_result.is_none(), "Pane id {pane_id} collision");
}
});
+
+ let active_pane = cx.read(|cx| workspace.read(cx).active_pane().clone());
active_pane
.update(cx, |pane, cx| {
pane.close_active_item(&workspace::CloseActiveItem, cx)
@@ -1185,8 +1562,10 @@ mod tests {
}
}
});
-
- history_items
+ assert!(
+ original_items.len() <= 1,
+ "At most one panel should got closed"
+ );
}
fn init_test(cx: &mut TestAppContext) -> Arc<AppState> {
@@ -1216,4 +1595,11 @@ mod tests {
})
.unwrap()
}
+
+ fn dummy_found_path(project_path: ProjectPath) -> FoundPath {
+ FoundPath {
+ project: project_path,
+ absolute: None,
+ }
+ }
}
@@ -434,7 +434,9 @@ impl<T: Entity> ModelHandle<T> {
Duration::from_secs(1)
};
+ let executor = cx.background().clone();
async move {
+ executor.start_waiting();
let notification = crate::util::timeout(duration, rx.next())
.await
.expect("next notification timed out");
@@ -876,6 +876,14 @@ impl Background {
}
}
}
+
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn start_waiting(&self) {
+ match self {
+ Self::Deterministic { executor, .. } => executor.start_waiting(),
+ _ => panic!("this method can only be called on a deterministic executor"),
+ }
+ }
}
impl Default for Background {
@@ -796,6 +796,12 @@ impl LanguageRegistry {
http_client: Arc<dyn HttpClient>,
cx: &mut AppContext,
) -> Option<PendingLanguageServer> {
+ let server_id = self.state.write().next_language_server_id();
+ log::info!(
+ "starting language server name:{}, path:{root_path:?}, id:{server_id}",
+ adapter.name.0
+ );
+
#[cfg(any(test, feature = "test-support"))]
if language.fake_adapter.is_some() {
let task = cx.spawn(|cx| async move {
@@ -825,7 +831,6 @@ impl LanguageRegistry {
Ok(server)
});
- let server_id = self.state.write().next_language_server_id();
return Some(PendingLanguageServer { server_id, task });
}
@@ -834,7 +839,6 @@ impl LanguageRegistry {
.clone()
.ok_or_else(|| anyhow!("language server download directory has not been assigned"))
.log_err()?;
-
let this = self.clone();
let language = language.clone();
let http_client = http_client.clone();
@@ -843,7 +847,6 @@ impl LanguageRegistry {
let adapter = adapter.clone();
let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone();
let login_shell_env_loaded = self.login_shell_env_loaded.clone();
- let server_id = self.state.write().next_language_server_id();
let task = cx.spawn(|cx| async move {
login_shell_env_loaded.await;
@@ -20,7 +20,7 @@ anyhow.workspace = true
async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "82d00a04211cf4e1236029aa03e6b6ce2a74c553", optional = true }
futures.workspace = true
log.workspace = true
-lsp-types = "0.91"
+lsp-types = "0.94"
parking_lot.workspace = true
postage.workspace = true
serde.workspace = true
@@ -361,13 +361,18 @@ impl LanguageServer {
capabilities: ClientCapabilities {
workspace: Some(WorkspaceClientCapabilities {
configuration: Some(true),
- did_change_watched_files: Some(DynamicRegistrationClientCapabilities {
+ did_change_watched_files: Some(DidChangeWatchedFilesClientCapabilities {
dynamic_registration: Some(true),
+ relative_pattern_support: Some(true),
}),
did_change_configuration: Some(DynamicRegistrationClientCapabilities {
dynamic_registration: Some(true),
}),
workspace_folders: Some(true),
+ symbol: Some(WorkspaceSymbolClientCapabilities {
+ resolve_support: None,
+ ..WorkspaceSymbolClientCapabilities::default()
+ }),
..Default::default()
}),
text_document: Some(TextDocumentClientCapabilities {
@@ -849,10 +854,12 @@ impl FakeLanguageServer {
T: request::Request,
T::Result: 'static + Send,
{
+ self.server.executor.start_waiting();
self.server.request::<T>(params).await
}
pub async fn receive_notification<T: notification::Notification>(&mut self) -> T::Params {
+ self.server.executor.start_waiting();
self.try_receive_notification::<T>().await.unwrap()
}
@@ -1524,6 +1524,7 @@ impl LspCommand for GetCodeActions {
context: lsp::CodeActionContext {
diagnostics: relevant_diagnostics,
only: language_server.code_action_kinds(),
+ ..lsp::CodeActionContext::default()
},
}
}
@@ -40,6 +40,7 @@ use language::{
PendingLanguageServer, PointUtf16, RopeFingerprint, TextBufferSnapshot, ToOffset, ToPointUtf16,
Transaction, Unclipped,
};
+use log::error;
use lsp::{
DiagnosticSeverity, DiagnosticTag, DidChangeWatchedFilesRegistrationOptions,
DocumentHighlightKind, LanguageServer, LanguageServerId,
@@ -1459,7 +1460,7 @@ impl Project {
};
cx.foreground().spawn(async move {
- pump_loading_buffer_reciever(loading_watch)
+ wait_for_loading_buffer(loading_watch)
.await
.map_err(|error| anyhow!("{}", error))
})
@@ -3017,10 +3018,12 @@ impl Project {
if let Some(worktree) = worktree.upgrade(cx) {
let worktree = worktree.read(cx);
if let Some(abs_path) = worktree.abs_path().to_str() {
- if let Some(suffix) = watcher
- .glob_pattern
- .strip_prefix(abs_path)
- .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR))
+ if let Some(suffix) = match &watcher.glob_pattern {
+ lsp::GlobPattern::String(s) => s,
+ lsp::GlobPattern::Relative(rp) => &rp.pattern,
+ }
+ .strip_prefix(abs_path)
+ .and_then(|s| s.strip_prefix(std::path::MAIN_SEPARATOR))
{
if let Some(glob) = Glob::new(suffix).log_err() {
builders
@@ -3218,7 +3221,7 @@ impl Project {
) -> Result<(), anyhow::Error> {
let (worktree, relative_path) = self
.find_local_worktree(&abs_path, cx)
- .ok_or_else(|| anyhow!("no worktree found for diagnostics"))?;
+ .ok_or_else(|| anyhow!("no worktree found for diagnostics path {abs_path:?}"))?;
let project_path = ProjectPath {
worktree_id: worktree.read(cx).id(),
@@ -3759,7 +3762,7 @@ impl Project {
let worktree_abs_path = worktree.abs_path().clone();
requests.push(
server
- .request::<lsp::request::WorkspaceSymbol>(
+ .request::<lsp::request::WorkspaceSymbolRequest>(
lsp::WorkspaceSymbolParams {
query: query.to_string(),
..Default::default()
@@ -3767,12 +3770,32 @@ impl Project {
)
.log_err()
.map(move |response| {
+ let lsp_symbols = response.flatten().map(|symbol_response| match symbol_response {
+ lsp::WorkspaceSymbolResponse::Flat(flat_responses) => {
+ flat_responses.into_iter().map(|lsp_symbol| {
+ (lsp_symbol.name, lsp_symbol.kind, lsp_symbol.location)
+ }).collect::<Vec<_>>()
+ }
+ lsp::WorkspaceSymbolResponse::Nested(nested_responses) => {
+ nested_responses.into_iter().filter_map(|lsp_symbol| {
+ let location = match lsp_symbol.location {
+ lsp::OneOf::Left(location) => location,
+ lsp::OneOf::Right(_) => {
+ error!("Unexpected: client capabilities forbid symbol resolutions in workspace.symbol.resolveSupport");
+ return None
+ }
+ };
+ Some((lsp_symbol.name, lsp_symbol.kind, location))
+ }).collect::<Vec<_>>()
+ }
+ }).unwrap_or_default();
+
(
adapter,
language,
worktree_id,
worktree_abs_path,
- response.unwrap_or_default(),
+ lsp_symbols,
)
}),
);
@@ -3794,53 +3817,54 @@ impl Project {
adapter_language,
source_worktree_id,
worktree_abs_path,
- response,
+ lsp_symbols,
) in responses
{
- symbols.extend(response.into_iter().flatten().filter_map(|lsp_symbol| {
- let abs_path = lsp_symbol.location.uri.to_file_path().ok()?;
- let mut worktree_id = source_worktree_id;
- let path;
- if let Some((worktree, rel_path)) =
- this.find_local_worktree(&abs_path, cx)
- {
- worktree_id = worktree.read(cx).id();
- path = rel_path;
- } else {
- path = relativize_path(&worktree_abs_path, &abs_path);
- }
-
- let project_path = ProjectPath {
- worktree_id,
- path: path.into(),
- };
- let signature = this.symbol_signature(&project_path);
- let adapter_language = adapter_language.clone();
- let language = this
- .languages
- .language_for_file(&project_path.path, None)
- .unwrap_or_else(move |_| adapter_language);
- let language_server_name = adapter.name.clone();
- Some(async move {
- let language = language.await;
- let label = language
- .label_for_symbol(&lsp_symbol.name, lsp_symbol.kind)
- .await;
-
- Symbol {
- language_server_name,
- source_worktree_id,
- path: project_path,
- label: label.unwrap_or_else(|| {
- CodeLabel::plain(lsp_symbol.name.clone(), None)
- }),
- kind: lsp_symbol.kind,
- name: lsp_symbol.name,
- range: range_from_lsp(lsp_symbol.location.range),
- signature,
+ symbols.extend(lsp_symbols.into_iter().filter_map(
+ |(symbol_name, symbol_kind, symbol_location)| {
+ let abs_path = symbol_location.uri.to_file_path().ok()?;
+ let mut worktree_id = source_worktree_id;
+ let path;
+ if let Some((worktree, rel_path)) =
+ this.find_local_worktree(&abs_path, cx)
+ {
+ worktree_id = worktree.read(cx).id();
+ path = rel_path;
+ } else {
+ path = relativize_path(&worktree_abs_path, &abs_path);
}
- })
- }));
+
+ let project_path = ProjectPath {
+ worktree_id,
+ path: path.into(),
+ };
+ let signature = this.symbol_signature(&project_path);
+ let adapter_language = adapter_language.clone();
+ let language = this
+ .languages
+ .language_for_file(&project_path.path, None)
+ .unwrap_or_else(move |_| adapter_language);
+ let language_server_name = adapter.name.clone();
+ Some(async move {
+ let language = language.await;
+ let label =
+ language.label_for_symbol(&symbol_name, symbol_kind).await;
+
+ Symbol {
+ language_server_name,
+ source_worktree_id,
+ path: project_path,
+ label: label.unwrap_or_else(|| {
+ CodeLabel::plain(symbol_name.clone(), None)
+ }),
+ kind: symbol_kind,
+ name: symbol_name,
+ range: range_from_lsp(symbol_location.range),
+ signature,
+ }
+ })
+ },
+ ));
}
symbols
});
@@ -4847,7 +4871,7 @@ impl Project {
if worktree.read(cx).is_local() {
cx.subscribe(worktree, |this, worktree, event, cx| match event {
worktree::Event::UpdatedEntries(changes) => {
- this.update_local_worktree_buffers(&worktree, &changes, cx);
+ this.update_local_worktree_buffers(&worktree, changes, cx);
this.update_local_worktree_language_servers(&worktree, changes, cx);
}
worktree::Event::UpdatedGitRepositories(updated_repos) => {
@@ -4881,13 +4905,13 @@ impl Project {
fn update_local_worktree_buffers(
&mut self,
worktree_handle: &ModelHandle<Worktree>,
- changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
+ changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
cx: &mut ModelContext<Self>,
) {
let snapshot = worktree_handle.read(cx).snapshot();
let mut renamed_buffers = Vec::new();
- for (path, entry_id) in changes.keys() {
+ for (path, entry_id, _) in changes {
let worktree_id = worktree_handle.read(cx).id();
let project_path = ProjectPath {
worktree_id,
@@ -4993,7 +5017,7 @@ impl Project {
fn update_local_worktree_language_servers(
&mut self,
worktree_handle: &ModelHandle<Worktree>,
- changes: &HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
+ changes: &[(Arc<Path>, ProjectEntryId, PathChange)],
cx: &mut ModelContext<Self>,
) {
if changes.is_empty() {
@@ -5024,23 +5048,21 @@ impl Project {
let params = lsp::DidChangeWatchedFilesParams {
changes: changes
.iter()
- .filter_map(|((path, _), change)| {
- if watched_paths.is_match(&path) {
- Some(lsp::FileEvent {
- uri: lsp::Url::from_file_path(abs_path.join(path))
- .unwrap(),
- typ: match change {
- PathChange::Added => lsp::FileChangeType::CREATED,
- PathChange::Removed => lsp::FileChangeType::DELETED,
- PathChange::Updated
- | PathChange::AddedOrUpdated => {
- lsp::FileChangeType::CHANGED
- }
- },
- })
- } else {
- None
+ .filter_map(|(path, _, change)| {
+ if !watched_paths.is_match(&path) {
+ return None;
}
+ let typ = match change {
+ PathChange::Loaded => return None,
+ PathChange::Added => lsp::FileChangeType::CREATED,
+ PathChange::Removed => lsp::FileChangeType::DELETED,
+ PathChange::Updated => lsp::FileChangeType::CHANGED,
+ PathChange::AddedOrUpdated => lsp::FileChangeType::CHANGED,
+ };
+ Some(lsp::FileEvent {
+ uri: lsp::Url::from_file_path(abs_path.join(path)).unwrap(),
+ typ,
+ })
})
.collect(),
};
@@ -5059,98 +5081,102 @@ impl Project {
fn update_local_worktree_buffers_git_repos(
&mut self,
worktree_handle: ModelHandle<Worktree>,
- repos: &HashMap<Arc<Path>, LocalRepositoryEntry>,
+ changed_repos: &UpdatedGitRepositoriesSet,
cx: &mut ModelContext<Self>,
) {
debug_assert!(worktree_handle.read(cx).is_local());
- // Setup the pending buffers
+        // Identify the loading buffers whose containing repository has changed.
let future_buffers = self
.loading_buffers_by_path
.iter()
- .filter_map(|(path, receiver)| {
- let path = &path.path;
- let (work_directory, repo) = repos
- .iter()
- .find(|(work_directory, _)| path.starts_with(work_directory))?;
-
- let repo_relative_path = path.strip_prefix(work_directory).log_err()?;
-
+ .filter_map(|(project_path, receiver)| {
+ if project_path.worktree_id != worktree_handle.read(cx).id() {
+ return None;
+ }
+ let path = &project_path.path;
+ changed_repos.iter().find(|(work_dir, change)| {
+ path.starts_with(work_dir) && change.git_dir_changed
+ })?;
let receiver = receiver.clone();
- let repo_ptr = repo.repo_ptr.clone();
- let repo_relative_path = repo_relative_path.to_owned();
+ let path = path.clone();
Some(async move {
- pump_loading_buffer_reciever(receiver)
+ wait_for_loading_buffer(receiver)
.await
.ok()
- .map(|buffer| (buffer, repo_relative_path, repo_ptr))
+ .map(|buffer| (buffer, path))
})
})
- .collect::<FuturesUnordered<_>>()
- .filter_map(|result| async move {
- let (buffer_handle, repo_relative_path, repo_ptr) = result?;
+ .collect::<FuturesUnordered<_>>();
- let lock = repo_ptr.lock();
- lock.load_index_text(&repo_relative_path)
- .map(|diff_base| (diff_base, buffer_handle))
- });
+ // Identify the current buffers whose containing repository has changed.
+ let current_buffers = self
+ .opened_buffers
+ .values()
+ .filter_map(|buffer| {
+ let buffer = buffer.upgrade(cx)?;
+ let file = File::from_dyn(buffer.read(cx).file())?;
+ if file.worktree != worktree_handle {
+ return None;
+ }
+ let path = file.path();
+ changed_repos.iter().find(|(work_dir, change)| {
+ path.starts_with(work_dir) && change.git_dir_changed
+ })?;
+ Some((buffer, path.clone()))
+ })
+ .collect::<Vec<_>>();
+
+ if future_buffers.len() + current_buffers.len() == 0 {
+ return;
+ }
- let update_diff_base_fn = update_diff_base(self);
- cx.spawn(|_, mut cx| async move {
- let diff_base_tasks = cx
+ let remote_id = self.remote_id();
+ let client = self.client.clone();
+ cx.spawn_weak(move |_, mut cx| async move {
+ // Wait for all of the buffers to load.
+ let future_buffers = future_buffers.collect::<Vec<_>>().await;
+
+ // Reload the diff base for every buffer whose containing git repository has changed.
+ let snapshot =
+ worktree_handle.read_with(&cx, |tree, _| tree.as_local().unwrap().snapshot());
+ let diff_bases_by_buffer = cx
.background()
- .spawn(future_buffers.collect::<Vec<_>>())
+ .spawn(async move {
+ future_buffers
+ .into_iter()
+ .filter_map(|e| e)
+ .chain(current_buffers)
+ .filter_map(|(buffer, path)| {
+ let (work_directory, repo) =
+ snapshot.repository_and_work_directory_for_path(&path)?;
+ let repo = snapshot.get_local_repo(&repo)?;
+ let relative_path = path.strip_prefix(&work_directory).ok()?;
+ let base_text = repo.repo_ptr.lock().load_index_text(&relative_path);
+ Some((buffer, base_text))
+ })
+ .collect::<Vec<_>>()
+ })
.await;
- for (diff_base, buffer) in diff_base_tasks.into_iter() {
- update_diff_base_fn(Some(diff_base), buffer, &mut cx);
+ // Assign the new diff bases on all of the buffers.
+ for (buffer, diff_base) in diff_bases_by_buffer {
+ let buffer_id = buffer.update(&mut cx, |buffer, cx| {
+ buffer.set_diff_base(diff_base.clone(), cx);
+ buffer.remote_id()
+ });
+ if let Some(project_id) = remote_id {
+ client
+ .send(proto::UpdateDiffBase {
+ project_id,
+ buffer_id,
+ diff_base,
+ })
+ .log_err();
+ }
}
})
.detach();
-
- // And the current buffers
- for (_, buffer) in &self.opened_buffers {
- if let Some(buffer) = buffer.upgrade(cx) {
- let file = match File::from_dyn(buffer.read(cx).file()) {
- Some(file) => file,
- None => continue,
- };
- if file.worktree != worktree_handle {
- continue;
- }
-
- let path = file.path().clone();
-
- let worktree = worktree_handle.read(cx);
-
- let (work_directory, repo) = match repos
- .iter()
- .find(|(work_directory, _)| path.starts_with(work_directory))
- {
- Some(repo) => repo.clone(),
- None => continue,
- };
-
- let relative_repo = match path.strip_prefix(work_directory).log_err() {
- Some(relative_repo) => relative_repo.to_owned(),
- None => continue,
- };
-
- drop(worktree);
-
- let update_diff_base_fn = update_diff_base(self);
- let git_ptr = repo.repo_ptr.clone();
- let diff_base_task = cx
- .background()
- .spawn(async move { git_ptr.lock().load_index_text(&relative_repo) });
-
- cx.spawn(|_, mut cx| async move {
- let diff_base = diff_base_task.await;
- update_diff_base_fn(diff_base, buffer, &mut cx);
- })
- .detach();
- }
- }
}
pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
@@ -5238,6 +5264,20 @@ impl Project {
Some(ProjectPath { worktree_id, path })
}
+ pub fn absolute_path(&self, project_path: &ProjectPath, cx: &AppContext) -> Option<PathBuf> {
+ let workspace_root = self
+ .worktree_for_id(project_path.worktree_id, cx)?
+ .read(cx)
+ .abs_path();
+ let project_path = project_path.path.as_ref();
+
+ Some(if project_path == Path::new("") {
+ workspace_root.to_path_buf()
+ } else {
+ workspace_root.join(project_path)
+ })
+ }
+
// RPC message handlers
async fn handle_unshare_project(
@@ -5848,7 +5888,7 @@ impl Project {
this.update(&mut cx, |this, cx| {
let Some(guest_id) = envelope.original_sender_id else {
- log::error!("missing original_sender_id on SynchronizeBuffers request");
+ error!("missing original_sender_id on SynchronizeBuffers request");
return;
};
@@ -7072,7 +7112,7 @@ impl Item for Buffer {
}
}
-async fn pump_loading_buffer_reciever(
+async fn wait_for_loading_buffer(
mut receiver: postage::watch::Receiver<Option<Result<ModelHandle<Buffer>, Arc<anyhow::Error>>>>,
) -> Result<ModelHandle<Buffer>, Arc<anyhow::Error>> {
loop {
@@ -7085,26 +7125,3 @@ async fn pump_loading_buffer_reciever(
receiver.next().await;
}
}
-
-fn update_diff_base(
- project: &Project,
-) -> impl Fn(Option<String>, ModelHandle<Buffer>, &mut AsyncAppContext) {
- let remote_id = project.remote_id();
- let client = project.client().clone();
- move |diff_base, buffer, cx| {
- let buffer_id = buffer.update(cx, |buffer, cx| {
- buffer.set_diff_base(diff_base.clone(), cx);
- buffer.remote_id()
- });
-
- if let Some(project_id) = remote_id {
- client
- .send(proto::UpdateDiffBase {
- project_id,
- buffer_id: buffer_id as u64,
- diff_base,
- })
- .log_err();
- }
- }
-}
@@ -506,7 +506,9 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
register_options: serde_json::to_value(
lsp::DidChangeWatchedFilesRegistrationOptions {
watchers: vec![lsp::FileSystemWatcher {
- glob_pattern: "/the-root/*.{rs,c}".to_string(),
+ glob_pattern: lsp::GlobPattern::String(
+ "/the-root/*.{rs,c}".to_string(),
+ ),
kind: None,
}],
},
@@ -1193,7 +1195,7 @@ async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
.await;
}
-#[gpui::test]
+#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -1273,7 +1275,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
// The diagnostics have moved down since they were created.
buffer.next_notification(cx).await;
- buffer.next_notification(cx).await;
+ cx.foreground().run_until_parked();
buffer.read_with(cx, |buffer, _| {
assert_eq!(
buffer
@@ -1352,6 +1354,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
});
buffer.next_notification(cx).await;
+ cx.foreground().run_until_parked();
buffer.read_with(cx, |buffer, _| {
assert_eq!(
buffer
@@ -1444,6 +1447,7 @@ async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
});
buffer.next_notification(cx).await;
+ cx.foreground().run_until_parked();
buffer.read_with(cx, |buffer, _| {
assert_eq!(
buffer
@@ -2524,29 +2528,21 @@ async fn test_rescan_and_remote_updates(
// Create a remote copy of this worktree.
let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
- let initial_snapshot = tree.read_with(cx, |tree, _| tree.as_local().unwrap().snapshot());
- let remote = cx.update(|cx| {
- Worktree::remote(
- 1,
- 1,
- proto::WorktreeMetadata {
- id: initial_snapshot.id().to_proto(),
- root_name: initial_snapshot.root_name().into(),
- abs_path: initial_snapshot
- .abs_path()
- .as_os_str()
- .to_string_lossy()
- .into(),
- visible: true,
- },
- rpc.clone(),
- cx,
- )
- });
- remote.update(cx, |remote, _| {
- let update = initial_snapshot.build_initial_update(1);
- remote.as_remote_mut().unwrap().update_from_remote(update);
+
+ let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
+
+ let updates = Arc::new(Mutex::new(Vec::new()));
+ tree.update(cx, |tree, cx| {
+ let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
+ let updates = updates.clone();
+ move |update| {
+ updates.lock().push(update);
+ async { true }
+ }
+ });
});
+
+ let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
deterministic.run_until_parked();
cx.read(|cx| {
@@ -2612,14 +2608,11 @@ async fn test_rescan_and_remote_updates(
// Update the remote worktree. Check that it becomes consistent with the
// local worktree.
- remote.update(cx, |remote, cx| {
- let update = tree.read(cx).as_local().unwrap().snapshot().build_update(
- &initial_snapshot,
- 1,
- 1,
- true,
- );
- remote.as_remote_mut().unwrap().update_from_remote(update);
+ deterministic.run_until_parked();
+ remote.update(cx, |remote, _| {
+ for update in updates.lock().drain(..) {
+ remote.as_remote_mut().unwrap().update_from_remote(update);
+ }
});
deterministic.run_until_parked();
remote.read_with(cx, |remote, _| {
@@ -17,7 +17,7 @@ use futures::{
},
select_biased,
task::Poll,
- Stream, StreamExt,
+ FutureExt, Stream, StreamExt,
};
use fuzzy::CharBag;
use git::{DOT_GIT, GITIGNORE};
@@ -55,7 +55,7 @@ use std::{
time::{Duration, SystemTime},
};
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
-use util::{paths::HOME, ResultExt, TakeUntilExt, TryFutureExt};
+use util::{paths::HOME, ResultExt, TakeUntilExt};
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
pub struct WorktreeId(usize);
@@ -317,18 +317,20 @@ pub struct LocalSnapshot {
git_repositories: TreeMap<ProjectEntryId, LocalRepositoryEntry>,
}
-pub struct LocalMutableSnapshot {
+pub struct BackgroundScannerState {
snapshot: LocalSnapshot,
/// The ids of all of the entries that were removed from the snapshot
/// as part of the current update. These entry ids may be re-used
/// if the same inode is discovered at a new path, or if the given
/// path is re-created after being deleted.
removed_entry_ids: HashMap<u64, ProjectEntryId>,
+ changed_paths: Vec<Arc<Path>>,
+ prev_snapshot: Snapshot,
}
#[derive(Debug, Clone)]
pub struct LocalRepositoryEntry {
- pub(crate) scan_id: usize,
+ pub(crate) work_dir_scan_id: usize,
pub(crate) git_dir_scan_id: usize,
pub(crate) repo_ptr: Arc<Mutex<dyn GitRepository>>,
/// Path to the actual .git folder.
@@ -357,25 +359,11 @@ impl DerefMut for LocalSnapshot {
}
}
-impl Deref for LocalMutableSnapshot {
- type Target = LocalSnapshot;
-
- fn deref(&self) -> &Self::Target {
- &self.snapshot
- }
-}
-
-impl DerefMut for LocalMutableSnapshot {
- fn deref_mut(&mut self) -> &mut Self::Target {
- &mut self.snapshot
- }
-}
-
enum ScanState {
Started,
Updated {
snapshot: LocalSnapshot,
- changes: HashMap<(Arc<Path>, ProjectEntryId), PathChange>,
+ changes: UpdatedEntriesSet,
barrier: Option<barrier::Sender>,
scanning: bool,
},
@@ -383,14 +371,15 @@ enum ScanState {
struct ShareState {
project_id: u64,
- snapshots_tx: watch::Sender<LocalSnapshot>,
+ snapshots_tx:
+ mpsc::UnboundedSender<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>,
resume_updates: watch::Sender<()>,
_maintain_remote_snapshot: Task<Option<()>>,
}
pub enum Event {
- UpdatedEntries(HashMap<(Arc<Path>, ProjectEntryId), PathChange>),
- UpdatedGitRepositories(HashMap<Arc<Path>, LocalRepositoryEntry>),
+ UpdatedEntries(UpdatedEntriesSet),
+ UpdatedGitRepositories(UpdatedGitRepositoriesSet),
}
impl Entity for Worktree {
@@ -465,8 +454,7 @@ impl Worktree {
scanning,
} => {
*this.is_scanning.0.borrow_mut() = scanning;
- this.set_snapshot(snapshot, cx);
- cx.emit(Event::UpdatedEntries(changes));
+ this.set_snapshot(snapshot, changes, cx);
drop(barrier);
}
}
@@ -560,7 +548,7 @@ impl Worktree {
this.update(&mut cx, |this, cx| {
let this = this.as_remote_mut().unwrap();
this.snapshot = this.background_snapshot.lock().clone();
- cx.emit(Event::UpdatedEntries(Default::default()));
+ cx.emit(Event::UpdatedEntries(Arc::from([])));
cx.notify();
while let Some((scan_id, _)) = this.snapshot_subscriptions.front() {
if this.observed_snapshot(*scan_id) {
@@ -832,73 +820,137 @@ impl LocalWorktree {
Ok(!old_summary.is_empty() || !new_summary.is_empty())
}
- fn set_snapshot(&mut self, new_snapshot: LocalSnapshot, cx: &mut ModelContext<Worktree>) {
- let updated_repos =
- self.changed_repos(&self.git_repositories, &new_snapshot.git_repositories);
+ fn set_snapshot(
+ &mut self,
+ new_snapshot: LocalSnapshot,
+ entry_changes: UpdatedEntriesSet,
+ cx: &mut ModelContext<Worktree>,
+ ) {
+ let repo_changes = self.changed_repos(&self.snapshot, &new_snapshot);
self.snapshot = new_snapshot;
if let Some(share) = self.share.as_mut() {
- *share.snapshots_tx.borrow_mut() = self.snapshot.clone();
+ share
+ .snapshots_tx
+ .unbounded_send((
+ self.snapshot.clone(),
+ entry_changes.clone(),
+ repo_changes.clone(),
+ ))
+ .ok();
}
- if !updated_repos.is_empty() {
- cx.emit(Event::UpdatedGitRepositories(updated_repos));
+ if !entry_changes.is_empty() {
+ cx.emit(Event::UpdatedEntries(entry_changes));
+ }
+ if !repo_changes.is_empty() {
+ cx.emit(Event::UpdatedGitRepositories(repo_changes));
}
}
fn changed_repos(
&self,
- old_repos: &TreeMap<ProjectEntryId, LocalRepositoryEntry>,
- new_repos: &TreeMap<ProjectEntryId, LocalRepositoryEntry>,
- ) -> HashMap<Arc<Path>, LocalRepositoryEntry> {
- let mut diff = HashMap::default();
- let mut old_repos = old_repos.iter().peekable();
- let mut new_repos = new_repos.iter().peekable();
+ old_snapshot: &LocalSnapshot,
+ new_snapshot: &LocalSnapshot,
+ ) -> UpdatedGitRepositoriesSet {
+ let mut changes = Vec::new();
+ let mut old_repos = old_snapshot.git_repositories.iter().peekable();
+ let mut new_repos = new_snapshot.git_repositories.iter().peekable();
loop {
- match (old_repos.peek(), new_repos.peek()) {
- (Some((old_entry_id, old_repo)), Some((new_entry_id, new_repo))) => {
- match Ord::cmp(old_entry_id, new_entry_id) {
+ match (new_repos.peek().map(clone), old_repos.peek().map(clone)) {
+ (Some((new_entry_id, new_repo)), Some((old_entry_id, old_repo))) => {
+ match Ord::cmp(&new_entry_id, &old_entry_id) {
Ordering::Less => {
- if let Some(entry) = self.entry_for_id(**old_entry_id) {
- diff.insert(entry.path.clone(), (*old_repo).clone());
+ if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
+ changes.push((
+ entry.path.clone(),
+ GitRepositoryChange {
+ old_repository: None,
+ git_dir_changed: true,
+ },
+ ));
}
- old_repos.next();
+ new_repos.next();
}
Ordering::Equal => {
- if old_repo.git_dir_scan_id != new_repo.git_dir_scan_id {
- if let Some(entry) = self.entry_for_id(**new_entry_id) {
- diff.insert(entry.path.clone(), (*new_repo).clone());
+ let git_dir_changed =
+ new_repo.git_dir_scan_id != old_repo.git_dir_scan_id;
+ let work_dir_changed =
+ new_repo.work_dir_scan_id != old_repo.work_dir_scan_id;
+ if git_dir_changed || work_dir_changed {
+ if let Some(entry) = new_snapshot.entry_for_id(new_entry_id) {
+ let old_repo = old_snapshot
+ .repository_entries
+ .get(&RepositoryWorkDirectory(entry.path.clone()))
+ .cloned();
+ changes.push((
+ entry.path.clone(),
+ GitRepositoryChange {
+ old_repository: old_repo,
+ git_dir_changed,
+ },
+ ));
}
}
-
- old_repos.next();
new_repos.next();
+ old_repos.next();
}
Ordering::Greater => {
- if let Some(entry) = self.entry_for_id(**new_entry_id) {
- diff.insert(entry.path.clone(), (*new_repo).clone());
+ if let Some(entry) = old_snapshot.entry_for_id(old_entry_id) {
+ let old_repo = old_snapshot
+ .repository_entries
+ .get(&RepositoryWorkDirectory(entry.path.clone()))
+ .cloned();
+ changes.push((
+ entry.path.clone(),
+ GitRepositoryChange {
+ old_repository: old_repo,
+ git_dir_changed: true,
+ },
+ ));
}
- new_repos.next();
+ old_repos.next();
}
}
}
- (Some((old_entry_id, old_repo)), None) => {
- if let Some(entry) = self.entry_for_id(**old_entry_id) {
- diff.insert(entry.path.clone(), (*old_repo).clone());
+ (Some((entry_id, _)), None) => {
+ if let Some(entry) = new_snapshot.entry_for_id(entry_id) {
+ changes.push((
+ entry.path.clone(),
+ GitRepositoryChange {
+ old_repository: None,
+ git_dir_changed: true,
+ },
+ ));
}
- old_repos.next();
+ new_repos.next();
}
- (None, Some((new_entry_id, new_repo))) => {
- if let Some(entry) = self.entry_for_id(**new_entry_id) {
- diff.insert(entry.path.clone(), (*new_repo).clone());
+ (None, Some((entry_id, _))) => {
+ if let Some(entry) = old_snapshot.entry_for_id(entry_id) {
+ let old_repo = old_snapshot
+ .repository_entries
+ .get(&RepositoryWorkDirectory(entry.path.clone()))
+ .cloned();
+ changes.push((
+ entry.path.clone(),
+ GitRepositoryChange {
+ old_repository: old_repo,
+ git_dir_changed: true,
+ },
+ ));
}
- new_repos.next();
+ old_repos.next();
}
(None, None) => break,
}
}
- diff
+
+ fn clone<T: Clone, U: Clone>(value: &(&T, &U)) -> (T, U) {
+ (value.0.clone(), value.1.clone())
+ }
+
+ changes.into()
}
pub fn scan_complete(&self) -> impl Future<Output = ()> {
@@ -1239,89 +1291,97 @@ impl LocalWorktree {
})
}
- pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
+ pub fn observe_updates<F, Fut>(
+ &mut self,
+ project_id: u64,
+ cx: &mut ModelContext<Worktree>,
+ callback: F,
+ ) -> oneshot::Receiver<()>
+ where
+ F: 'static + Send + Fn(proto::UpdateWorktree) -> Fut,
+ Fut: Send + Future<Output = bool>,
+ {
+ #[cfg(any(test, feature = "test-support"))]
+ const MAX_CHUNK_SIZE: usize = 2;
+ #[cfg(not(any(test, feature = "test-support")))]
+ const MAX_CHUNK_SIZE: usize = 256;
+
let (share_tx, share_rx) = oneshot::channel();
if let Some(share) = self.share.as_mut() {
- let _ = share_tx.send(());
+ share_tx.send(()).ok();
*share.resume_updates.borrow_mut() = ();
- } else {
- let (snapshots_tx, mut snapshots_rx) = watch::channel_with(self.snapshot());
- let (resume_updates_tx, mut resume_updates_rx) = watch::channel();
- let worktree_id = cx.model_id() as u64;
+ return share_rx;
+ }
- for (path, summaries) in &self.diagnostic_summaries {
- for (&server_id, summary) in summaries {
- if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
- project_id,
- worktree_id,
- summary: Some(summary.to_proto(server_id, &path)),
- }) {
- return Task::ready(Err(e));
- }
+ let (resume_updates_tx, mut resume_updates_rx) = watch::channel::<()>();
+ let (snapshots_tx, mut snapshots_rx) =
+ mpsc::unbounded::<(LocalSnapshot, UpdatedEntriesSet, UpdatedGitRepositoriesSet)>();
+ snapshots_tx
+ .unbounded_send((self.snapshot(), Arc::from([]), Arc::from([])))
+ .ok();
+
+ let worktree_id = cx.model_id() as u64;
+ let _maintain_remote_snapshot = cx.background().spawn(async move {
+ let mut is_first = true;
+ while let Some((snapshot, entry_changes, repo_changes)) = snapshots_rx.next().await {
+ let update;
+ if is_first {
+ update = snapshot.build_initial_update(project_id, worktree_id);
+ is_first = false;
+ } else {
+ update =
+ snapshot.build_update(project_id, worktree_id, entry_changes, repo_changes);
}
- }
- let _maintain_remote_snapshot = cx.background().spawn({
- let client = self.client.clone();
- async move {
- let mut share_tx = Some(share_tx);
- let mut prev_snapshot = LocalSnapshot {
- ignores_by_parent_abs_path: Default::default(),
- git_repositories: Default::default(),
- snapshot: Snapshot {
- id: WorktreeId(worktree_id as usize),
- abs_path: Path::new("").into(),
- root_name: Default::default(),
- root_char_bag: Default::default(),
- entries_by_path: Default::default(),
- entries_by_id: Default::default(),
- repository_entries: Default::default(),
- scan_id: 0,
- completed_scan_id: 0,
- },
- };
- while let Some(snapshot) = snapshots_rx.recv().await {
- #[cfg(any(test, feature = "test-support"))]
- const MAX_CHUNK_SIZE: usize = 2;
- #[cfg(not(any(test, feature = "test-support")))]
- const MAX_CHUNK_SIZE: usize = 256;
-
- let update =
- snapshot.build_update(&prev_snapshot, project_id, worktree_id, true);
- for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
- let _ = resume_updates_rx.try_recv();
- while let Err(error) = client.request(update.clone()).await {
- log::error!("failed to send worktree update: {}", error);
- log::info!("waiting to resume updates");
- if resume_updates_rx.next().await.is_none() {
- return Ok(());
- }
+ for update in proto::split_worktree_update(update, MAX_CHUNK_SIZE) {
+ let _ = resume_updates_rx.try_recv();
+ loop {
+ let result = callback(update.clone());
+ if result.await {
+ break;
+ } else {
+ log::info!("waiting to resume updates");
+ if resume_updates_rx.next().await.is_none() {
+ return Some(());
}
}
+ }
+ }
+ }
+ share_tx.send(()).ok();
+ Some(())
+ });
- if let Some(share_tx) = share_tx.take() {
- let _ = share_tx.send(());
- }
+ self.share = Some(ShareState {
+ project_id,
+ snapshots_tx,
+ resume_updates: resume_updates_tx,
+ _maintain_remote_snapshot,
+ });
+ share_rx
+ }
- prev_snapshot = snapshot;
- }
+ pub fn share(&mut self, project_id: u64, cx: &mut ModelContext<Worktree>) -> Task<Result<()>> {
+ let client = self.client.clone();
- Ok::<_, anyhow::Error>(())
+ for (path, summaries) in &self.diagnostic_summaries {
+ for (&server_id, summary) in summaries {
+ if let Err(e) = self.client.send(proto::UpdateDiagnosticSummary {
+ project_id,
+ worktree_id: cx.model_id() as u64,
+ summary: Some(summary.to_proto(server_id, &path)),
+ }) {
+ return Task::ready(Err(e));
}
- .log_err()
- });
-
- self.share = Some(ShareState {
- project_id,
- snapshots_tx,
- resume_updates: resume_updates_tx,
- _maintain_remote_snapshot,
- });
+ }
}
+ let rx = self.observe_updates(project_id, cx, move |update| {
+ client.request(update).map(|result| result.is_ok())
+ });
cx.foreground()
- .spawn(async move { share_rx.await.map_err(|_| anyhow!("share ended")) })
+ .spawn(async move { rx.await.map_err(|_| anyhow!("share ended")) })
}
pub fn unshare(&mut self) {
@@ -1530,10 +1590,12 @@ impl Snapshot {
pub(crate) fn apply_remote_update(&mut self, mut update: proto::UpdateWorktree) -> Result<()> {
let mut entries_by_path_edits = Vec::new();
let mut entries_by_id_edits = Vec::new();
+
for entry_id in update.removed_entries {
- if let Some(entry) = self.entry_for_id(ProjectEntryId::from_proto(entry_id)) {
+ let entry_id = ProjectEntryId::from_proto(entry_id);
+ entries_by_id_edits.push(Edit::Remove(entry_id));
+ if let Some(entry) = self.entry_for_id(entry_id) {
entries_by_path_edits.push(Edit::Remove(PathKey(entry.path.clone())));
- entries_by_id_edits.push(Edit::Remove(entry.id));
}
}
@@ -1542,6 +1604,11 @@ impl Snapshot {
if let Some(PathEntry { path, .. }) = self.entries_by_id.get(&entry.id, &()) {
entries_by_path_edits.push(Edit::Remove(PathKey(path.clone())));
}
+ if let Some(old_entry) = self.entries_by_path.get(&PathKey(entry.path.clone()), &()) {
+ if old_entry.id != entry.id {
+ entries_by_id_edits.push(Edit::Remove(old_entry.id));
+ }
+ }
entries_by_id_edits.push(Edit::Insert(PathEntry {
id: entry.id,
path: entry.path.clone(),
@@ -1684,20 +1751,19 @@ impl Snapshot {
/// Get the repository whose work directory contains the given path.
pub fn repository_for_path(&self, path: &Path) -> Option<RepositoryEntry> {
- let mut max_len = 0;
- let mut current_candidate = None;
- for (work_directory, repo) in (&self.repository_entries).iter() {
- if path.starts_with(&work_directory.0) {
- if work_directory.0.as_os_str().len() >= max_len {
- current_candidate = Some(repo);
- max_len = work_directory.0.as_os_str().len();
- } else {
- break;
- }
- }
- }
+ self.repository_and_work_directory_for_path(path)
+ .map(|e| e.1)
+ }
- current_candidate.cloned()
+ pub fn repository_and_work_directory_for_path(
+ &self,
+ path: &Path,
+ ) -> Option<(RepositoryWorkDirectory, RepositoryEntry)> {
+ self.repository_entries
+ .iter()
+ .filter(|(workdir_path, _)| path.starts_with(workdir_path))
+ .last()
+ .map(|(path, repo)| (path.clone(), repo.clone()))
}
/// Given an ordered iterator of entries, returns an iterator of those entries,
@@ -1833,117 +1899,52 @@ impl LocalSnapshot {
.find(|(_, repo)| repo.in_dot_git(path))
}
- #[cfg(test)]
- pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree {
- let root_name = self.root_name.clone();
- proto::UpdateWorktree {
- project_id,
- worktree_id: self.id().to_proto(),
- abs_path: self.abs_path().to_string_lossy().into(),
- root_name,
- updated_entries: self.entries_by_path.iter().map(Into::into).collect(),
- removed_entries: Default::default(),
- scan_id: self.scan_id as u64,
- is_last_update: true,
- updated_repositories: self.repository_entries.values().map(Into::into).collect(),
- removed_repositories: Default::default(),
- }
- }
-
- pub(crate) fn build_update(
+ fn build_update(
&self,
- other: &Self,
project_id: u64,
worktree_id: u64,
- include_ignored: bool,
+ entry_changes: UpdatedEntriesSet,
+ repo_changes: UpdatedGitRepositoriesSet,
) -> proto::UpdateWorktree {
let mut updated_entries = Vec::new();
let mut removed_entries = Vec::new();
- let mut self_entries = self
- .entries_by_id
- .cursor::<()>()
- .filter(|e| include_ignored || !e.is_ignored)
- .peekable();
- let mut other_entries = other
- .entries_by_id
- .cursor::<()>()
- .filter(|e| include_ignored || !e.is_ignored)
- .peekable();
- loop {
- match (self_entries.peek(), other_entries.peek()) {
- (Some(self_entry), Some(other_entry)) => {
- match Ord::cmp(&self_entry.id, &other_entry.id) {
- Ordering::Less => {
- let entry = self.entry_for_id(self_entry.id).unwrap().into();
- updated_entries.push(entry);
- self_entries.next();
- }
- Ordering::Equal => {
- if self_entry.scan_id != other_entry.scan_id {
- let entry = self.entry_for_id(self_entry.id).unwrap().into();
- updated_entries.push(entry);
- }
+ let mut updated_repositories = Vec::new();
+ let mut removed_repositories = Vec::new();
- self_entries.next();
- other_entries.next();
- }
- Ordering::Greater => {
- removed_entries.push(other_entry.id.to_proto());
- other_entries.next();
- }
- }
- }
- (Some(self_entry), None) => {
- let entry = self.entry_for_id(self_entry.id).unwrap().into();
- updated_entries.push(entry);
- self_entries.next();
- }
- (None, Some(other_entry)) => {
- removed_entries.push(other_entry.id.to_proto());
- other_entries.next();
- }
- (None, None) => break,
+ for (_, entry_id, path_change) in entry_changes.iter() {
+ if let PathChange::Removed = path_change {
+ removed_entries.push(entry_id.0 as u64);
+ } else if let Some(entry) = self.entry_for_id(*entry_id) {
+ updated_entries.push(proto::Entry::from(entry));
}
}
- let mut updated_repositories: Vec<proto::RepositoryEntry> = Vec::new();
- let mut removed_repositories = Vec::new();
- let mut self_repos = self.snapshot.repository_entries.iter().peekable();
- let mut other_repos = other.snapshot.repository_entries.iter().peekable();
- loop {
- match (self_repos.peek(), other_repos.peek()) {
- (Some((self_work_dir, self_repo)), Some((other_work_dir, other_repo))) => {
- match Ord::cmp(self_work_dir, other_work_dir) {
- Ordering::Less => {
- updated_repositories.push((*self_repo).into());
- self_repos.next();
- }
- Ordering::Equal => {
- if self_repo != other_repo {
- updated_repositories.push(self_repo.build_update(other_repo));
- }
-
- self_repos.next();
- other_repos.next();
- }
- Ordering::Greater => {
- removed_repositories.push(other_repo.work_directory.to_proto());
- other_repos.next();
- }
- }
+ for (work_dir_path, change) in repo_changes.iter() {
+ let new_repo = self
+ .repository_entries
+ .get(&RepositoryWorkDirectory(work_dir_path.clone()));
+ match (&change.old_repository, new_repo) {
+ (Some(old_repo), Some(new_repo)) => {
+ updated_repositories.push(new_repo.build_update(old_repo));
}
- (Some((_, self_repo)), None) => {
- updated_repositories.push((*self_repo).into());
- self_repos.next();
+ (None, Some(new_repo)) => {
+ updated_repositories.push(proto::RepositoryEntry::from(new_repo));
}
- (None, Some((_, other_repo))) => {
- removed_repositories.push(other_repo.work_directory.to_proto());
- other_repos.next();
+ (Some(old_repo), None) => {
+ removed_repositories.push(old_repo.work_directory.0.to_proto());
}
- (None, None) => break,
+ _ => {}
}
}
+ removed_entries.sort_unstable();
+ updated_entries.sort_unstable_by_key(|e| e.id);
+ removed_repositories.sort_unstable();
+ updated_repositories.sort_unstable_by_key(|e| e.work_directory_id);
+
+ // TODO - optimize, knowing that removed_entries are sorted.
+ removed_entries.retain(|id| updated_entries.binary_search_by_key(id, |e| e.id).is_err());
+
proto::UpdateWorktree {
project_id,
worktree_id,
@@ -1958,6 +1959,35 @@ impl LocalSnapshot {
}
}
+ fn build_initial_update(&self, project_id: u64, worktree_id: u64) -> proto::UpdateWorktree {
+ let mut updated_entries = self
+ .entries_by_path
+ .iter()
+ .map(proto::Entry::from)
+ .collect::<Vec<_>>();
+ updated_entries.sort_unstable_by_key(|e| e.id);
+
+ let mut updated_repositories = self
+ .repository_entries
+ .values()
+ .map(proto::RepositoryEntry::from)
+ .collect::<Vec<_>>();
+ updated_repositories.sort_unstable_by_key(|e| e.work_directory_id);
+
+ proto::UpdateWorktree {
+ project_id,
+ worktree_id,
+ abs_path: self.abs_path().to_string_lossy().into(),
+ root_name: self.root_name().to_string(),
+ updated_entries,
+ removed_entries: Vec::new(),
+ scan_id: self.scan_id as u64,
+ is_last_update: self.completed_scan_id == self.scan_id,
+ updated_repositories,
+ removed_repositories: Vec::new(),
+ }
+ }
+
fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
let abs_path = self.abs_path.join(&entry.path);
@@ -2041,7 +2071,7 @@ impl LocalSnapshot {
self.git_repositories.insert(
work_dir_id,
LocalRepositoryEntry {
- scan_id,
+ work_dir_scan_id: scan_id,
git_dir_scan_id: scan_id,
repo_ptr: repo,
git_dir_path: parent_path.clone(),
@@ -2090,11 +2120,11 @@ impl LocalSnapshot {
}
}
-impl LocalMutableSnapshot {
+impl BackgroundScannerState {
fn reuse_entry_id(&mut self, entry: &mut Entry) {
if let Some(removed_entry_id) = self.removed_entry_ids.remove(&entry.inode) {
entry.id = removed_entry_id;
- } else if let Some(existing_entry) = self.entry_for_path(&entry.path) {
+ } else if let Some(existing_entry) = self.snapshot.entry_for_path(&entry.path) {
entry.id = existing_entry.id;
}
}
@@ -2111,8 +2141,10 @@ impl LocalMutableSnapshot {
ignore: Option<Arc<Gitignore>>,
fs: &dyn Fs,
) {
- let mut parent_entry = if let Some(parent_entry) =
- self.entries_by_path.get(&PathKey(parent_path.clone()), &())
+ let mut parent_entry = if let Some(parent_entry) = self
+ .snapshot
+ .entries_by_path
+ .get(&PathKey(parent_path.clone()), &())
{
parent_entry.clone()
} else {
@@ -2132,13 +2164,14 @@ impl LocalMutableSnapshot {
}
if let Some(ignore) = ignore {
- let abs_parent_path = self.abs_path.join(&parent_path).into();
- self.ignores_by_parent_abs_path
+ let abs_parent_path = self.snapshot.abs_path.join(&parent_path).into();
+ self.snapshot
+ .ignores_by_parent_abs_path
.insert(abs_parent_path, (ignore, false));
}
if parent_path.file_name() == Some(&DOT_GIT) {
- self.build_repo(parent_path, fs);
+ self.snapshot.build_repo(parent_path, fs);
}
let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
@@ -2150,25 +2183,27 @@ impl LocalMutableSnapshot {
id: entry.id,
path: entry.path.clone(),
is_ignored: entry.is_ignored,
- scan_id: self.scan_id,
+ scan_id: self.snapshot.scan_id,
}));
entries_by_path_edits.push(Edit::Insert(entry));
}
- self.entries_by_path.edit(entries_by_path_edits, &());
- self.entries_by_id.edit(entries_by_id_edits, &());
+ self.snapshot
+ .entries_by_path
+ .edit(entries_by_path_edits, &());
+ self.snapshot.entries_by_id.edit(entries_by_id_edits, &());
}
fn remove_path(&mut self, path: &Path) {
let mut new_entries;
let removed_entries;
{
- let mut cursor = self.entries_by_path.cursor::<TraversalProgress>();
+ let mut cursor = self.snapshot.entries_by_path.cursor::<TraversalProgress>();
new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
new_entries.push_tree(cursor.suffix(&()), &());
}
- self.entries_by_path = new_entries;
+ self.snapshot.entries_by_path = new_entries;
let mut entries_by_id_edits = Vec::new();
for entry in removed_entries.cursor::<()>() {
@@ -2179,11 +2214,12 @@ impl LocalMutableSnapshot {
*removed_entry_id = cmp::max(*removed_entry_id, entry.id);
entries_by_id_edits.push(Edit::Remove(entry.id));
}
- self.entries_by_id.edit(entries_by_id_edits, &());
+ self.snapshot.entries_by_id.edit(entries_by_id_edits, &());
if path.file_name() == Some(&GITIGNORE) {
- let abs_parent_path = self.abs_path.join(path.parent().unwrap());
+ let abs_parent_path = self.snapshot.abs_path.join(path.parent().unwrap());
if let Some((_, needs_update)) = self
+ .snapshot
.ignores_by_parent_abs_path
.get_mut(abs_parent_path.as_path())
{
@@ -2473,12 +2509,31 @@ pub enum EntryKind {
#[derive(Clone, Copy, Debug)]
pub enum PathChange {
+    /// A filesystem entry was created.
Added,
+ /// A filesystem entry was removed.
Removed,
+ /// A filesystem entry was updated.
Updated,
+ /// A filesystem entry was either updated or added. We don't know
+ /// whether or not it already existed, because the path had not
+ /// been loaded before the event.
AddedOrUpdated,
+ /// A filesystem entry was found during the initial scan of the worktree.
+ Loaded,
+}
+
+pub struct GitRepositoryChange {
+ /// The previous state of the repository, if it already existed.
+ pub old_repository: Option<RepositoryEntry>,
+ /// Whether the content of the .git directory changed. This will be false
+ /// if only the repository's work directory changed.
+ pub git_dir_changed: bool,
}
+pub type UpdatedEntriesSet = Arc<[(Arc<Path>, ProjectEntryId, PathChange)]>;
+pub type UpdatedGitRepositoriesSet = Arc<[(Arc<Path>, GitRepositoryChange)]>;
+
impl Entry {
fn new(
path: Arc<Path>,
@@ -2635,19 +2690,20 @@ impl<'a> sum_tree::Dimension<'a, EntrySummary> for PathKey {
}
struct BackgroundScanner {
- snapshot: Mutex<LocalMutableSnapshot>,
+ state: Mutex<BackgroundScannerState>,
fs: Arc<dyn Fs>,
status_updates_tx: UnboundedSender<ScanState>,
executor: Arc<executor::Background>,
refresh_requests_rx: channel::Receiver<(Vec<PathBuf>, barrier::Sender)>,
- prev_state: Mutex<BackgroundScannerState>,
next_entry_id: Arc<AtomicUsize>,
- finished_initial_scan: bool,
+ phase: BackgroundScannerPhase,
}
-struct BackgroundScannerState {
- snapshot: Snapshot,
- event_paths: Vec<Arc<Path>>,
+#[derive(PartialEq)]
+enum BackgroundScannerPhase {
+ InitialScan,
+ EventsReceivedDuringInitialScan,
+ Events,
}
impl BackgroundScanner {
@@ -2665,15 +2721,13 @@ impl BackgroundScanner {
executor,
refresh_requests_rx,
next_entry_id,
- prev_state: Mutex::new(BackgroundScannerState {
- snapshot: snapshot.snapshot.clone(),
- event_paths: Default::default(),
- }),
- snapshot: Mutex::new(LocalMutableSnapshot {
+ state: Mutex::new(BackgroundScannerState {
+ prev_snapshot: snapshot.snapshot.clone(),
snapshot,
removed_entry_ids: Default::default(),
+ changed_paths: Default::default(),
}),
- finished_initial_scan: false,
+ phase: BackgroundScannerPhase::InitialScan,
}
}
@@ -2684,7 +2738,7 @@ impl BackgroundScanner {
use futures::FutureExt as _;
let (root_abs_path, root_inode) = {
- let snapshot = self.snapshot.lock();
+ let snapshot = &self.state.lock().snapshot;
(
snapshot.abs_path.clone(),
snapshot.root_entry().map(|e| e.inode),
@@ -2696,20 +2750,23 @@ impl BackgroundScanner {
for ancestor in root_abs_path.ancestors().skip(1) {
if let Ok(ignore) = build_gitignore(&ancestor.join(&*GITIGNORE), self.fs.as_ref()).await
{
- self.snapshot
+ self.state
.lock()
+ .snapshot
.ignores_by_parent_abs_path
.insert(ancestor.into(), (ignore.into(), false));
}
}
{
- let mut snapshot = self.snapshot.lock();
- snapshot.scan_id += 1;
- ignore_stack = snapshot.ignore_stack_for_abs_path(&root_abs_path, true);
+ let mut state = self.state.lock();
+ state.snapshot.scan_id += 1;
+ ignore_stack = state
+ .snapshot
+ .ignore_stack_for_abs_path(&root_abs_path, true);
if ignore_stack.is_all() {
- if let Some(mut root_entry) = snapshot.root_entry().cloned() {
+ if let Some(mut root_entry) = state.snapshot.root_entry().cloned() {
root_entry.is_ignored = true;
- snapshot.insert_entry(root_entry, self.fs.as_ref());
+ state.insert_entry(root_entry, self.fs.as_ref());
}
}
};
@@ -2727,14 +2784,15 @@ impl BackgroundScanner {
drop(scan_job_tx);
self.scan_dirs(true, scan_job_rx).await;
{
- let mut snapshot = self.snapshot.lock();
- snapshot.completed_scan_id = snapshot.scan_id;
+ let mut state = self.state.lock();
+ state.snapshot.completed_scan_id = state.snapshot.scan_id;
}
self.send_status_update(false, None);
// Process any any FS events that occurred while performing the initial scan.
// For these events, update events cannot be as precise, because we didn't
// have the previous state loaded yet.
+ self.phase = BackgroundScannerPhase::EventsReceivedDuringInitialScan;
if let Poll::Ready(Some(events)) = futures::poll!(events_rx.next()) {
let mut paths = events.into_iter().map(|e| e.path).collect::<Vec<_>>();
while let Poll::Ready(Some(more_events)) = futures::poll!(events_rx.next()) {
@@ -2743,9 +2801,8 @@ impl BackgroundScanner {
self.process_events(paths).await;
}
- self.finished_initial_scan = true;
-
// Continue processing events until the worktree is dropped.
+ self.phase = BackgroundScannerPhase::Events;
loop {
select_biased! {
// Process any path refresh requests from the worktree. Prioritize
@@ -2770,15 +2827,7 @@ impl BackgroundScanner {
}
async fn process_refresh_request(&self, paths: Vec<PathBuf>, barrier: barrier::Sender) -> bool {
- if let Some(mut paths) = self.reload_entries_for_paths(paths, None).await {
- paths.sort_unstable();
- util::extend_sorted(
- &mut self.prev_state.lock().event_paths,
- paths,
- usize::MAX,
- Ord::cmp,
- );
- }
+ self.reload_entries_for_paths(paths, None).await;
self.send_status_update(false, Some(barrier))
}
@@ -2787,50 +2836,42 @@ impl BackgroundScanner {
let paths = self
.reload_entries_for_paths(paths, Some(scan_job_tx.clone()))
.await;
- if let Some(paths) = &paths {
- util::extend_sorted(
- &mut self.prev_state.lock().event_paths,
- paths.iter().cloned(),
- usize::MAX,
- Ord::cmp,
- );
- }
drop(scan_job_tx);
self.scan_dirs(false, scan_job_rx).await;
self.update_ignore_statuses().await;
- let mut snapshot = self.snapshot.lock();
+ {
+ let mut snapshot = &mut self.state.lock().snapshot;
- if let Some(paths) = paths {
- for path in paths {
- self.reload_repo_for_file_path(&path, &mut *snapshot, self.fs.as_ref());
+ if let Some(paths) = paths {
+ for path in paths {
+ self.reload_repo_for_file_path(&path, &mut *snapshot, self.fs.as_ref());
+ }
}
- }
- let mut git_repositories = mem::take(&mut snapshot.git_repositories);
- git_repositories.retain(|work_directory_id, _| {
- snapshot
- .entry_for_id(*work_directory_id)
- .map_or(false, |entry| {
- snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
- })
- });
- snapshot.git_repositories = git_repositories;
+ let mut git_repositories = mem::take(&mut snapshot.git_repositories);
+ git_repositories.retain(|work_directory_id, _| {
+ snapshot
+ .entry_for_id(*work_directory_id)
+ .map_or(false, |entry| {
+ snapshot.entry_for_path(entry.path.join(*DOT_GIT)).is_some()
+ })
+ });
+ snapshot.git_repositories = git_repositories;
- let mut git_repository_entries = mem::take(&mut snapshot.snapshot.repository_entries);
- git_repository_entries.retain(|_, entry| {
- snapshot
- .git_repositories
- .get(&entry.work_directory.0)
- .is_some()
- });
- snapshot.snapshot.repository_entries = git_repository_entries;
- snapshot.completed_scan_id = snapshot.scan_id;
- drop(snapshot);
+ let mut git_repository_entries = mem::take(&mut snapshot.snapshot.repository_entries);
+ git_repository_entries.retain(|_, entry| {
+ snapshot
+ .git_repositories
+ .get(&entry.work_directory.0)
+ .is_some()
+ });
+ snapshot.snapshot.repository_entries = git_repository_entries;
+ snapshot.completed_scan_id = snapshot.scan_id;
+ }
self.send_status_update(false, None);
- self.prev_state.lock().event_paths.clear();
}
async fn scan_dirs(
@@ -284,7 +284,7 @@ mod tests {
symbol("uno", "/dir/test.rs"),
];
let fake_server = fake_servers.next().await.unwrap();
- fake_server.handle_request::<lsp::request::WorkspaceSymbol, _, _>(
+ fake_server.handle_request::<lsp::WorkspaceSymbolRequest, _, _>(
move |params: lsp::WorkspaceSymbolParams, cx| {
let executor = cx.background();
let fake_symbols = fake_symbols.clone();
@@ -308,12 +308,12 @@ mod tests {
.await
};
- Ok(Some(
+ Ok(Some(lsp::WorkspaceSymbolResponse::Flat(
matches
.into_iter()
.map(|mat| fake_symbols[mat.candidate_id].clone())
.collect(),
- ))
+ )))
}
},
);
@@ -44,11 +44,11 @@ struct ActiveSearches(HashMap<WeakModelHandle<Project>, WeakViewHandle<ProjectSe
pub fn init(cx: &mut AppContext) {
cx.set_global(ActiveSearches::default());
cx.add_action(ProjectSearchView::deploy);
+ cx.add_action(ProjectSearchView::move_focus_to_results);
cx.add_action(ProjectSearchBar::search);
cx.add_action(ProjectSearchBar::search_in_new);
cx.add_action(ProjectSearchBar::select_next_match);
cx.add_action(ProjectSearchBar::select_prev_match);
- cx.add_action(ProjectSearchBar::move_focus_to_results);
cx.capture_action(ProjectSearchBar::tab);
cx.capture_action(ProjectSearchBar::tab_previous);
add_toggle_option_action::<ToggleCaseSensitive>(SearchOption::CaseSensitive, cx);
@@ -708,6 +708,23 @@ impl ProjectSearchView {
pub fn has_matches(&self) -> bool {
self.active_match_index.is_some()
}
+
+ fn move_focus_to_results(pane: &mut Pane, _: &ToggleFocus, cx: &mut ViewContext<Pane>) {
+ if let Some(search_view) = pane
+ .active_item()
+ .and_then(|item| item.downcast::<ProjectSearchView>())
+ {
+ search_view.update(cx, |search_view, cx| {
+ if !search_view.results_editor.is_focused(cx)
+ && !search_view.model.read(cx).match_ranges.is_empty()
+ {
+ return search_view.focus_results_editor(cx);
+ }
+ });
+ }
+
+ cx.propagate_action();
+ }
}
impl Default for ProjectSearchBar {
@@ -785,23 +802,6 @@ impl ProjectSearchBar {
}
}
- fn move_focus_to_results(pane: &mut Pane, _: &ToggleFocus, cx: &mut ViewContext<Pane>) {
- if let Some(search_view) = pane
- .active_item()
- .and_then(|item| item.downcast::<ProjectSearchView>())
- {
- search_view.update(cx, |search_view, cx| {
- if search_view.query_editor.is_focused(cx)
- && !search_view.model.read(cx).match_ranges.is_empty()
- {
- search_view.focus_results_editor(cx);
- }
- });
- } else {
- cx.propagate_action();
- }
- }
-
fn tab(&mut self, _: &editor::Tab, cx: &mut ViewContext<Self>) {
self.cycle_field(Direction::Next, cx);
}
@@ -1248,7 +1248,182 @@ pub mod tests {
});
}
+ #[gpui::test]
+ async fn test_project_search_focus(deterministic: Arc<Deterministic>, cx: &mut TestAppContext) {
+ init_test(cx);
+
+ let fs = FakeFs::new(cx.background());
+ fs.insert_tree(
+ "/dir",
+ json!({
+ "one.rs": "const ONE: usize = 1;",
+ "two.rs": "const TWO: usize = one::ONE + one::ONE;",
+ "three.rs": "const THREE: usize = one::ONE + two::TWO;",
+ "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
+ }),
+ )
+ .await;
+ let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
+ let (window_id, workspace) = cx.add_window(|cx| Workspace::test_new(project, cx));
+
+ let active_item = cx.read(|cx| {
+ workspace
+ .read(cx)
+ .active_pane()
+ .read(cx)
+ .active_item()
+ .and_then(|item| item.downcast::<ProjectSearchView>())
+ });
+ assert!(
+ active_item.is_none(),
+ "Expected no search panel to be active, but got: {active_item:?}"
+ );
+
+ workspace.update(cx, |workspace, cx| {
+ ProjectSearchView::deploy(workspace, &workspace::NewSearch, cx)
+ });
+
+ let Some(search_view) = cx.read(|cx| {
+ workspace
+ .read(cx)
+ .active_pane()
+ .read(cx)
+ .active_item()
+ .and_then(|item| item.downcast::<ProjectSearchView>())
+ }) else {
+ panic!("Search view expected to appear after new search event trigger")
+ };
+ let search_view_id = search_view.id();
+
+ cx.spawn(
+ |mut cx| async move { cx.dispatch_action(window_id, search_view_id, &ToggleFocus) },
+ )
+ .detach();
+ deterministic.run_until_parked();
+ search_view.update(cx, |search_view, cx| {
+ assert!(
+ search_view.query_editor.is_focused(cx),
+ "Empty search view should be focused after the toggle focus event: no results panel to focus on",
+ );
+ });
+
+ search_view.update(cx, |search_view, cx| {
+ let query_editor = &search_view.query_editor;
+ assert!(
+ query_editor.is_focused(cx),
+ "Search view should be focused after the new search view is activated",
+ );
+ let query_text = query_editor.read(cx).text(cx);
+ assert!(
+ query_text.is_empty(),
+ "New search query should be empty but got '{query_text}'",
+ );
+ let results_text = search_view
+ .results_editor
+ .update(cx, |editor, cx| editor.display_text(cx));
+ assert!(
+ results_text.is_empty(),
+ "Empty search view should have no results but got '{results_text}'"
+ );
+ });
+
+ search_view.update(cx, |search_view, cx| {
+ search_view.query_editor.update(cx, |query_editor, cx| {
+ query_editor.set_text("sOMETHINGtHATsURELYdOESnOTeXIST", cx)
+ });
+ search_view.search(cx);
+ });
+ deterministic.run_until_parked();
+ search_view.update(cx, |search_view, cx| {
+ let results_text = search_view
+ .results_editor
+ .update(cx, |editor, cx| editor.display_text(cx));
+ assert!(
+ results_text.is_empty(),
+ "Search view for mismatching query should have no results but got '{results_text}'"
+ );
+ assert!(
+ search_view.query_editor.is_focused(cx),
+ "Search view should be focused after mismatching query had been used in search",
+ );
+ });
+ cx.spawn(
+ |mut cx| async move { cx.dispatch_action(window_id, search_view_id, &ToggleFocus) },
+ )
+ .detach();
+ deterministic.run_until_parked();
+ search_view.update(cx, |search_view, cx| {
+ assert!(
+ search_view.query_editor.is_focused(cx),
+ "Search view with mismatching query should be focused after the toggle focus event: still no results panel to focus on",
+ );
+ });
+
+ search_view.update(cx, |search_view, cx| {
+ search_view
+ .query_editor
+ .update(cx, |query_editor, cx| query_editor.set_text("TWO", cx));
+ search_view.search(cx);
+ });
+ deterministic.run_until_parked();
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(
+ search_view
+ .results_editor
+ .update(cx, |editor, cx| editor.display_text(cx)),
+ "\n\nconst THREE: usize = one::ONE + two::TWO;\n\n\nconst TWO: usize = one::ONE + one::ONE;",
+ "Search view results should match the query"
+ );
+ assert!(
+ search_view.results_editor.is_focused(cx),
+ "Search view with mismatching query should be focused after search results are available",
+ );
+ });
+ cx.spawn(
+ |mut cx| async move { cx.dispatch_action(window_id, search_view_id, &ToggleFocus) },
+ )
+ .detach();
+ deterministic.run_until_parked();
+ search_view.update(cx, |search_view, cx| {
+ assert!(
+ search_view.results_editor.is_focused(cx),
+ "Search view with matching query should still have its results editor focused after the toggle focus event",
+ );
+ });
+
+ workspace.update(cx, |workspace, cx| {
+ ProjectSearchView::deploy(workspace, &workspace::NewSearch, cx)
+ });
+ search_view.update(cx, |search_view, cx| {
+ assert_eq!(search_view.query_editor.read(cx).text(cx), "two", "Query should be updated to first search result after search view 2nd open in a row");
+ assert_eq!(
+ search_view
+ .results_editor
+ .update(cx, |editor, cx| editor.display_text(cx)),
+ "\n\nconst THREE: usize = one::ONE + two::TWO;\n\n\nconst TWO: usize = one::ONE + one::ONE;",
+ "Results should be unchanged after search view 2nd open in a row"
+ );
+ assert!(
+ search_view.query_editor.is_focused(cx),
+ "Focus should be moved into query editor again after search view 2nd open in a row"
+ );
+ });
+
+ cx.spawn(
+ |mut cx| async move { cx.dispatch_action(window_id, search_view_id, &ToggleFocus) },
+ )
+ .detach();
+ deterministic.run_until_parked();
+ search_view.update(cx, |search_view, cx| {
+ assert!(
+ search_view.results_editor.is_focused(cx),
+ "Search view with matching query should switch focus to the results editor after the toggle focus event",
+ );
+ });
+ }
+
pub fn init_test(cx: &mut TestAppContext) {
+ cx.foreground().forbid_parking();
let fonts = cx.font_cache();
let mut theme = gpui::fonts::with_font_cache(fonts.clone(), theme::Theme::default);
theme.search.match_background = Color::red();
@@ -1266,9 +1441,10 @@ pub mod tests {
language::init(cx);
client::init_settings(cx);
- editor::init_settings(cx);
+ editor::init(cx);
workspace::init_settings(cx);
Project::init_settings(cx);
+ super::init(cx);
});
}
}
@@ -50,6 +50,7 @@ impl TerminalPanel {
let window_id = cx.window_id();
let mut pane = Pane::new(
workspace.weak_handle(),
+ workspace.project().clone(),
workspace.app_state().background_actions,
Default::default(),
cx,
@@ -176,8 +177,9 @@ impl TerminalPanel {
(panel, pane, items)
})?;
+ let pane = pane.downgrade();
let items = futures::future::join_all(items).await;
- workspace.update(&mut cx, |workspace, cx| {
+ pane.update(&mut cx, |pane, cx| {
let active_item_id = serialized_panel
.as_ref()
.and_then(|panel| panel.active_item_id);
@@ -185,17 +187,15 @@ impl TerminalPanel {
for item in items {
if let Some(item) = item.log_err() {
let item_id = item.id();
- Pane::add_item(workspace, &pane, Box::new(item), false, false, None, cx);
+ pane.add_item(Box::new(item), false, false, None, cx);
if Some(item_id) == active_item_id {
- active_ix = Some(pane.read(cx).items_len() - 1);
+ active_ix = Some(pane.items_len() - 1);
}
}
}
if let Some(active_ix) = active_ix {
- pane.update(cx, |pane, cx| {
- pane.activate_item(active_ix, false, false, cx)
- });
+ pane.activate_item(active_ix, false, false, cx)
}
})?;
@@ -240,8 +240,10 @@ impl TerminalPanel {
Box::new(cx.add_view(|cx| {
TerminalView::new(terminal, workspace.database_id(), cx)
}));
- let focus = pane.read(cx).has_focus();
- Pane::add_item(workspace, &pane, terminal, true, focus, None, cx);
+ pane.update(cx, |pane, cx| {
+ let focus = pane.has_focus();
+ pane.add_item(terminal, true, focus, None, cx);
+ });
}
})?;
this.update(&mut cx, |this, cx| this.serialize(cx))?;
@@ -5,7 +5,7 @@ use crate::{
item::WeakItemHandle, toolbar::Toolbar, AutosaveSetting, Item, NewFile, NewSearch, NewTerminal,
ToggleZoom, Workspace, WorkspaceSettings,
};
-use anyhow::{anyhow, Result};
+use anyhow::Result;
use collections::{HashMap, HashSet, VecDeque};
use context_menu::{ContextMenu, ContextMenuItem};
use drag_and_drop::{DragAndDrop, Draggable};
@@ -31,7 +31,7 @@ use std::{
any::Any,
cell::RefCell,
cmp, mem,
- path::Path,
+ path::{Path, PathBuf},
rc::Rc,
sync::{
atomic::{AtomicUsize, Ordering},
@@ -39,7 +39,6 @@ use std::{
},
};
use theme::{Theme, ThemeSettings};
-use util::ResultExt;
#[derive(Clone, Deserialize, PartialEq)]
pub struct ActivateItem(pub usize);
@@ -74,6 +73,8 @@ actions!(
CloseItemsToTheLeft,
CloseItemsToTheRight,
CloseAllItems,
+ GoBack,
+ GoForward,
ReopenClosedItem,
SplitLeft,
SplitUp,
@@ -82,19 +83,7 @@ actions!(
]
);
-#[derive(Clone, Deserialize, PartialEq)]
-pub struct GoBack {
- #[serde(skip_deserializing)]
- pub pane: Option<WeakViewHandle<Pane>>,
-}
-
-#[derive(Clone, Deserialize, PartialEq)]
-pub struct GoForward {
- #[serde(skip_deserializing)]
- pub pane: Option<WeakViewHandle<Pane>>,
-}
-
-impl_actions!(pane, [GoBack, GoForward, ActivateItem]);
+impl_actions!(pane, [ActivateItem]);
const MAX_NAVIGATION_HISTORY_LEN: usize = 1024;
@@ -124,19 +113,11 @@ pub fn init(cx: &mut AppContext) {
cx.add_action(|pane: &mut Pane, _: &SplitUp, cx| pane.split(SplitDirection::Up, cx));
cx.add_action(|pane: &mut Pane, _: &SplitRight, cx| pane.split(SplitDirection::Right, cx));
cx.add_action(|pane: &mut Pane, _: &SplitDown, cx| pane.split(SplitDirection::Down, cx));
- cx.add_action(|workspace: &mut Workspace, _: &ReopenClosedItem, cx| {
- Pane::reopen_closed_item(workspace, cx).detach();
- });
- cx.add_action(|workspace: &mut Workspace, action: &GoBack, cx| {
- Pane::go_back(workspace, action.pane.clone(), cx).detach();
- });
- cx.add_action(|workspace: &mut Workspace, action: &GoForward, cx| {
- Pane::go_forward(workspace, action.pane.clone(), cx).detach();
- });
}
#[derive(Debug)]
pub enum Event {
+ AddItem { item: Box<dyn ItemHandle> },
ActivateItem { local: bool },
Remove,
RemoveItem { item_id: usize },
@@ -155,38 +136,39 @@ pub struct Pane {
active_item_index: usize,
last_focused_view_by_item: HashMap<usize, AnyWeakViewHandle>,
autoscroll: bool,
- nav_history: Rc<RefCell<NavHistory>>,
+ nav_history: NavHistory,
toolbar: ViewHandle<Toolbar>,
tab_bar_context_menu: TabBarContextMenu,
tab_context_menu: ViewHandle<ContextMenu>,
_background_actions: BackgroundActions,
workspace: WeakViewHandle<Workspace>,
+ project: ModelHandle<Project>,
has_focus: bool,
can_drop: Rc<dyn Fn(&DragAndDrop<Workspace>, &WindowContext) -> bool>,
can_split: bool,
- can_navigate: bool,
render_tab_bar_buttons: Rc<dyn Fn(&mut Pane, &mut ViewContext<Pane>) -> AnyElement<Pane>>,
}
pub struct ItemNavHistory {
- history: Rc<RefCell<NavHistory>>,
+ history: NavHistory,
item: Rc<dyn WeakItemHandle>,
}
-pub struct PaneNavHistory(Rc<RefCell<NavHistory>>);
+#[derive(Clone)]
+pub struct NavHistory(Rc<RefCell<NavHistoryState>>);
-struct NavHistory {
+struct NavHistoryState {
mode: NavigationMode,
backward_stack: VecDeque<NavigationEntry>,
forward_stack: VecDeque<NavigationEntry>,
closed_stack: VecDeque<NavigationEntry>,
- paths_by_item: HashMap<usize, ProjectPath>,
+ paths_by_item: HashMap<usize, (ProjectPath, Option<PathBuf>)>,
pane: WeakViewHandle<Pane>,
next_timestamp: Arc<AtomicUsize>,
}
#[derive(Copy, Clone)]
-enum NavigationMode {
+pub enum NavigationMode {
Normal,
GoingBack,
GoingForward,
@@ -241,6 +223,7 @@ impl TabBarContextMenu {
impl Pane {
pub fn new(
workspace: WeakViewHandle<Workspace>,
+ project: ModelHandle<Project>,
background_actions: BackgroundActions,
next_timestamp: Arc<AtomicUsize>,
cx: &mut ViewContext<Self>,
@@ -260,7 +243,7 @@ impl Pane {
active_item_index: 0,
last_focused_view_by_item: Default::default(),
autoscroll: false,
- nav_history: Rc::new(RefCell::new(NavHistory {
+ nav_history: NavHistory(Rc::new(RefCell::new(NavHistoryState {
mode: NavigationMode::Normal,
backward_stack: Default::default(),
forward_stack: Default::default(),
@@ -268,7 +251,7 @@ impl Pane {
paths_by_item: Default::default(),
pane: handle.clone(),
next_timestamp,
- })),
+ }))),
toolbar: cx.add_view(|_| Toolbar::new(handle)),
tab_bar_context_menu: TabBarContextMenu {
kind: TabBarContextMenuKind::New,
@@ -277,10 +260,10 @@ impl Pane {
tab_context_menu: cx.add_view(|cx| ContextMenu::new(pane_view_id, cx)),
_background_actions: background_actions,
workspace,
+ project,
has_focus: false,
can_drop: Rc::new(|_, _| true),
can_split: true,
- can_navigate: true,
render_tab_bar_buttons: Rc::new(|pane, cx| {
Flex::row()
// New menu
@@ -336,6 +319,10 @@ impl Pane {
self.has_focus
}
+ pub fn active_item_index(&self) -> usize {
+ self.active_item_index
+ }
+
pub fn on_can_drop<F>(&mut self, can_drop: F)
where
F: 'static + Fn(&DragAndDrop<Workspace>, &WindowContext) -> bool,
@@ -349,7 +336,6 @@ impl Pane {
}
pub fn set_can_navigate(&mut self, can_navigate: bool, cx: &mut ViewContext<Self>) {
- self.can_navigate = can_navigate;
self.toolbar.update(cx, |toolbar, cx| {
toolbar.set_can_navigate(can_navigate, cx);
});
@@ -371,241 +357,94 @@ impl Pane {
}
}
- pub fn nav_history(&self) -> PaneNavHistory {
- PaneNavHistory(self.nav_history.clone())
+ pub fn nav_history(&self) -> &NavHistory {
+ &self.nav_history
}
- pub fn go_back(
- workspace: &mut Workspace,
- pane: Option<WeakViewHandle<Pane>>,
- cx: &mut ViewContext<Workspace>,
- ) -> Task<Result<()>> {
- Self::navigate_history(
- workspace,
- pane.unwrap_or_else(|| workspace.active_pane().downgrade()),
- NavigationMode::GoingBack,
- cx,
- )
- }
-
- pub fn go_forward(
- workspace: &mut Workspace,
- pane: Option<WeakViewHandle<Pane>>,
- cx: &mut ViewContext<Workspace>,
- ) -> Task<Result<()>> {
- Self::navigate_history(
- workspace,
- pane.unwrap_or_else(|| workspace.active_pane().downgrade()),
- NavigationMode::GoingForward,
- cx,
- )
- }
-
- pub fn reopen_closed_item(
- workspace: &mut Workspace,
- cx: &mut ViewContext<Workspace>,
- ) -> Task<Result<()>> {
- Self::navigate_history(
- workspace,
- workspace.active_pane().downgrade(),
- NavigationMode::ReopeningClosedItem,
- cx,
- )
+ pub fn nav_history_mut(&mut self) -> &mut NavHistory {
+ &mut self.nav_history
}
pub fn disable_history(&mut self) {
- self.nav_history.borrow_mut().disable();
+ self.nav_history.disable();
}
pub fn enable_history(&mut self) {
- self.nav_history.borrow_mut().enable();
+ self.nav_history.enable();
}
pub fn can_navigate_backward(&self) -> bool {
- !self.nav_history.borrow().backward_stack.is_empty()
+ !self.nav_history.0.borrow().backward_stack.is_empty()
}
pub fn can_navigate_forward(&self) -> bool {
- !self.nav_history.borrow().forward_stack.is_empty()
+ !self.nav_history.0.borrow().forward_stack.is_empty()
}
fn history_updated(&mut self, cx: &mut ViewContext<Self>) {
self.toolbar.update(cx, |_, cx| cx.notify());
}
- fn navigate_history(
- workspace: &mut Workspace,
- pane: WeakViewHandle<Pane>,
- mode: NavigationMode,
- cx: &mut ViewContext<Workspace>,
- ) -> Task<Result<()>> {
- let to_load = if let Some(pane) = pane.upgrade(cx) {
- if !pane.read(cx).can_navigate {
- return Task::ready(Ok(()));
- }
-
- cx.focus(&pane);
-
- pane.update(cx, |pane, cx| {
- loop {
- // Retrieve the weak item handle from the history.
- let entry = pane.nav_history.borrow_mut().pop(mode, cx)?;
-
- // If the item is still present in this pane, then activate it.
- if let Some(index) = entry
- .item
- .upgrade(cx)
- .and_then(|v| pane.index_for_item(v.as_ref()))
- {
- let prev_active_item_index = pane.active_item_index;
- pane.nav_history.borrow_mut().set_mode(mode);
- pane.activate_item(index, true, true, cx);
- pane.nav_history
- .borrow_mut()
- .set_mode(NavigationMode::Normal);
-
- let mut navigated = prev_active_item_index != pane.active_item_index;
- if let Some(data) = entry.data {
- navigated |= pane.active_item()?.navigate(data, cx);
- }
-
- if navigated {
- break None;
- }
- }
- // If the item is no longer present in this pane, then retrieve its
- // project path in order to reopen it.
- else {
- break pane
- .nav_history
- .borrow()
- .paths_by_item
- .get(&entry.item.id())
- .cloned()
- .map(|project_path| (project_path, entry));
- }
- }
- })
- } else {
- None
- };
-
- if let Some((project_path, entry)) = to_load {
- // If the item was no longer present, then load it again from its previous path.
- let task = workspace.load_path(project_path, cx);
- cx.spawn(|workspace, mut cx| async move {
- let task = task.await;
- let mut navigated = false;
- if let Some((project_entry_id, build_item)) = task.log_err() {
- let prev_active_item_id = pane.update(&mut cx, |pane, _| {
- pane.nav_history.borrow_mut().set_mode(mode);
- pane.active_item().map(|p| p.id())
- })?;
-
- let item = workspace.update(&mut cx, |workspace, cx| {
- let pane = pane
- .upgrade(cx)
- .ok_or_else(|| anyhow!("pane was dropped"))?;
- anyhow::Ok(Self::open_item(
- workspace,
- pane.clone(),
- project_entry_id,
- true,
- cx,
- build_item,
- ))
- })??;
-
- pane.update(&mut cx, |pane, cx| {
- navigated |= Some(item.id()) != prev_active_item_id;
- pane.nav_history
- .borrow_mut()
- .set_mode(NavigationMode::Normal);
- if let Some(data) = entry.data {
- navigated |= item.navigate(data, cx);
- }
- })?;
- }
-
- if !navigated {
- workspace
- .update(&mut cx, |workspace, cx| {
- Self::navigate_history(workspace, pane, mode, cx)
- })?
- .await?;
- }
-
- Ok(())
- })
- } else {
- Task::ready(Ok(()))
- }
- }
-
pub(crate) fn open_item(
- workspace: &mut Workspace,
- pane: ViewHandle<Pane>,
+ &mut self,
project_entry_id: ProjectEntryId,
focus_item: bool,
- cx: &mut ViewContext<Workspace>,
+ cx: &mut ViewContext<Self>,
build_item: impl FnOnce(&mut ViewContext<Pane>) -> Box<dyn ItemHandle>,
) -> Box<dyn ItemHandle> {
- let existing_item = pane.update(cx, |pane, cx| {
- for (index, item) in pane.items.iter().enumerate() {
- if item.is_singleton(cx)
- && item.project_entry_ids(cx).as_slice() == [project_entry_id]
- {
- let item = item.boxed_clone();
- return Some((index, item));
- }
+ let mut existing_item = None;
+ for (index, item) in self.items.iter().enumerate() {
+ if item.is_singleton(cx) && item.project_entry_ids(cx).as_slice() == [project_entry_id]
+ {
+ let item = item.boxed_clone();
+ existing_item = Some((index, item));
+ break;
}
- None
- });
+ }
if let Some((index, existing_item)) = existing_item {
- pane.update(cx, |pane, cx| {
- pane.activate_item(index, focus_item, focus_item, cx);
- });
+ self.activate_item(index, focus_item, focus_item, cx);
existing_item
} else {
- let new_item = pane.update(cx, |_, cx| build_item(cx));
- Pane::add_item(
- workspace,
- &pane,
- new_item.clone(),
- true,
- focus_item,
- None,
- cx,
- );
+ let new_item = build_item(cx);
+ self.add_item(new_item.clone(), true, focus_item, None, cx);
new_item
}
}
pub fn add_item(
- workspace: &mut Workspace,
- pane: &ViewHandle<Pane>,
+ &mut self,
item: Box<dyn ItemHandle>,
activate_pane: bool,
focus_item: bool,
destination_index: Option<usize>,
- cx: &mut ViewContext<Workspace>,
+ cx: &mut ViewContext<Self>,
) {
+ if item.is_singleton(cx) {
+ if let Some(&entry_id) = item.project_entry_ids(cx).get(0) {
+ let project = self.project.read(cx);
+ if let Some(project_path) = project.path_for_entry(entry_id, cx) {
+ let abs_path = project.absolute_path(&project_path, cx);
+ self.nav_history
+ .0
+ .borrow_mut()
+ .paths_by_item
+ .insert(item.id(), (project_path, abs_path));
+ }
+ }
+ }
// If no destination index is specified, add or move the item after the active item.
let mut insertion_index = {
- let pane = pane.read(cx);
cmp::min(
if let Some(destination_index) = destination_index {
destination_index
} else {
- pane.active_item_index + 1
+ self.active_item_index + 1
},
- pane.items.len(),
+ self.items.len(),
)
};
- item.added_to_pane(workspace, pane.clone(), cx);
-
// Does the item already exist?
let project_entry_id = if item.is_singleton(cx) {
item.project_entry_ids(cx).get(0).copied()
@@ -613,7 +452,7 @@ impl Pane {
None
};
- let existing_item_index = pane.read(cx).items.iter().position(|existing_item| {
+ let existing_item_index = self.items.iter().position(|existing_item| {
if existing_item.id() == item.id() {
true
} else if existing_item.is_singleton(cx) {
@@ -630,46 +469,45 @@ impl Pane {
if let Some(existing_item_index) = existing_item_index {
// If the item already exists, move it to the desired destination and activate it
- pane.update(cx, |pane, cx| {
- if existing_item_index != insertion_index {
- let existing_item_is_active = existing_item_index == pane.active_item_index;
-
- // If the caller didn't specify a destination and the added item is already
- // the active one, don't move it
- if existing_item_is_active && destination_index.is_none() {
- insertion_index = existing_item_index;
- } else {
- pane.items.remove(existing_item_index);
- if existing_item_index < pane.active_item_index {
- pane.active_item_index -= 1;
- }
- insertion_index = insertion_index.min(pane.items.len());
- pane.items.insert(insertion_index, item.clone());
+ if existing_item_index != insertion_index {
+ let existing_item_is_active = existing_item_index == self.active_item_index;
- if existing_item_is_active {
- pane.active_item_index = insertion_index;
- } else if insertion_index <= pane.active_item_index {
- pane.active_item_index += 1;
- }
+ // If the caller didn't specify a destination and the added item is already
+ // the active one, don't move it
+ if existing_item_is_active && destination_index.is_none() {
+ insertion_index = existing_item_index;
+ } else {
+ self.items.remove(existing_item_index);
+ if existing_item_index < self.active_item_index {
+ self.active_item_index -= 1;
}
+ insertion_index = insertion_index.min(self.items.len());
- cx.notify();
- }
+ self.items.insert(insertion_index, item.clone());
- pane.activate_item(insertion_index, activate_pane, focus_item, cx);
- });
- } else {
- pane.update(cx, |pane, cx| {
- pane.items.insert(insertion_index, item);
- if insertion_index <= pane.active_item_index {
- pane.active_item_index += 1;
+ if existing_item_is_active {
+ self.active_item_index = insertion_index;
+ } else if insertion_index <= self.active_item_index {
+ self.active_item_index += 1;
+ }
}
- pane.activate_item(insertion_index, activate_pane, focus_item, cx);
cx.notify();
- });
+ }
+
+ self.activate_item(insertion_index, activate_pane, focus_item, cx);
+ } else {
+ self.items.insert(insertion_index, item.clone());
+ if insertion_index <= self.active_item_index {
+ self.active_item_index += 1;
+ }
+
+ self.activate_item(insertion_index, activate_pane, focus_item, cx);
+ cx.notify();
}
+
+ cx.emit(Event::AddItem { item });
}
pub fn items_len(&self) -> usize {
@@ -731,7 +569,7 @@ impl Pane {
if index < self.items.len() {
let prev_active_item_ix = mem::replace(&mut self.active_item_index, index);
if prev_active_item_ix != self.active_item_index
- || matches!(self.nav_history.borrow().mode, GoingBack | GoingForward)
+ || matches!(self.nav_history.mode(), GoingBack | GoingForward)
{
if let Some(prev_item) = self.items.get(prev_active_item_ix) {
prev_item.deactivated(cx);
@@ -960,7 +798,12 @@ impl Pane {
})
}
- fn remove_item(&mut self, item_index: usize, activate_pane: bool, cx: &mut ViewContext<Self>) {
+ pub fn remove_item(
+ &mut self,
+ item_index: usize,
+ activate_pane: bool,
+ cx: &mut ViewContext<Self>,
+ ) {
self.activation_history
.retain(|&history_entry| history_entry != self.items[item_index].id());
@@ -994,21 +837,26 @@ impl Pane {
self.active_item_index -= 1;
}
- self.nav_history
- .borrow_mut()
- .set_mode(NavigationMode::ClosingItem);
+ self.nav_history.set_mode(NavigationMode::ClosingItem);
item.deactivated(cx);
- self.nav_history
- .borrow_mut()
- .set_mode(NavigationMode::Normal);
+ self.nav_history.set_mode(NavigationMode::Normal);
if let Some(path) = item.project_path(cx) {
+ let abs_path = self
+ .nav_history
+ .0
+ .borrow()
+ .paths_by_item
+ .get(&item.id())
+ .and_then(|(_, abs_path)| abs_path.clone());
self.nav_history
+ .0
.borrow_mut()
.paths_by_item
- .insert(item.id(), path);
+ .insert(item.id(), (path, abs_path));
} else {
self.nav_history
+ .0
.borrow_mut()
.paths_by_item
.remove(&item.id());
@@ -1128,48 +976,6 @@ impl Pane {
}
}
- pub fn move_item(
- workspace: &mut Workspace,
- from: ViewHandle<Pane>,
- to: ViewHandle<Pane>,
- item_id_to_move: usize,
- destination_index: usize,
- cx: &mut ViewContext<Workspace>,
- ) {
- let item_to_move = from
- .read(cx)
- .items()
- .enumerate()
- .find(|(_, item_handle)| item_handle.id() == item_id_to_move);
-
- if item_to_move.is_none() {
- log::warn!("Tried to move item handle which was not in `from` pane. Maybe tab was closed during drop");
- return;
- }
- let (item_ix, item_handle) = item_to_move.unwrap();
- let item_handle = item_handle.clone();
-
- if from != to {
- // Close item from previous pane
- from.update(cx, |from, cx| {
- from.remove_item(item_ix, false, cx);
- });
- }
-
- // This automatically removes duplicate items in the pane
- Pane::add_item(
- workspace,
- &to,
- item_handle,
- true,
- true,
- Some(destination_index),
- cx,
- );
-
- cx.focus(&to);
- }
-
pub fn split(&mut self, direction: SplitDirection, cx: &mut ViewContext<Self>) {
cx.emit(Event::Split(direction));
}
@@ -1298,7 +1104,7 @@ impl Pane {
})?;
self.remove_item(item_index_to_delete, false, cx);
- self.nav_history.borrow_mut().remove_item(item_id);
+ self.nav_history.remove_item(item_id);
Some(())
}
@@ -1768,7 +1574,7 @@ impl View for Pane {
let pane = cx.weak_handle();
cx.window_context().defer(move |cx| {
workspace.update(cx, |workspace, cx| {
- Pane::go_back(workspace, Some(pane), cx).detach_and_log_err(cx)
+ workspace.go_back(pane, cx).detach_and_log_err(cx)
})
})
}
@@ -1780,7 +1586,7 @@ impl View for Pane {
let pane = cx.weak_handle();
cx.window_context().defer(move |cx| {
workspace.update(cx, |workspace, cx| {
- Pane::go_forward(workspace, Some(pane), cx).detach_and_log_err(cx)
+ workspace.go_forward(pane, cx).detach_and_log_err(cx)
})
})
}
@@ -1835,143 +1641,157 @@ impl View for Pane {
}
impl ItemNavHistory {
- pub fn push<D: 'static + Any>(&self, data: Option<D>, cx: &mut WindowContext) {
- self.history.borrow_mut().push(data, self.item.clone(), cx);
+ pub fn push<D: 'static + Any>(&mut self, data: Option<D>, cx: &mut WindowContext) {
+ self.history.push(data, self.item.clone(), cx);
}
- pub fn pop_backward(&self, cx: &mut WindowContext) -> Option<NavigationEntry> {
- self.history.borrow_mut().pop(NavigationMode::GoingBack, cx)
+ pub fn pop_backward(&mut self, cx: &mut WindowContext) -> Option<NavigationEntry> {
+ self.history.pop(NavigationMode::GoingBack, cx)
}
- pub fn pop_forward(&self, cx: &mut WindowContext) -> Option<NavigationEntry> {
- self.history
- .borrow_mut()
- .pop(NavigationMode::GoingForward, cx)
+ pub fn pop_forward(&mut self, cx: &mut WindowContext) -> Option<NavigationEntry> {
+ self.history.pop(NavigationMode::GoingForward, cx)
}
}
impl NavHistory {
- fn set_mode(&mut self, mode: NavigationMode) {
- self.mode = mode;
+ pub fn for_each_entry(
+ &self,
+ cx: &AppContext,
+ mut f: impl FnMut(&NavigationEntry, (ProjectPath, Option<PathBuf>)),
+ ) {
+ let borrowed_history = self.0.borrow();
+ borrowed_history
+ .forward_stack
+ .iter()
+ .chain(borrowed_history.backward_stack.iter())
+ .chain(borrowed_history.closed_stack.iter())
+ .for_each(|entry| {
+ if let Some(project_and_abs_path) =
+ borrowed_history.paths_by_item.get(&entry.item.id())
+ {
+ f(entry, project_and_abs_path.clone());
+ } else if let Some(item) = entry.item.upgrade(cx) {
+ if let Some(path) = item.project_path(cx) {
+ f(entry, (path, None));
+ }
+ }
+ })
}
- fn disable(&mut self) {
- self.mode = NavigationMode::Disabled;
+ pub fn set_mode(&mut self, mode: NavigationMode) {
+ self.0.borrow_mut().mode = mode;
}
- fn enable(&mut self) {
- self.mode = NavigationMode::Normal;
+ pub fn mode(&self) -> NavigationMode {
+ self.0.borrow().mode
}
- fn pop(&mut self, mode: NavigationMode, cx: &mut WindowContext) -> Option<NavigationEntry> {
+ pub fn disable(&mut self) {
+ self.0.borrow_mut().mode = NavigationMode::Disabled;
+ }
+
+ pub fn enable(&mut self) {
+ self.0.borrow_mut().mode = NavigationMode::Normal;
+ }
+
+ pub fn pop(&mut self, mode: NavigationMode, cx: &mut WindowContext) -> Option<NavigationEntry> {
+ let mut state = self.0.borrow_mut();
let entry = match mode {
NavigationMode::Normal | NavigationMode::Disabled | NavigationMode::ClosingItem => {
return None
}
- NavigationMode::GoingBack => &mut self.backward_stack,
- NavigationMode::GoingForward => &mut self.forward_stack,
- NavigationMode::ReopeningClosedItem => &mut self.closed_stack,
+ NavigationMode::GoingBack => &mut state.backward_stack,
+ NavigationMode::GoingForward => &mut state.forward_stack,
+ NavigationMode::ReopeningClosedItem => &mut state.closed_stack,
}
.pop_back();
if entry.is_some() {
- self.did_update(cx);
+ state.did_update(cx);
}
entry
}
- fn push<D: 'static + Any>(
+ pub fn push<D: 'static + Any>(
&mut self,
data: Option<D>,
item: Rc<dyn WeakItemHandle>,
cx: &mut WindowContext,
) {
- match self.mode {
+ let state = &mut *self.0.borrow_mut();
+ match state.mode {
NavigationMode::Disabled => {}
NavigationMode::Normal | NavigationMode::ReopeningClosedItem => {
- if self.backward_stack.len() >= MAX_NAVIGATION_HISTORY_LEN {
- self.backward_stack.pop_front();
+ if state.backward_stack.len() >= MAX_NAVIGATION_HISTORY_LEN {
+ state.backward_stack.pop_front();
}
- self.backward_stack.push_back(NavigationEntry {
+ state.backward_stack.push_back(NavigationEntry {
item,
data: data.map(|data| Box::new(data) as Box<dyn Any>),
- timestamp: self.next_timestamp.fetch_add(1, Ordering::SeqCst),
+ timestamp: state.next_timestamp.fetch_add(1, Ordering::SeqCst),
});
- self.forward_stack.clear();
+ state.forward_stack.clear();
}
NavigationMode::GoingBack => {
- if self.forward_stack.len() >= MAX_NAVIGATION_HISTORY_LEN {
- self.forward_stack.pop_front();
+ if state.forward_stack.len() >= MAX_NAVIGATION_HISTORY_LEN {
+ state.forward_stack.pop_front();
}
- self.forward_stack.push_back(NavigationEntry {
+ state.forward_stack.push_back(NavigationEntry {
item,
data: data.map(|data| Box::new(data) as Box<dyn Any>),
- timestamp: self.next_timestamp.fetch_add(1, Ordering::SeqCst),
+ timestamp: state.next_timestamp.fetch_add(1, Ordering::SeqCst),
});
}
NavigationMode::GoingForward => {
- if self.backward_stack.len() >= MAX_NAVIGATION_HISTORY_LEN {
- self.backward_stack.pop_front();
+ if state.backward_stack.len() >= MAX_NAVIGATION_HISTORY_LEN {
+ state.backward_stack.pop_front();
}
- self.backward_stack.push_back(NavigationEntry {
+ state.backward_stack.push_back(NavigationEntry {
item,
data: data.map(|data| Box::new(data) as Box<dyn Any>),
- timestamp: self.next_timestamp.fetch_add(1, Ordering::SeqCst),
+ timestamp: state.next_timestamp.fetch_add(1, Ordering::SeqCst),
});
}
NavigationMode::ClosingItem => {
- if self.closed_stack.len() >= MAX_NAVIGATION_HISTORY_LEN {
- self.closed_stack.pop_front();
+ if state.closed_stack.len() >= MAX_NAVIGATION_HISTORY_LEN {
+ state.closed_stack.pop_front();
}
- self.closed_stack.push_back(NavigationEntry {
+ state.closed_stack.push_back(NavigationEntry {
item,
data: data.map(|data| Box::new(data) as Box<dyn Any>),
- timestamp: self.next_timestamp.fetch_add(1, Ordering::SeqCst),
+ timestamp: state.next_timestamp.fetch_add(1, Ordering::SeqCst),
});
}
}
- self.did_update(cx);
+ state.did_update(cx);
}
- fn did_update(&self, cx: &mut WindowContext) {
- if let Some(pane) = self.pane.upgrade(cx) {
- cx.defer(move |cx| {
- pane.update(cx, |pane, cx| pane.history_updated(cx));
- });
- }
- }
-
- fn remove_item(&mut self, item_id: usize) {
- self.paths_by_item.remove(&item_id);
- self.backward_stack
+ pub fn remove_item(&mut self, item_id: usize) {
+ let mut state = self.0.borrow_mut();
+ state.paths_by_item.remove(&item_id);
+ state
+ .backward_stack
.retain(|entry| entry.item.id() != item_id);
- self.forward_stack
+ state
+ .forward_stack
+ .retain(|entry| entry.item.id() != item_id);
+ state
+ .closed_stack
.retain(|entry| entry.item.id() != item_id);
- self.closed_stack.retain(|entry| entry.item.id() != item_id);
+ }
+
+ pub fn path_for_item(&self, item_id: usize) -> Option<(ProjectPath, Option<PathBuf>)> {
+ self.0.borrow().paths_by_item.get(&item_id).cloned()
}
}
-impl PaneNavHistory {
- pub fn for_each_entry(
- &self,
- cx: &AppContext,
- mut f: impl FnMut(&NavigationEntry, ProjectPath),
- ) {
- let borrowed_history = self.0.borrow();
- borrowed_history
- .forward_stack
- .iter()
- .chain(borrowed_history.backward_stack.iter())
- .chain(borrowed_history.closed_stack.iter())
- .for_each(|entry| {
- if let Some(path) = borrowed_history.paths_by_item.get(&entry.item.id()) {
- f(entry, path.clone());
- } else if let Some(item) = entry.item.upgrade(cx) {
- let path = item.project_path(cx);
- if let Some(path) = path {
- f(entry, path);
- }
- }
- })
+impl NavHistoryState {
+ pub fn did_update(&self, cx: &mut WindowContext) {
+ if let Some(pane) = self.pane.upgrade(cx) {
+ cx.defer(move |cx| {
+ pane.update(cx, |pane, cx| pane.history_updated(cx));
+ });
+ }
}
}
@@ -2103,11 +1923,9 @@ mod tests {
// 1. Add with a destination index
// a. Add before the active item
- set_labeled_items(&workspace, &pane, ["A", "B*", "C"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(
- workspace,
- &pane,
+ set_labeled_items(&pane, ["A", "B*", "C"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(
Box::new(cx.add_view(|_| TestItem::new().with_label("D"))),
false,
false,
@@ -2118,11 +1936,9 @@ mod tests {
assert_item_labels(&pane, ["D*", "A", "B", "C"], cx);
// b. Add after the active item
- set_labeled_items(&workspace, &pane, ["A", "B*", "C"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(
- workspace,
- &pane,
+ set_labeled_items(&pane, ["A", "B*", "C"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(
Box::new(cx.add_view(|_| TestItem::new().with_label("D"))),
false,
false,
@@ -2133,11 +1949,9 @@ mod tests {
assert_item_labels(&pane, ["A", "B", "D*", "C"], cx);
// c. Add at the end of the item list (including off the length)
- set_labeled_items(&workspace, &pane, ["A", "B*", "C"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(
- workspace,
- &pane,
+ set_labeled_items(&pane, ["A", "B*", "C"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(
Box::new(cx.add_view(|_| TestItem::new().with_label("D"))),
false,
false,
@@ -2149,11 +1963,9 @@ mod tests {
// 2. Add without a destination index
// a. Add with active item at the start of the item list
- set_labeled_items(&workspace, &pane, ["A*", "B", "C"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(
- workspace,
- &pane,
+ set_labeled_items(&pane, ["A*", "B", "C"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(
Box::new(cx.add_view(|_| TestItem::new().with_label("D"))),
false,
false,
@@ -2161,14 +1973,12 @@ mod tests {
cx,
);
});
- set_labeled_items(&workspace, &pane, ["A", "D*", "B", "C"], cx);
+ set_labeled_items(&pane, ["A", "D*", "B", "C"], cx);
// b. Add with active item at the end of the item list
- set_labeled_items(&workspace, &pane, ["A", "B", "C*"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(
- workspace,
- &pane,
+ set_labeled_items(&pane, ["A", "B", "C*"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(
Box::new(cx.add_view(|_| TestItem::new().with_label("D"))),
false,
false,
@@ -2191,66 +2001,66 @@ mod tests {
// 1. Add with a destination index
// 1a. Add before the active item
- let [_, _, _, d] = set_labeled_items(&workspace, &pane, ["A", "B*", "C", "D"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(workspace, &pane, d, false, false, Some(0), cx);
+ let [_, _, _, d] = set_labeled_items(&pane, ["A", "B*", "C", "D"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(d, false, false, Some(0), cx);
});
assert_item_labels(&pane, ["D*", "A", "B", "C"], cx);
// 1b. Add after the active item
- let [_, _, _, d] = set_labeled_items(&workspace, &pane, ["A", "B*", "C", "D"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(workspace, &pane, d, false, false, Some(2), cx);
+ let [_, _, _, d] = set_labeled_items(&pane, ["A", "B*", "C", "D"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(d, false, false, Some(2), cx);
});
assert_item_labels(&pane, ["A", "B", "D*", "C"], cx);
// 1c. Add at the end of the item list (including off the length)
- let [a, _, _, _] = set_labeled_items(&workspace, &pane, ["A", "B*", "C", "D"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(workspace, &pane, a, false, false, Some(5), cx);
+ let [a, _, _, _] = set_labeled_items(&pane, ["A", "B*", "C", "D"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(a, false, false, Some(5), cx);
});
assert_item_labels(&pane, ["B", "C", "D", "A*"], cx);
// 1d. Add same item to active index
- let [_, b, _] = set_labeled_items(&workspace, &pane, ["A", "B*", "C"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(workspace, &pane, b, false, false, Some(1), cx);
+ let [_, b, _] = set_labeled_items(&pane, ["A", "B*", "C"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(b, false, false, Some(1), cx);
});
assert_item_labels(&pane, ["A", "B*", "C"], cx);
// 1e. Add item to index after same item in last position
- let [_, _, c] = set_labeled_items(&workspace, &pane, ["A", "B*", "C"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(workspace, &pane, c, false, false, Some(2), cx);
+ let [_, _, c] = set_labeled_items(&pane, ["A", "B*", "C"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(c, false, false, Some(2), cx);
});
assert_item_labels(&pane, ["A", "B", "C*"], cx);
// 2. Add without a destination index
// 2a. Add with active item at the start of the item list
- let [_, _, _, d] = set_labeled_items(&workspace, &pane, ["A*", "B", "C", "D"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(workspace, &pane, d, false, false, None, cx);
+ let [_, _, _, d] = set_labeled_items(&pane, ["A*", "B", "C", "D"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(d, false, false, None, cx);
});
assert_item_labels(&pane, ["A", "D*", "B", "C"], cx);
// 2b. Add with active item at the end of the item list
- let [a, _, _, _] = set_labeled_items(&workspace, &pane, ["A", "B", "C", "D*"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(workspace, &pane, a, false, false, None, cx);
+ let [a, _, _, _] = set_labeled_items(&pane, ["A", "B", "C", "D*"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(a, false, false, None, cx);
});
assert_item_labels(&pane, ["B", "C", "D", "A*"], cx);
// 2c. Add active item to active item at end of list
- let [_, _, c] = set_labeled_items(&workspace, &pane, ["A", "B", "C*"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(workspace, &pane, c, false, false, None, cx);
+ let [_, _, c] = set_labeled_items(&pane, ["A", "B", "C*"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(c, false, false, None, cx);
});
assert_item_labels(&pane, ["A", "B", "C*"], cx);
// 2d. Add active item to active item at start of list
- let [a, _, _] = set_labeled_items(&workspace, &pane, ["A*", "B", "C"], cx);
- workspace.update(cx, |workspace, cx| {
- Pane::add_item(workspace, &pane, a, false, false, None, cx);
+ let [a, _, _] = set_labeled_items(&pane, ["A*", "B", "C"], cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(a, false, false, None, cx);
});
assert_item_labels(&pane, ["A*", "B", "C"], cx);
}
@@ -183,7 +183,7 @@ pub fn handle_dropped_item<V: View>(
.zip(pane.upgrade(cx))
{
workspace.update(cx, |workspace, cx| {
- Pane::move_item(workspace, from, to, item_id, index, cx);
+ workspace.move_item(from, to, item_id, index, cx);
})
}
});
@@ -1,5 +1,5 @@
use crate::{item::ItemHandle, ItemDeserializers, Member, Pane, PaneAxis, Workspace, WorkspaceId};
-use anyhow::{anyhow, Context, Result};
+use anyhow::{Context, Result};
use async_recursion::async_recursion;
use db::sqlez::{
bindable::{Bind, Column, StaticColumnCount},
@@ -230,7 +230,7 @@ impl SerializedPane {
pub async fn deserialize_to(
&self,
project: &ModelHandle<Project>,
- pane_handle: &WeakViewHandle<Pane>,
+ pane: &WeakViewHandle<Pane>,
workspace_id: WorkspaceId,
workspace: &WeakViewHandle<Workspace>,
cx: &mut AsyncAppContext,
@@ -239,7 +239,7 @@ impl SerializedPane {
let mut active_item_index = None;
for (index, item) in self.children.iter().enumerate() {
let project = project.clone();
- let item_handle = pane_handle
+ let item_handle = pane
.update(cx, |_, cx| {
if let Some(deserializer) = cx.global::<ItemDeserializers>().get(&item.kind) {
deserializer(project, workspace.clone(), workspace_id, item.item_id, cx)
@@ -256,13 +256,9 @@ impl SerializedPane {
items.push(item_handle.clone());
if let Some(item_handle) = item_handle {
- workspace.update(cx, |workspace, cx| {
- let pane_handle = pane_handle
- .upgrade(cx)
- .ok_or_else(|| anyhow!("pane was dropped"))?;
- Pane::add_item(workspace, &pane_handle, item_handle, true, true, None, cx);
- anyhow::Ok(())
- })??;
+ pane.update(cx, |pane, cx| {
+ pane.add_item(item_handle.clone(), true, true, None, cx);
+ })?;
}
if item.active {
@@ -271,7 +267,7 @@ impl SerializedPane {
}
if let Some(active_item_index) = active_item_index {
- pane_handle.update(cx, |pane, cx| {
+ pane.update(cx, |pane, cx| {
pane.activate_item(active_item_index, false, false, cx);
})?;
}
@@ -99,7 +99,7 @@ impl Item for SharedScreen {
Some(format!("{}'s screen", self.user.github_login).into())
}
fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
- if let Some(nav_history) = self.nav_history.as_ref() {
+ if let Some(nav_history) = self.nav_history.as_mut() {
nav_history.push::<()>(None, cx);
}
}
@@ -153,14 +153,13 @@ impl View for Toolbar {
let pane = pane.clone();
cx.window_context().defer(move |cx| {
workspace.update(cx, |workspace, cx| {
- Pane::go_back(workspace, Some(pane.clone()), cx)
- .detach_and_log_err(cx);
+ workspace.go_back(pane.clone(), cx).detach_and_log_err(cx);
});
})
}
}
},
- super::GoBack { pane: None },
+ super::GoBack,
"Go Back",
cx,
));
@@ -182,14 +181,15 @@ impl View for Toolbar {
let pane = pane.clone();
cx.window_context().defer(move |cx| {
workspace.update(cx, |workspace, cx| {
- Pane::go_forward(workspace, Some(pane.clone()), cx)
+ workspace
+ .go_forward(pane.clone(), cx)
.detach_and_log_err(cx);
});
});
}
}
},
- super::GoForward { pane: None },
+ super::GoForward,
"Go Forward",
cx,
));
@@ -278,6 +278,19 @@ pub fn init(app_state: Arc<AppState>, cx: &mut AppContext) {
workspace.toggle_dock(DockPosition::Bottom, action.focus, cx);
});
cx.add_action(Workspace::activate_pane_at_index);
+ cx.add_action(|workspace: &mut Workspace, _: &ReopenClosedItem, cx| {
+ workspace.reopen_closed_item(cx).detach();
+ });
+ cx.add_action(|workspace: &mut Workspace, _: &GoBack, cx| {
+ workspace
+ .go_back(workspace.active_pane().downgrade(), cx)
+ .detach();
+ });
+ cx.add_action(|workspace: &mut Workspace, _: &GoForward, cx| {
+ workspace
+ .go_forward(workspace.active_pane().downgrade(), cx)
+ .detach();
+ });
cx.add_action(|_: &mut Workspace, _: &install_cli::Install, cx| {
cx.spawn(|workspace, mut cx| async move {
@@ -583,6 +596,7 @@ impl Workspace {
let center_pane = cx.add_view(|cx| {
Pane::new(
weak_handle.clone(),
+ project.clone(),
app_state.background_actions,
pane_history_timestamp.clone(),
cx,
@@ -946,21 +960,30 @@ impl Workspace {
&self,
limit: Option<usize>,
cx: &AppContext,
- ) -> Vec<ProjectPath> {
- let mut history: HashMap<ProjectPath, usize> = HashMap::default();
+ ) -> Vec<(ProjectPath, Option<PathBuf>)> {
+ let mut abs_paths_opened: HashMap<PathBuf, HashSet<ProjectPath>> = HashMap::default();
+ let mut history: HashMap<ProjectPath, (Option<PathBuf>, usize)> = HashMap::default();
for pane in &self.panes {
let pane = pane.read(cx);
pane.nav_history()
- .for_each_entry(cx, |entry, project_path| {
+ .for_each_entry(cx, |entry, (project_path, fs_path)| {
+ if let Some(fs_path) = &fs_path {
+ abs_paths_opened
+ .entry(fs_path.clone())
+ .or_default()
+ .insert(project_path.clone());
+ }
let timestamp = entry.timestamp;
match history.entry(project_path) {
hash_map::Entry::Occupied(mut entry) => {
- if &timestamp > entry.get() {
- entry.insert(timestamp);
+ let (old_fs_path, old_timestamp) = entry.get();
+ if &timestamp > old_timestamp {
+ assert_eq!(&fs_path, old_fs_path, "Inconsistent nav history");
+ entry.insert((fs_path, timestamp));
}
}
hash_map::Entry::Vacant(entry) => {
- entry.insert(timestamp);
+ entry.insert((fs_path, timestamp));
}
}
});
@@ -968,13 +991,137 @@ impl Workspace {
history
.into_iter()
- .sorted_by_key(|(_, timestamp)| *timestamp)
- .map(|(project_path, _)| project_path)
+ .sorted_by_key(|(_, (_, timestamp))| *timestamp)
+ .map(|(project_path, (fs_path, _))| (project_path, fs_path))
.rev()
+ .filter(|(history_path, abs_path)| {
+ let latest_project_path_opened = abs_path
+ .as_ref()
+ .and_then(|abs_path| abs_paths_opened.get(abs_path))
+ .and_then(|project_paths| {
+ project_paths
+ .iter()
+ .max_by(|b1, b2| b1.worktree_id.cmp(&b2.worktree_id))
+ });
+
+ match latest_project_path_opened {
+ Some(latest_project_path_opened) => latest_project_path_opened == history_path,
+ None => true,
+ }
+ })
.take(limit.unwrap_or(usize::MAX))
.collect()
}
+ fn navigate_history(
+ &mut self,
+ pane: WeakViewHandle<Pane>,
+ mode: NavigationMode,
+ cx: &mut ViewContext<Workspace>,
+ ) -> Task<Result<()>> {
+ let to_load = if let Some(pane) = pane.upgrade(cx) {
+ cx.focus(&pane);
+
+ pane.update(cx, |pane, cx| {
+ loop {
+ // Retrieve the weak item handle from the history.
+ let entry = pane.nav_history_mut().pop(mode, cx)?;
+
+ // If the item is still present in this pane, then activate it.
+ if let Some(index) = entry
+ .item
+ .upgrade(cx)
+ .and_then(|v| pane.index_for_item(v.as_ref()))
+ {
+ let prev_active_item_index = pane.active_item_index();
+ pane.nav_history_mut().set_mode(mode);
+ pane.activate_item(index, true, true, cx);
+ pane.nav_history_mut().set_mode(NavigationMode::Normal);
+
+ let mut navigated = prev_active_item_index != pane.active_item_index();
+ if let Some(data) = entry.data {
+ navigated |= pane.active_item()?.navigate(data, cx);
+ }
+
+ if navigated {
+ break None;
+ }
+ }
+ // If the item is no longer present in this pane, then retrieve its
+ // project path in order to reopen it.
+ else {
+ break pane
+ .nav_history()
+ .path_for_item(entry.item.id())
+ .map(|(project_path, _)| (project_path, entry));
+ }
+ }
+ })
+ } else {
+ None
+ };
+
+ if let Some((project_path, entry)) = to_load {
+ // If the item was no longer present, then load it again from its previous path.
+ let task = self.load_path(project_path, cx);
+ cx.spawn(|workspace, mut cx| async move {
+ let task = task.await;
+ let mut navigated = false;
+ if let Some((project_entry_id, build_item)) = task.log_err() {
+ let prev_active_item_id = pane.update(&mut cx, |pane, _| {
+ pane.nav_history_mut().set_mode(mode);
+ pane.active_item().map(|p| p.id())
+ })?;
+
+ pane.update(&mut cx, |pane, cx| {
+ let item = pane.open_item(project_entry_id, true, cx, build_item);
+ navigated |= Some(item.id()) != prev_active_item_id;
+ pane.nav_history_mut().set_mode(NavigationMode::Normal);
+ if let Some(data) = entry.data {
+ navigated |= item.navigate(data, cx);
+ }
+ })?;
+ }
+
+ if !navigated {
+ workspace
+ .update(&mut cx, |workspace, cx| {
+ Self::navigate_history(workspace, pane, mode, cx)
+ })?
+ .await?;
+ }
+
+ Ok(())
+ })
+ } else {
+ Task::ready(Ok(()))
+ }
+ }
+
+ pub fn go_back(
+ &mut self,
+ pane: WeakViewHandle<Pane>,
+ cx: &mut ViewContext<Workspace>,
+ ) -> Task<Result<()>> {
+ self.navigate_history(pane, NavigationMode::GoingBack, cx)
+ }
+
+ pub fn go_forward(
+ &mut self,
+ pane: WeakViewHandle<Pane>,
+ cx: &mut ViewContext<Workspace>,
+ ) -> Task<Result<()>> {
+ self.navigate_history(pane, NavigationMode::GoingForward, cx)
+ }
+
+ pub fn reopen_closed_item(&mut self, cx: &mut ViewContext<Workspace>) -> Task<Result<()>> {
+ self.navigate_history(
+ self.active_pane().downgrade(),
+ NavigationMode::ReopeningClosedItem,
+ cx,
+ )
+ }
+
pub fn client(&self) -> &Client {
&self.app_state.client
}
@@ -1548,6 +1695,7 @@ impl Workspace {
let pane = cx.add_view(|cx| {
Pane::new(
self.weak_handle(),
+ self.project.clone(),
self.app_state.background_actions,
self.pane_history_timestamp.clone(),
cx,
@@ -1567,7 +1715,7 @@ impl Workspace {
) -> bool {
if let Some(center_pane) = self.last_active_center_pane.clone() {
if let Some(center_pane) = center_pane.upgrade(cx) {
- Pane::add_item(self, &center_pane, item, true, true, None, cx);
+ center_pane.update(cx, |pane, cx| pane.add_item(item, true, true, None, cx));
true
} else {
false
@@ -1578,8 +1726,38 @@ impl Workspace {
}
pub fn add_item(&mut self, item: Box<dyn ItemHandle>, cx: &mut ViewContext<Self>) {
- let active_pane = self.active_pane().clone();
- Pane::add_item(self, &active_pane, item, true, true, None, cx);
+ self.active_pane
+ .update(cx, |pane, cx| pane.add_item(item, true, true, None, cx));
+ }
+
+ pub fn open_abs_path(
+ &mut self,
+ abs_path: PathBuf,
+ visible: bool,
+ cx: &mut ViewContext<Self>,
+ ) -> Task<anyhow::Result<Box<dyn ItemHandle>>> {
+ cx.spawn(|workspace, mut cx| async move {
+ let open_paths_task_result = workspace
+ .update(&mut cx, |workspace, cx| {
+ workspace.open_paths(vec![abs_path.clone()], visible, cx)
+ })
+ .with_context(|| format!("open abs path {abs_path:?} task spawn"))?
+ .await;
+ anyhow::ensure!(
+ open_paths_task_result.len() == 1,
+ "open abs path {abs_path:?} task returned incorrect number of results"
+ );
+ match open_paths_task_result
+ .into_iter()
+ .next()
+ .expect("ensured single task result")
+ {
+ Some(open_result) => {
+ open_result.with_context(|| format!("open abs path {abs_path:?} task join"))
+ }
+ None => anyhow::bail!("open abs path {abs_path:?} task returned None"),
+ }
+ })
}
pub fn open_path(
@@ -1599,13 +1777,10 @@ impl Workspace {
});
let task = self.load_path(path.into(), cx);
- cx.spawn(|this, mut cx| async move {
+ cx.spawn(|_, mut cx| async move {
let (project_entry_id, build_item) = task.await?;
- let pane = pane
- .upgrade(&cx)
- .ok_or_else(|| anyhow!("pane was closed"))?;
- this.update(&mut cx, |this, cx| {
- Pane::open_item(this, pane, project_entry_id, focus_item, cx, build_item)
+ pane.update(&mut cx, |pane, cx| {
+ pane.open_item(project_entry_id, focus_item, cx, build_item)
})
})
}
@@ -1662,8 +1837,9 @@ impl Workspace {
pub fn open_shared_screen(&mut self, peer_id: PeerId, cx: &mut ViewContext<Self>) {
if let Some(shared_screen) = self.shared_screen_for_peer(peer_id, &self.active_pane, cx) {
- let pane = self.active_pane.clone();
- Pane::add_item(self, &pane, Box::new(shared_screen), false, true, None, cx);
+ self.active_pane.update(cx, |pane, cx| {
+ pane.add_item(Box::new(shared_screen), false, true, None, cx)
+ });
}
}
@@ -1750,6 +1926,7 @@ impl Workspace {
cx: &mut ViewContext<Self>,
) {
match event {
+ pane::Event::AddItem { item } => item.added_to_pane(self, pane, cx),
pane::Event::Split(direction) => {
self.split_pane(pane, *direction, cx);
}
@@ -1804,7 +1981,7 @@ impl Workspace {
let item = pane.read(cx).active_item()?;
let maybe_pane_handle = if let Some(clone) = item.clone_on_split(self.database_id(), cx) {
let new_pane = self.add_pane(cx);
- Pane::add_item(self, &new_pane, clone, true, true, None, cx);
+ new_pane.update(cx, |pane, cx| pane.add_item(clone, true, true, None, cx));
self.center.split(&pane, &new_pane, direction).unwrap();
Some(new_pane)
} else {
@@ -1826,7 +2003,7 @@ impl Workspace {
let Some(from) = from.upgrade(cx) else { return; };
let new_pane = self.add_pane(cx);
- Pane::move_item(self, from.clone(), new_pane.clone(), item_id_to_move, 0, cx);
+ self.move_item(from.clone(), new_pane.clone(), item_id_to_move, 0, cx);
self.center
.split(&pane_to_split, &new_pane, split_direction)
.unwrap();
@@ -1854,6 +2031,41 @@ impl Workspace {
}))
}
+ pub fn move_item(
+ &mut self,
+ source: ViewHandle<Pane>,
+ destination: ViewHandle<Pane>,
+ item_id_to_move: usize,
+ destination_index: usize,
+ cx: &mut ViewContext<Self>,
+ ) {
+ let item_to_move = source
+ .read(cx)
+ .items()
+ .enumerate()
+ .find(|(_, item_handle)| item_handle.id() == item_id_to_move);
+
+ if item_to_move.is_none() {
+ log::warn!("Tried to move item handle which was not in `from` pane. Maybe tab was closed during drop");
+ return;
+ }
+ let (item_ix, item_handle) = item_to_move.unwrap();
+ let item_handle = item_handle.clone();
+
+ if source != destination {
+ // Close item from previous pane
+ source.update(cx, |source, cx| {
+ source.remove_item(item_ix, false, cx);
+ });
+ }
+
+ // This automatically removes duplicate items in the pane
+ destination.update(cx, |destination, cx| {
+ destination.add_item(item_handle, true, true, Some(destination_index), cx);
+ cx.focus_self();
+ });
+ }
+
fn remove_pane(&mut self, pane: ViewHandle<Pane>, cx: &mut ViewContext<Self>) {
if self.center.remove(&pane).unwrap() {
self.force_remove_pane(&pane, cx);
@@ -2457,7 +2669,9 @@ impl Workspace {
if let Some(index) = pane.update(cx, |pane, _| pane.index_for_item(item.as_ref())) {
pane.update(cx, |pane, cx| pane.activate_item(index, false, false, cx));
} else {
- Pane::add_item(self, &pane, item.boxed_clone(), false, false, None, cx);
+ pane.update(cx, |pane, cx| {
+ pane.add_item(item.boxed_clone(), false, false, None, cx)
+ });
}
if pane_was_focused {
@@ -3946,9 +4160,7 @@ mod tests {
});
workspace
- .update(cx, |workspace, cx| {
- Pane::go_back(workspace, Some(pane.downgrade()), cx)
- })
+ .update(cx, |workspace, cx| workspace.go_back(pane.downgrade(), cx))
.await
.unwrap();
@@ -120,8 +120,8 @@ pub fn menus() -> Vec<Menu<'static>> {
Menu {
name: "Go",
items: vec![
- MenuItem::action("Back", workspace::GoBack { pane: None }),
- MenuItem::action("Forward", workspace::GoForward { pane: None }),
+ MenuItem::action("Back", workspace::GoBack),
+ MenuItem::action("Forward", workspace::GoForward),
MenuItem::separator(),
MenuItem::action("Go to File", file_finder::Toggle),
MenuItem::action("Go to Symbol in Project", project_symbols::Toggle),
@@ -667,7 +667,7 @@ mod tests {
use util::http::FakeHttpClient;
use workspace::{
item::{Item, ItemHandle},
- open_new, open_paths, pane, NewFile, Pane, SplitDirection, WorkspaceHandle,
+ open_new, open_paths, pane, NewFile, SplitDirection, WorkspaceHandle,
};
#[gpui::test]
@@ -1492,7 +1492,7 @@ mod tests {
);
workspace
- .update(cx, |w, cx| Pane::go_back(w, None, cx))
+ .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1501,7 +1501,7 @@ mod tests {
);
workspace
- .update(cx, |w, cx| Pane::go_back(w, None, cx))
+ .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1510,7 +1510,7 @@ mod tests {
);
workspace
- .update(cx, |w, cx| Pane::go_back(w, None, cx))
+ .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1519,7 +1519,7 @@ mod tests {
);
workspace
- .update(cx, |w, cx| Pane::go_back(w, None, cx))
+ .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1529,7 +1529,7 @@ mod tests {
// Go back one more time and ensure we don't navigate past the first item in the history.
workspace
- .update(cx, |w, cx| Pane::go_back(w, None, cx))
+ .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1538,7 +1538,7 @@ mod tests {
);
workspace
- .update(cx, |w, cx| Pane::go_forward(w, None, cx))
+ .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1547,7 +1547,7 @@ mod tests {
);
workspace
- .update(cx, |w, cx| Pane::go_forward(w, None, cx))
+ .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1565,7 +1565,7 @@ mod tests {
.await
.unwrap();
workspace
- .update(cx, |w, cx| Pane::go_forward(w, None, cx))
+ .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1574,7 +1574,7 @@ mod tests {
);
workspace
- .update(cx, |w, cx| Pane::go_forward(w, None, cx))
+ .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1583,7 +1583,7 @@ mod tests {
);
workspace
- .update(cx, |w, cx| Pane::go_back(w, None, cx))
+ .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1605,7 +1605,7 @@ mod tests {
.await
.unwrap();
workspace
- .update(cx, |w, cx| Pane::go_back(w, None, cx))
+ .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1613,7 +1613,7 @@ mod tests {
(file1.clone(), DisplayPoint::new(10, 0), 0.)
);
workspace
- .update(cx, |w, cx| Pane::go_forward(w, None, cx))
+ .update(cx, |w, cx| w.go_forward(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1657,7 +1657,7 @@ mod tests {
})
});
workspace
- .update(cx, |w, cx| Pane::go_back(w, None, cx))
+ .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1665,7 +1665,7 @@ mod tests {
(file1.clone(), DisplayPoint::new(2, 0), 0.)
);
workspace
- .update(cx, |w, cx| Pane::go_back(w, None, cx))
+ .update(cx, |w, cx| w.go_back(w.active_pane().downgrade(), cx))
.await
.unwrap();
assert_eq!(
@@ -1770,81 +1770,97 @@ mod tests {
// Reopen all the closed items, ensuring they are reopened in the same order
// in which they were closed.
workspace
- .update(cx, Pane::reopen_closed_item)
+ .update(cx, Workspace::reopen_closed_item)
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
workspace
- .update(cx, Pane::reopen_closed_item)
+ .update(cx, Workspace::reopen_closed_item)
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file2.clone()));
workspace
- .update(cx, Pane::reopen_closed_item)
+ .update(cx, Workspace::reopen_closed_item)
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
workspace
- .update(cx, Pane::reopen_closed_item)
+ .update(cx, Workspace::reopen_closed_item)
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file1.clone()));
// Reopening past the last closed item is a no-op.
workspace
- .update(cx, Pane::reopen_closed_item)
+ .update(cx, Workspace::reopen_closed_item)
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file1.clone()));
// Reopening closed items doesn't interfere with navigation history.
workspace
- .update(cx, |workspace, cx| Pane::go_back(workspace, None, cx))
+ .update(cx, |workspace, cx| {
+ workspace.go_back(workspace.active_pane().downgrade(), cx)
+ })
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
workspace
- .update(cx, |workspace, cx| Pane::go_back(workspace, None, cx))
+ .update(cx, |workspace, cx| {
+ workspace.go_back(workspace.active_pane().downgrade(), cx)
+ })
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file2.clone()));
workspace
- .update(cx, |workspace, cx| Pane::go_back(workspace, None, cx))
+ .update(cx, |workspace, cx| {
+ workspace.go_back(workspace.active_pane().downgrade(), cx)
+ })
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
workspace
- .update(cx, |workspace, cx| Pane::go_back(workspace, None, cx))
+ .update(cx, |workspace, cx| {
+ workspace.go_back(workspace.active_pane().downgrade(), cx)
+ })
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file4.clone()));
workspace
- .update(cx, |workspace, cx| Pane::go_back(workspace, None, cx))
+ .update(cx, |workspace, cx| {
+ workspace.go_back(workspace.active_pane().downgrade(), cx)
+ })
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file3.clone()));
workspace
- .update(cx, |workspace, cx| Pane::go_back(workspace, None, cx))
+ .update(cx, |workspace, cx| {
+ workspace.go_back(workspace.active_pane().downgrade(), cx)
+ })
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file2.clone()));
workspace
- .update(cx, |workspace, cx| Pane::go_back(workspace, None, cx))
+ .update(cx, |workspace, cx| {
+ workspace.go_back(workspace.active_pane().downgrade(), cx)
+ })
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file1.clone()));
workspace
- .update(cx, |workspace, cx| Pane::go_back(workspace, None, cx))
+ .update(cx, |workspace, cx| {
+ workspace.go_back(workspace.active_pane().downgrade(), cx)
+ })
.await
.unwrap();
assert_eq!(active_path(&workspace, cx), Some(file1.clone()));