project: Move tests to integration layer (#47596)

Created by Piotr Osiewicz and Zed Zippy

With this change, building the project tests takes 6.5s instead of 18s.
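
The speedup comes from no longer compiling the library twice: the unit tests used to sit in #[cfg(test)] modules inside src/, so every test build recompiled the whole project crate with cfg(test) enabled. As integration tests they link against the already-built library instead. A minimal sketch of the pattern, using a made-up example module rather than any real file from this PR:

    // tests/integration/example.rs -- illustrative only; the module name
    // `example` and the function `do_something` are stand-ins, not code from
    // this PR. The test now exercises the crate through its public API against
    // the already-compiled library.
    use project::example::do_something;

    #[test]
    fn it_works() {
        // Previously this lived in src/example.rs under #[cfg(test)] mod tests.
        assert!(do_something());
    }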

Release Notes:

- N/A

---------

Co-authored-by: Zed Zippy <234243425+zed-zippy[bot]@users.noreply.github.com>

Change summary

Cargo.lock                                                |    1 
crates/project/Cargo.toml                                 |    7 
crates/project/src/agent_server_store.rs                  |  608 -----
crates/project/src/color_extractor.rs                     |  159 -
crates/project/src/context_server_store.rs                |  895 -------
crates/project/src/debugger.rs                            |    2 
crates/project/src/debugger/locators.rs                   |    4 
crates/project/src/debugger/locators/go.rs                |  216 -
crates/project/src/debugger/locators/python.rs            |   52 
crates/project/src/debugger/memory.rs                     |   58 
crates/project/src/git_store/conflict_set.rs              |  466 ----
crates/project/src/git_store/git_traversal.rs             |  541 ----
crates/project/src/image_store.rs                         |   83 
crates/project/src/lsp_command.rs                         |  137 -
crates/project/src/lsp_command/signature_help.rs          |  522 ----
crates/project/src/lsp_store.rs                           |   85 
crates/project/src/manifest_tree.rs                       |    2 
crates/project/src/manifest_tree/path_trie.rs             |  142 -
crates/project/src/project.rs                             |   93 
crates/project/src/project_search.rs                      |  126 -
crates/project/src/search.rs                              |  158 -
crates/project/src/search_history.rs                      |  156 -
crates/project/src/task_inventory.rs                      |  635 -----
crates/project/src/trusted_worktrees.rs                   |  983 --------
crates/project/src/yarn.rs                                |   43 
crates/project/tests/integration/color_extractor.rs       |  155 +
crates/project/tests/integration/context_server_store.rs  |  882 +++++++
crates/project/tests/integration/debugger.rs              |  293 ++
crates/project/tests/integration/ext_agent_tests.rs       |  226 ++
crates/project/tests/integration/extension_agent_tests.rs |  345 +++
crates/project/tests/integration/git_store.rs             | 1014 +++++++++
crates/project/tests/integration/image_store.rs           |   78 
crates/project/tests/integration/lsp_command.rs           |  128 +
crates/project/tests/integration/lsp_store.rs             |   74 
crates/project/tests/integration/manifest_tree.rs         |  124 +
crates/project/tests/integration/project_search.rs        |  114 +
crates/project/tests/integration/project_tests.rs         |  129 
crates/project/tests/integration/search.rs                |  156 +
crates/project/tests/integration/search_history.rs        |  148 +
crates/project/tests/integration/signature_help.rs        |  517 ++++
crates/project/tests/integration/task_inventory.rs        |  626 +++++
crates/project/tests/integration/trusted_worktrees.rs     |  957 ++++++++
crates/project/tests/integration/yarn.rs                  |   37 
crates/vercel/Cargo.toml                                  |    1 
44 files changed, 6,085 insertions(+), 6,093 deletions(-)

Detailed changes

Cargo.lock

@@ -12681,6 +12681,7 @@ dependencies = [
  "postage",
  "prettier",
  "pretty_assertions",
+ "project",
  "rand 0.9.2",
  "regex",
  "release_channel",

crates/project/Cargo.toml

@@ -11,6 +11,12 @@ workspace = true
 [lib]
 path = "src/project.rs"
 doctest = false
+test = false
+
+[[test]]
+name = "integration"
+required-features = ["test-support"]
+path = "tests/integration/project_tests.rs"
 
 [features]
 test-support = [
@@ -111,6 +117,7 @@ language = { workspace = true, features = ["test-support"] }
 lsp = { workspace = true, features = ["test-support"] }
 prettier = { workspace = true, features = ["test-support"] }
 pretty_assertions.workspace = true
+project = {workspace = true, features = ["test-support"]}
 rpc = { workspace = true, features = ["test-support"] }
 settings = { workspace = true, features = ["test-support"] }
 snippet_provider = { workspace = true, features = ["test-support"] }
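
Because the [[test]] target points at a single entry file, everything under tests/integration/ compiles into one test binary, with the entry file pulling the per-area files in as modules. required-features = ["test-support"] keeps that target from building unless the feature is enabled, which the self-dependency on project with test-support (added in the hunk above) appears to provide. The contents of tests/integration/project_tests.rs are not part of this excerpt, but a plausible sketch, with module names taken from the file list in the change summary, looks like:

    // tests/integration/project_tests.rs (sketch; the actual file is not shown
    // in this diff). One module declaration per file in tests/integration/.
    mod color_extractor;
    mod context_server_store;
    mod debugger;
    mod ext_agent_tests;
    mod extension_agent_tests;
    mod git_store;
    mod image_store;
    mod lsp_command;
    mod lsp_store;
    mod manifest_tree;
    mod project_search;
    mod search;
    mod search_history;
    mod signature_help;
    mod task_inventory;
    mod trusted_worktrees;
    mod yarn;
    // ...plus whatever tests live directly in this entry file.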

crates/project/src/agent_server_store.rs

@@ -146,15 +146,15 @@ enum AgentServerStoreState {
     Collab,
 }
 
-struct ExternalAgentEntry {
+pub struct ExternalAgentEntry {
     server: Box<dyn ExternalAgentServer>,
     icon: Option<SharedString>,
     display_name: Option<SharedString>,
-    source: ExternalAgentSource,
+    pub source: ExternalAgentSource,
 }
 
 impl ExternalAgentEntry {
-    fn new(
+    pub fn new(
         server: Box<dyn ExternalAgentServer>,
         source: ExternalAgentSource,
         icon: Option<SharedString>,
@@ -171,241 +171,13 @@ impl ExternalAgentEntry {
 
 pub struct AgentServerStore {
     state: AgentServerStoreState,
-    external_agents: HashMap<ExternalAgentServerName, ExternalAgentEntry>,
+    pub external_agents: HashMap<ExternalAgentServerName, ExternalAgentEntry>,
 }
 
 pub struct AgentServersUpdated;
 
 impl EventEmitter<AgentServersUpdated> for AgentServerStore {}
 
-#[cfg(test)]
-mod ext_agent_tests {
-    use super::*;
-    use std::{collections::HashSet, fmt::Write as _};
-
-    // Helper to build a store in Collab mode so we can mutate internal maps without
-    // needing to spin up a full project environment.
-    fn collab_store() -> AgentServerStore {
-        AgentServerStore {
-            state: AgentServerStoreState::Collab,
-            external_agents: HashMap::default(),
-        }
-    }
-
-    // A simple fake that implements ExternalAgentServer without needing async plumbing.
-    struct NoopExternalAgent;
-
-    impl ExternalAgentServer for NoopExternalAgent {
-        fn get_command(
-            &mut self,
-            _root_dir: Option<&str>,
-            _extra_env: HashMap<String, String>,
-            _status_tx: Option<watch::Sender<SharedString>>,
-            _new_version_available_tx: Option<watch::Sender<Option<String>>>,
-            _cx: &mut AsyncApp,
-        ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
-            Task::ready(Ok((
-                AgentServerCommand {
-                    path: PathBuf::from("noop"),
-                    args: Vec::new(),
-                    env: None,
-                },
-                "".to_string(),
-                None,
-            )))
-        }
-
-        fn as_any_mut(&mut self) -> &mut dyn Any {
-            self
-        }
-    }
-
-    #[test]
-    fn external_agent_server_name_display() {
-        let name = ExternalAgentServerName(SharedString::from("Ext: Tool"));
-        let mut s = String::new();
-        write!(&mut s, "{name}").unwrap();
-        assert_eq!(s, "Ext: Tool");
-    }
-
-    #[test]
-    fn sync_extension_agents_removes_previous_extension_entries() {
-        let mut store = collab_store();
-
-        // Seed with a couple of agents that will be replaced by extensions
-        store.external_agents.insert(
-            ExternalAgentServerName(SharedString::from("foo-agent")),
-            ExternalAgentEntry::new(
-                Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
-                ExternalAgentSource::Custom,
-                None,
-                None,
-            ),
-        );
-        store.external_agents.insert(
-            ExternalAgentServerName(SharedString::from("bar-agent")),
-            ExternalAgentEntry::new(
-                Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
-                ExternalAgentSource::Custom,
-                None,
-                None,
-            ),
-        );
-        store.external_agents.insert(
-            ExternalAgentServerName(SharedString::from("custom")),
-            ExternalAgentEntry::new(
-                Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
-                ExternalAgentSource::Custom,
-                None,
-                None,
-            ),
-        );
-
-        // Simulate the removal phase: if we're syncing extensions that provide
-        // "foo-agent" and "bar-agent", those should be removed first
-        let extension_agent_names: HashSet<String> =
-            ["foo-agent".to_string(), "bar-agent".to_string()]
-                .into_iter()
-                .collect();
-
-        let keys_to_remove: Vec<_> = store
-            .external_agents
-            .keys()
-            .filter(|name| extension_agent_names.contains(name.0.as_ref()))
-            .cloned()
-            .collect();
-
-        for key in keys_to_remove {
-            store.external_agents.remove(&key);
-        }
-
-        // Only the custom entry should remain.
-        let remaining: Vec<_> = store
-            .external_agents
-            .keys()
-            .map(|k| k.0.to_string())
-            .collect();
-        assert_eq!(remaining, vec!["custom".to_string()]);
-    }
-
-    #[test]
-    fn resolve_extension_icon_path_allows_valid_paths() {
-        // Create a temporary directory structure for testing
-        let temp_dir = tempfile::tempdir().unwrap();
-        let extensions_dir = temp_dir.path();
-        let ext_dir = extensions_dir.join("my-extension");
-        std::fs::create_dir_all(&ext_dir).unwrap();
-
-        // Create a valid icon file
-        let icon_path = ext_dir.join("icon.svg");
-        std::fs::write(&icon_path, "<svg></svg>").unwrap();
-
-        // Test that a valid relative path works
-        let result = super::resolve_extension_icon_path(extensions_dir, "my-extension", "icon.svg");
-        assert!(result.is_some());
-        assert!(result.unwrap().ends_with("icon.svg"));
-    }
-
-    #[test]
-    fn resolve_extension_icon_path_allows_nested_paths() {
-        let temp_dir = tempfile::tempdir().unwrap();
-        let extensions_dir = temp_dir.path();
-        let ext_dir = extensions_dir.join("my-extension");
-        let icons_dir = ext_dir.join("assets").join("icons");
-        std::fs::create_dir_all(&icons_dir).unwrap();
-
-        let icon_path = icons_dir.join("logo.svg");
-        std::fs::write(&icon_path, "<svg></svg>").unwrap();
-
-        let result = super::resolve_extension_icon_path(
-            extensions_dir,
-            "my-extension",
-            "assets/icons/logo.svg",
-        );
-        assert!(result.is_some());
-        assert!(result.unwrap().ends_with("logo.svg"));
-    }
-
-    #[test]
-    fn resolve_extension_icon_path_blocks_path_traversal() {
-        let temp_dir = tempfile::tempdir().unwrap();
-        let extensions_dir = temp_dir.path();
-
-        // Create two extension directories
-        let ext1_dir = extensions_dir.join("extension1");
-        let ext2_dir = extensions_dir.join("extension2");
-        std::fs::create_dir_all(&ext1_dir).unwrap();
-        std::fs::create_dir_all(&ext2_dir).unwrap();
-
-        // Create a file in extension2
-        let secret_file = ext2_dir.join("secret.svg");
-        std::fs::write(&secret_file, "<svg>secret</svg>").unwrap();
-
-        // Try to access extension2's file from extension1 using path traversal
-        let result = super::resolve_extension_icon_path(
-            extensions_dir,
-            "extension1",
-            "../extension2/secret.svg",
-        );
-        assert!(
-            result.is_none(),
-            "Path traversal to sibling extension should be blocked"
-        );
-    }
-
-    #[test]
-    fn resolve_extension_icon_path_blocks_absolute_escape() {
-        let temp_dir = tempfile::tempdir().unwrap();
-        let extensions_dir = temp_dir.path();
-        let ext_dir = extensions_dir.join("my-extension");
-        std::fs::create_dir_all(&ext_dir).unwrap();
-
-        // Create a file outside the extensions directory
-        let outside_file = temp_dir.path().join("outside.svg");
-        std::fs::write(&outside_file, "<svg>outside</svg>").unwrap();
-
-        // Try to escape to parent directory
-        let result =
-            super::resolve_extension_icon_path(extensions_dir, "my-extension", "../outside.svg");
-        assert!(
-            result.is_none(),
-            "Path traversal to parent directory should be blocked"
-        );
-    }
-
-    #[test]
-    fn resolve_extension_icon_path_blocks_deep_traversal() {
-        let temp_dir = tempfile::tempdir().unwrap();
-        let extensions_dir = temp_dir.path();
-        let ext_dir = extensions_dir.join("my-extension");
-        std::fs::create_dir_all(&ext_dir).unwrap();
-
-        // Try deep path traversal
-        let result = super::resolve_extension_icon_path(
-            extensions_dir,
-            "my-extension",
-            "../../../../../../etc/passwd",
-        );
-        assert!(
-            result.is_none(),
-            "Deep path traversal should be blocked (file doesn't exist)"
-        );
-    }
-
-    #[test]
-    fn resolve_extension_icon_path_returns_none_for_nonexistent() {
-        let temp_dir = tempfile::tempdir().unwrap();
-        let extensions_dir = temp_dir.path();
-        let ext_dir = extensions_dir.join("my-extension");
-        std::fs::create_dir_all(&ext_dir).unwrap();
-
-        // Try to access a file that doesn't exist
-        let result =
-            super::resolve_extension_icon_path(extensions_dir, "my-extension", "nonexistent.svg");
-        assert!(result.is_none(), "Nonexistent file should return None");
-    }
-}
-
 impl AgentServerStore {
     /// Synchronizes extension-provided agent servers with the store.
     pub fn sync_extension_agents<'a, I>(
@@ -535,7 +307,7 @@ impl AgentServerStore {
 
 /// Safely resolves an extension icon path, ensuring it stays within the extension directory.
 /// Returns `None` if the path would escape the extension directory (path traversal attack).
-fn resolve_extension_icon_path(
+pub fn resolve_extension_icon_path(
     extensions_dir: &Path,
     extension_id: &str,
     icon_relative_path: &str,
@@ -960,7 +732,7 @@ impl AgentServerStore {
         }
     }
 
-    pub(crate) fn collab(_cx: &mut Context<Self>) -> Self {
+    pub fn collab() -> Self {
         Self {
             state: AgentServerStoreState::Collab,
             external_agents: Default::default(),
@@ -1937,15 +1709,15 @@ fn asset_name(version: &str) -> Option<String> {
     Some(format!("codex-acp-{version}-{arch}-{platform}.{ext}"))
 }
 
-struct LocalExtensionArchiveAgent {
-    fs: Arc<dyn Fs>,
-    http_client: Arc<dyn HttpClient>,
-    node_runtime: NodeRuntime,
-    project_environment: Entity<ProjectEnvironment>,
-    extension_id: Arc<str>,
-    agent_id: Arc<str>,
-    targets: HashMap<String, extension::TargetConfig>,
-    env: HashMap<String, String>,
+pub struct LocalExtensionArchiveAgent {
+    pub fs: Arc<dyn Fs>,
+    pub http_client: Arc<dyn HttpClient>,
+    pub node_runtime: NodeRuntime,
+    pub project_environment: Entity<ProjectEnvironment>,
+    pub extension_id: Arc<str>,
+    pub agent_id: Arc<str>,
+    pub targets: HashMap<String, extension::TargetConfig>,
+    pub env: HashMap<String, String>,
 }
 
 impl ExternalAgentServer for LocalExtensionArchiveAgent {
@@ -2772,353 +2544,3 @@ impl settings::Settings for AllAgentServersSettings {
         }
     }
 }
-
-#[cfg(test)]
-mod extension_agent_tests {
-    use crate::worktree_store::WorktreeStore;
-
-    use super::*;
-    use gpui::TestAppContext;
-    use std::sync::Arc;
-
-    #[test]
-    fn extension_agent_constructs_proper_display_names() {
-        // Verify the display name format for extension-provided agents
-        let name1 = ExternalAgentServerName(SharedString::from("Extension: Agent"));
-        assert!(name1.0.contains(": "));
-
-        let name2 = ExternalAgentServerName(SharedString::from("MyExt: MyAgent"));
-        assert_eq!(name2.0, "MyExt: MyAgent");
-
-        // Non-extension agents shouldn't have the separator
-        let custom = ExternalAgentServerName(SharedString::from("custom"));
-        assert!(!custom.0.contains(": "));
-    }
-
-    struct NoopExternalAgent;
-
-    impl ExternalAgentServer for NoopExternalAgent {
-        fn get_command(
-            &mut self,
-            _root_dir: Option<&str>,
-            _extra_env: HashMap<String, String>,
-            _status_tx: Option<watch::Sender<SharedString>>,
-            _new_version_available_tx: Option<watch::Sender<Option<String>>>,
-            _cx: &mut AsyncApp,
-        ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
-            Task::ready(Ok((
-                AgentServerCommand {
-                    path: PathBuf::from("noop"),
-                    args: Vec::new(),
-                    env: None,
-                },
-                "".to_string(),
-                None,
-            )))
-        }
-
-        fn as_any_mut(&mut self) -> &mut dyn Any {
-            self
-        }
-    }
-
-    #[test]
-    fn sync_removes_only_extension_provided_agents() {
-        let mut store = AgentServerStore {
-            state: AgentServerStoreState::Collab,
-            external_agents: HashMap::default(),
-        };
-
-        // Seed with extension agents (contain ": ") and custom agents (don't contain ": ")
-        store.external_agents.insert(
-            ExternalAgentServerName(SharedString::from("Ext1: Agent1")),
-            ExternalAgentEntry::new(
-                Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
-                ExternalAgentSource::Extension,
-                None,
-                None,
-            ),
-        );
-        store.external_agents.insert(
-            ExternalAgentServerName(SharedString::from("Ext2: Agent2")),
-            ExternalAgentEntry::new(
-                Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
-                ExternalAgentSource::Extension,
-                None,
-                None,
-            ),
-        );
-        store.external_agents.insert(
-            ExternalAgentServerName(SharedString::from("custom-agent")),
-            ExternalAgentEntry::new(
-                Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
-                ExternalAgentSource::Custom,
-                None,
-                None,
-            ),
-        );
-
-        // Simulate removal phase
-        store
-            .external_agents
-            .retain(|_, entry| entry.source != ExternalAgentSource::Extension);
-
-        // Only custom-agent should remain
-        assert_eq!(store.external_agents.len(), 1);
-        assert!(
-            store
-                .external_agents
-                .contains_key(&ExternalAgentServerName(SharedString::from("custom-agent")))
-        );
-    }
-
-    #[test]
-    fn archive_launcher_constructs_with_all_fields() {
-        use extension::AgentServerManifestEntry;
-
-        let mut env = HashMap::default();
-        env.insert("GITHUB_TOKEN".into(), "secret".into());
-
-        let mut targets = HashMap::default();
-        targets.insert(
-            "darwin-aarch64".to_string(),
-            extension::TargetConfig {
-                archive:
-                    "https://github.com/owner/repo/releases/download/v1.0.0/agent-darwin-arm64.zip"
-                        .into(),
-                cmd: "./agent".into(),
-                args: vec![],
-                sha256: None,
-                env: Default::default(),
-            },
-        );
-
-        let _entry = AgentServerManifestEntry {
-            name: "GitHub Agent".into(),
-            targets,
-            env,
-            icon: None,
-        };
-
-        // Verify display name construction
-        let expected_name = ExternalAgentServerName(SharedString::from("GitHub Agent"));
-        assert_eq!(expected_name.0, "GitHub Agent");
-    }
-
-    #[gpui::test]
-    async fn archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAppContext) {
-        let fs = fs::FakeFs::new(cx.background_executor.clone());
-        let http_client = http_client::FakeHttpClient::with_404_response();
-        let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone()));
-        let project_environment = cx.new(|cx| {
-            crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx)
-        });
-
-        let agent = LocalExtensionArchiveAgent {
-            fs,
-            http_client,
-            node_runtime: node_runtime::NodeRuntime::unavailable(),
-            project_environment,
-            extension_id: Arc::from("my-extension"),
-            agent_id: Arc::from("my-agent"),
-            targets: {
-                let mut map = HashMap::default();
-                map.insert(
-                    "darwin-aarch64".to_string(),
-                    extension::TargetConfig {
-                        archive: "https://example.com/my-agent-darwin-arm64.zip".into(),
-                        cmd: "./my-agent".into(),
-                        args: vec!["--serve".into()],
-                        sha256: None,
-                        env: Default::default(),
-                    },
-                );
-                map
-            },
-            env: {
-                let mut map = HashMap::default();
-                map.insert("PORT".into(), "8080".into());
-                map
-            },
-        };
-
-        // Verify agent is properly constructed
-        assert_eq!(agent.extension_id.as_ref(), "my-extension");
-        assert_eq!(agent.agent_id.as_ref(), "my-agent");
-        assert_eq!(agent.env.get("PORT"), Some(&"8080".to_string()));
-        assert!(agent.targets.contains_key("darwin-aarch64"));
-    }
-
-    #[test]
-    fn sync_extension_agents_registers_archive_launcher() {
-        use extension::AgentServerManifestEntry;
-
-        let expected_name = ExternalAgentServerName(SharedString::from("Release Agent"));
-        assert_eq!(expected_name.0, "Release Agent");
-
-        // Verify the manifest entry structure for archive-based installation
-        let mut env = HashMap::default();
-        env.insert("API_KEY".into(), "secret".into());
-
-        let mut targets = HashMap::default();
-        targets.insert(
-            "linux-x86_64".to_string(),
-            extension::TargetConfig {
-                archive: "https://github.com/org/project/releases/download/v2.1.0/release-agent-linux-x64.tar.gz".into(),
-                cmd: "./release-agent".into(),
-                args: vec!["serve".into()],
-                sha256: None,
-                env: Default::default(),
-            },
-        );
-
-        let manifest_entry = AgentServerManifestEntry {
-            name: "Release Agent".into(),
-            targets: targets.clone(),
-            env,
-            icon: None,
-        };
-
-        // Verify target config is present
-        assert!(manifest_entry.targets.contains_key("linux-x86_64"));
-        let target = manifest_entry.targets.get("linux-x86_64").unwrap();
-        assert_eq!(target.cmd, "./release-agent");
-    }
-
-    #[gpui::test]
-    async fn test_node_command_uses_managed_runtime(cx: &mut TestAppContext) {
-        let fs = fs::FakeFs::new(cx.background_executor.clone());
-        let http_client = http_client::FakeHttpClient::with_404_response();
-        let node_runtime = NodeRuntime::unavailable();
-        let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone()));
-        let project_environment = cx.new(|cx| {
-            crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx)
-        });
-
-        let agent = LocalExtensionArchiveAgent {
-            fs: fs.clone(),
-            http_client,
-            node_runtime,
-            project_environment,
-            extension_id: Arc::from("node-extension"),
-            agent_id: Arc::from("node-agent"),
-            targets: {
-                let mut map = HashMap::default();
-                map.insert(
-                    "darwin-aarch64".to_string(),
-                    extension::TargetConfig {
-                        archive: "https://example.com/node-agent.zip".into(),
-                        cmd: "node".into(),
-                        args: vec!["index.js".into()],
-                        sha256: None,
-                        env: Default::default(),
-                    },
-                );
-                map
-            },
-            env: HashMap::default(),
-        };
-
-        // Verify that when cmd is "node", it attempts to use the node runtime
-        assert_eq!(agent.extension_id.as_ref(), "node-extension");
-        assert_eq!(agent.agent_id.as_ref(), "node-agent");
-
-        let target = agent.targets.get("darwin-aarch64").unwrap();
-        assert_eq!(target.cmd, "node");
-        assert_eq!(target.args, vec!["index.js"]);
-    }
-
-    #[gpui::test]
-    async fn test_commands_run_in_extraction_directory(cx: &mut TestAppContext) {
-        let fs = fs::FakeFs::new(cx.background_executor.clone());
-        let http_client = http_client::FakeHttpClient::with_404_response();
-        let node_runtime = NodeRuntime::unavailable();
-        let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone()));
-        let project_environment = cx.new(|cx| {
-            crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx)
-        });
-
-        let agent = LocalExtensionArchiveAgent {
-            fs: fs.clone(),
-            http_client,
-            node_runtime,
-            project_environment,
-            extension_id: Arc::from("test-ext"),
-            agent_id: Arc::from("test-agent"),
-            targets: {
-                let mut map = HashMap::default();
-                map.insert(
-                    "darwin-aarch64".to_string(),
-                    extension::TargetConfig {
-                        archive: "https://example.com/test.zip".into(),
-                        cmd: "node".into(),
-                        args: vec![
-                            "server.js".into(),
-                            "--config".into(),
-                            "./config.json".into(),
-                        ],
-                        sha256: None,
-                        env: Default::default(),
-                    },
-                );
-                map
-            },
-            env: HashMap::default(),
-        };
-
-        // Verify the agent is configured with relative paths in args
-        let target = agent.targets.get("darwin-aarch64").unwrap();
-        assert_eq!(target.args[0], "server.js");
-        assert_eq!(target.args[2], "./config.json");
-        // These relative paths will resolve relative to the extraction directory
-        // when the command is executed
-    }
-
-    #[test]
-    fn test_tilde_expansion_in_settings() {
-        let settings = settings::BuiltinAgentServerSettings {
-            path: Some(PathBuf::from("~/bin/agent")),
-            args: Some(vec!["--flag".into()]),
-            env: None,
-            ignore_system_version: None,
-            default_mode: None,
-            default_model: None,
-            favorite_models: vec![],
-            default_config_options: Default::default(),
-            favorite_config_option_values: Default::default(),
-        };
-
-        let BuiltinAgentServerSettings { path, .. } = settings.into();
-
-        let path = path.unwrap();
-        assert!(
-            !path.to_string_lossy().starts_with("~"),
-            "Tilde should be expanded for builtin agent path"
-        );
-
-        let settings = settings::CustomAgentServerSettings::Custom {
-            path: PathBuf::from("~/custom/agent"),
-            args: vec!["serve".into()],
-            env: Default::default(),
-            default_mode: None,
-            default_model: None,
-            favorite_models: vec![],
-            default_config_options: Default::default(),
-            favorite_config_option_values: Default::default(),
-        };
-
-        let converted: CustomAgentServerSettings = settings.into();
-        let CustomAgentServerSettings::Custom {
-            command: AgentServerCommand { path, .. },
-            ..
-        } = converted
-        else {
-            panic!("Expected Custom variant");
-        };
-
-        assert!(
-            !path.to_string_lossy().starts_with("~"),
-            "Tilde should be expanded for custom agent path"
-        );
-    }
-}
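
The visibility changes in this file (ExternalAgentEntry and its source field, external_agents, resolve_extension_icon_path, collab(), and the LocalExtensionArchiveAgent fields all becoming pub) are what let the relocated tests keep reaching these internals from outside the crate. A hedged sketch of the imports the moved test file might start with, assuming the agent_server_store module is itself reachable from the crate root after the accompanying project.rs changes:

    // tests/integration/ext_agent_tests.rs (sketch; actual file not shown here)
    use project::agent_server_store::{
        AgentServerStore, ExternalAgentEntry, ExternalAgentServerName,
        LocalExtensionArchiveAgent, resolve_extension_icon_path,
    };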

crates/project/src/color_extractor.rs

@@ -134,162 +134,3 @@ fn from_hsl(h: &str, s: &str, l: &str, a: Option<&str>) -> Option<Hsla> {
 
     Some(Hsla { h, s, l, a })
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use gpui::rgba;
-    use lsp::{CompletionItem, CompletionItemKind};
-
-    pub const COLOR_TABLE: &[(&str, Option<u32>)] = &[
-        // -- Invalid --
-        // Invalid hex
-        ("f0f", None),
-        ("#fof", None),
-        // Extra field
-        ("rgb(255, 0, 0, 0.0)", None),
-        ("hsl(120, 0, 0, 0.0)", None),
-        // Missing field
-        ("rgba(255, 0, 0)", None),
-        ("hsla(120, 0, 0)", None),
-        // No decimal after zero
-        ("rgba(255, 0, 0, 0)", None),
-        ("hsla(120, 0, 0, 0)", None),
-        // Decimal after one
-        ("rgba(255, 0, 0, 1.0)", None),
-        ("hsla(120, 0, 0, 1.0)", None),
-        // HEX (sRGB)
-        ("#f0f", Some(0xFF00FFFF)),
-        ("#ff0000", Some(0xFF0000FF)),
-        // RGB / RGBA (sRGB)
-        ("rgb(255, 0, 0)", Some(0xFF0000FF)),
-        ("rgba(255, 0, 0, 0.4)", Some(0xFF000066)),
-        ("rgba(255, 0, 0, 1)", Some(0xFF0000FF)),
-        ("rgb(20%, 0%, 0%)", Some(0x330000FF)),
-        ("rgba(20%, 0%, 0%, 1)", Some(0x330000FF)),
-        ("rgb(0%, 20%, 0%)", Some(0x003300FF)),
-        ("rgba(0%, 20%, 0%, 1)", Some(0x003300FF)),
-        ("rgb(0%, 0%, 20%)", Some(0x000033FF)),
-        ("rgba(0%, 0%, 20%, 1)", Some(0x000033FF)),
-        // HSL / HSLA (sRGB)
-        ("hsl(0, 100%, 50%)", Some(0xFF0000FF)),
-        ("hsl(120, 100%, 50%)", Some(0x00FF00FF)),
-        ("hsla(0, 100%, 50%, 0.0)", Some(0xFF000000)),
-        ("hsla(0, 100%, 50%, 0.4)", Some(0xFF000066)),
-        ("hsla(0, 100%, 50%, 1)", Some(0xFF0000FF)),
-        ("hsla(120, 100%, 50%, 0.0)", Some(0x00FF0000)),
-        ("hsla(120, 100%, 50%, 0.4)", Some(0x00FF0066)),
-        ("hsla(120, 100%, 50%, 1)", Some(0x00FF00FF)),
-    ];
-
-    #[test]
-    fn can_extract_from_label() {
-        for (color_str, color_val) in COLOR_TABLE.iter() {
-            let color = extract_color(&CompletionItem {
-                kind: Some(CompletionItemKind::COLOR),
-                label: color_str.to_string(),
-                detail: None,
-                documentation: None,
-                ..Default::default()
-            });
-
-            assert_eq!(color, color_val.map(|v| Hsla::from(rgba(v))));
-        }
-    }
-
-    #[test]
-    fn only_whole_label_matches_are_allowed() {
-        for (color_str, _) in COLOR_TABLE.iter() {
-            let color = extract_color(&CompletionItem {
-                kind: Some(CompletionItemKind::COLOR),
-                label: format!("{} foo", color_str).to_string(),
-                detail: None,
-                documentation: None,
-                ..Default::default()
-            });
-
-            assert_eq!(color, None);
-        }
-    }
-
-    #[test]
-    fn can_extract_from_detail() {
-        for (color_str, color_val) in COLOR_TABLE.iter() {
-            let color = extract_color(&CompletionItem {
-                kind: Some(CompletionItemKind::COLOR),
-                label: "".to_string(),
-                detail: Some(color_str.to_string()),
-                documentation: None,
-                ..Default::default()
-            });
-
-            assert_eq!(color, color_val.map(|v| Hsla::from(rgba(v))));
-        }
-    }
-
-    #[test]
-    fn only_whole_detail_matches_are_allowed() {
-        for (color_str, _) in COLOR_TABLE.iter() {
-            let color = extract_color(&CompletionItem {
-                kind: Some(CompletionItemKind::COLOR),
-                label: "".to_string(),
-                detail: Some(format!("{} foo", color_str).to_string()),
-                documentation: None,
-                ..Default::default()
-            });
-
-            assert_eq!(color, None);
-        }
-    }
-
-    #[test]
-    fn can_extract_from_documentation_start() {
-        for (color_str, color_val) in COLOR_TABLE.iter() {
-            let color = extract_color(&CompletionItem {
-                kind: Some(CompletionItemKind::COLOR),
-                label: "".to_string(),
-                detail: None,
-                documentation: Some(Documentation::String(
-                    format!("{} foo", color_str).to_string(),
-                )),
-                ..Default::default()
-            });
-
-            assert_eq!(color, color_val.map(|v| Hsla::from(rgba(v))));
-        }
-    }
-
-    #[test]
-    fn can_extract_from_documentation_end() {
-        for (color_str, color_val) in COLOR_TABLE.iter() {
-            let color = extract_color(&CompletionItem {
-                kind: Some(CompletionItemKind::COLOR),
-                label: "".to_string(),
-                detail: None,
-                documentation: Some(Documentation::String(
-                    format!("foo {}", color_str).to_string(),
-                )),
-                ..Default::default()
-            });
-
-            assert_eq!(color, color_val.map(|v| Hsla::from(rgba(v))));
-        }
-    }
-
-    #[test]
-    fn cannot_extract_from_documentation_middle() {
-        for (color_str, _) in COLOR_TABLE.iter() {
-            let color = extract_color(&CompletionItem {
-                kind: Some(CompletionItemKind::COLOR),
-                label: "".to_string(),
-                detail: None,
-                documentation: Some(Documentation::String(
-                    format!("foo {} foo", color_str).to_string(),
-                )),
-                ..Default::default()
-            });
-
-            assert_eq!(color, None);
-        }
-    }
-}

crates/project/src/context_server_store.rs

@@ -289,7 +289,7 @@ impl ContextServerStore {
             .collect()
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub fn test(
         registry: Entity<ContextServerDescriptorRegistry>,
         worktree_store: Entity<WorktreeStore>,
@@ -310,7 +310,7 @@ impl ContextServerStore {
         )
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub fn test_maintain_server_loop(
         context_server_factory: Option<ContextServerFactory>,
         registry: Entity<ContextServerDescriptorRegistry>,
@@ -332,17 +332,17 @@ impl ContextServerStore {
         )
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub fn set_context_server_factory(&mut self, factory: ContextServerFactory) {
         self.context_server_factory = Some(factory);
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub fn registry(&self) -> &Entity<ContextServerDescriptorRegistry> {
         &self.registry
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub fn test_start_server(&mut self, server: Arc<ContextServer>, cx: &mut Context<Self>) {
         let configuration = Arc::new(ContextServerConfiguration::Custom {
             command: ContextServerCommand {
@@ -598,7 +598,7 @@ impl ContextServerStore {
         Ok(())
     }
 
-    async fn create_context_server(
+    pub async fn create_context_server(
         this: WeakEntity<Self>,
         id: ContextServerId,
         configuration: Arc<ContextServerConfiguration>,
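
Two related changes in this file support the move: the test-only helpers switch from cfg(any(test, feature = "test-support")) to cfg(feature = "test-support"), because cfg(test) is never active when the library is compiled for an external test target, and create_context_server becomes pub so the relocated global-timeout test (removed below) can still call it. Roughly, the gate change has this shape for any such helper (illustrative, not a specific function from the diff):

    // Before: available to in-crate unit tests even without the feature.
    //   #[cfg(any(test, feature = "test-support"))]
    //   pub fn test_helper() { /* ... */ }
    //
    // After: only the feature gate remains; the integration target turns it on
    // via required-features = ["test-support"] in Cargo.toml.
    #[cfg(feature = "test-support")]
    pub fn test_helper() { /* hypothetical helper */ }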
@@ -926,886 +926,3 @@ impl ContextServerStore {
         Ok(())
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::{
-        FakeFs, Project, context_server_store::registry::ContextServerDescriptor,
-        project_settings::ProjectSettings,
-    };
-    use context_server::test::create_fake_transport;
-    use gpui::{AppContext, TestAppContext, UpdateGlobal as _};
-    use http_client::{FakeHttpClient, Response};
-    use serde_json::json;
-    use std::{cell::RefCell, path::PathBuf, rc::Rc};
-    use util::path;
-
-    #[gpui::test]
-    async fn test_context_server_status(cx: &mut TestAppContext) {
-        const SERVER_1_ID: &str = "mcp-1";
-        const SERVER_2_ID: &str = "mcp-2";
-
-        let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
-
-        let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
-        let store = cx.new(|cx| {
-            ContextServerStore::test(
-                registry.clone(),
-                project.read(cx).worktree_store(),
-                Some(project.downgrade()),
-                cx,
-            )
-        });
-
-        let server_1_id = ContextServerId(SERVER_1_ID.into());
-        let server_2_id = ContextServerId(SERVER_2_ID.into());
-
-        let server_1 = Arc::new(ContextServer::new(
-            server_1_id.clone(),
-            Arc::new(create_fake_transport(SERVER_1_ID, cx.executor())),
-        ));
-        let server_2 = Arc::new(ContextServer::new(
-            server_2_id.clone(),
-            Arc::new(create_fake_transport(SERVER_2_ID, cx.executor())),
-        ));
-
-        store.update(cx, |store, cx| store.test_start_server(server_1, cx));
-
-        cx.run_until_parked();
-
-        cx.update(|cx| {
-            assert_eq!(
-                store.read(cx).status_for_server(&server_1_id),
-                Some(ContextServerStatus::Running)
-            );
-            assert_eq!(store.read(cx).status_for_server(&server_2_id), None);
-        });
-
-        store.update(cx, |store, cx| {
-            store.test_start_server(server_2.clone(), cx)
-        });
-
-        cx.run_until_parked();
-
-        cx.update(|cx| {
-            assert_eq!(
-                store.read(cx).status_for_server(&server_1_id),
-                Some(ContextServerStatus::Running)
-            );
-            assert_eq!(
-                store.read(cx).status_for_server(&server_2_id),
-                Some(ContextServerStatus::Running)
-            );
-        });
-
-        store
-            .update(cx, |store, cx| store.stop_server(&server_2_id, cx))
-            .unwrap();
-
-        cx.update(|cx| {
-            assert_eq!(
-                store.read(cx).status_for_server(&server_1_id),
-                Some(ContextServerStatus::Running)
-            );
-            assert_eq!(
-                store.read(cx).status_for_server(&server_2_id),
-                Some(ContextServerStatus::Stopped)
-            );
-        });
-    }
-
-    #[gpui::test]
-    async fn test_context_server_status_events(cx: &mut TestAppContext) {
-        const SERVER_1_ID: &str = "mcp-1";
-        const SERVER_2_ID: &str = "mcp-2";
-
-        let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
-
-        let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
-        let store = cx.new(|cx| {
-            ContextServerStore::test(
-                registry.clone(),
-                project.read(cx).worktree_store(),
-                Some(project.downgrade()),
-                cx,
-            )
-        });
-
-        let server_1_id = ContextServerId(SERVER_1_ID.into());
-        let server_2_id = ContextServerId(SERVER_2_ID.into());
-
-        let server_1 = Arc::new(ContextServer::new(
-            server_1_id.clone(),
-            Arc::new(create_fake_transport(SERVER_1_ID, cx.executor())),
-        ));
-        let server_2 = Arc::new(ContextServer::new(
-            server_2_id.clone(),
-            Arc::new(create_fake_transport(SERVER_2_ID, cx.executor())),
-        ));
-
-        let _server_events = assert_server_events(
-            &store,
-            vec![
-                (server_1_id.clone(), ContextServerStatus::Starting),
-                (server_1_id, ContextServerStatus::Running),
-                (server_2_id.clone(), ContextServerStatus::Starting),
-                (server_2_id.clone(), ContextServerStatus::Running),
-                (server_2_id.clone(), ContextServerStatus::Stopped),
-            ],
-            cx,
-        );
-
-        store.update(cx, |store, cx| store.test_start_server(server_1, cx));
-
-        cx.run_until_parked();
-
-        store.update(cx, |store, cx| {
-            store.test_start_server(server_2.clone(), cx)
-        });
-
-        cx.run_until_parked();
-
-        store
-            .update(cx, |store, cx| store.stop_server(&server_2_id, cx))
-            .unwrap();
-    }
-
-    #[gpui::test(iterations = 25)]
-    async fn test_context_server_concurrent_starts(cx: &mut TestAppContext) {
-        const SERVER_1_ID: &str = "mcp-1";
-
-        let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
-
-        let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
-        let store = cx.new(|cx| {
-            ContextServerStore::test(
-                registry.clone(),
-                project.read(cx).worktree_store(),
-                Some(project.downgrade()),
-                cx,
-            )
-        });
-
-        let server_id = ContextServerId(SERVER_1_ID.into());
-
-        let server_with_same_id_1 = Arc::new(ContextServer::new(
-            server_id.clone(),
-            Arc::new(create_fake_transport(SERVER_1_ID, cx.executor())),
-        ));
-        let server_with_same_id_2 = Arc::new(ContextServer::new(
-            server_id.clone(),
-            Arc::new(create_fake_transport(SERVER_1_ID, cx.executor())),
-        ));
-
-        // If we start another server with the same id, we should report that we stopped the previous one
-        let _server_events = assert_server_events(
-            &store,
-            vec![
-                (server_id.clone(), ContextServerStatus::Starting),
-                (server_id.clone(), ContextServerStatus::Stopped),
-                (server_id.clone(), ContextServerStatus::Starting),
-                (server_id.clone(), ContextServerStatus::Running),
-            ],
-            cx,
-        );
-
-        store.update(cx, |store, cx| {
-            store.test_start_server(server_with_same_id_1.clone(), cx)
-        });
-        store.update(cx, |store, cx| {
-            store.test_start_server(server_with_same_id_2.clone(), cx)
-        });
-
-        cx.run_until_parked();
-
-        cx.update(|cx| {
-            assert_eq!(
-                store.read(cx).status_for_server(&server_id),
-                Some(ContextServerStatus::Running)
-            );
-        });
-    }
-
-    #[gpui::test]
-    async fn test_context_server_maintain_servers_loop(cx: &mut TestAppContext) {
-        const SERVER_1_ID: &str = "mcp-1";
-        const SERVER_2_ID: &str = "mcp-2";
-
-        let server_1_id = ContextServerId(SERVER_1_ID.into());
-        let server_2_id = ContextServerId(SERVER_2_ID.into());
-
-        let fake_descriptor_1 = Arc::new(FakeContextServerDescriptor::new(SERVER_1_ID));
-
-        let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
-
-        let executor = cx.executor();
-        let store = project.read_with(cx, |project, _| project.context_server_store());
-        store.update(cx, |store, cx| {
-            store.set_context_server_factory(Box::new(move |id, _| {
-                Arc::new(ContextServer::new(
-                    id.clone(),
-                    Arc::new(create_fake_transport(id.0.to_string(), executor.clone())),
-                ))
-            }));
-            store.registry().update(cx, |registry, cx| {
-                registry.register_context_server_descriptor(
-                    SERVER_1_ID.into(),
-                    fake_descriptor_1,
-                    cx,
-                );
-            });
-        });
-
-        set_context_server_configuration(
-            vec![(
-                server_1_id.0.clone(),
-                settings::ContextServerSettingsContent::Extension {
-                    enabled: true,
-                    remote: false,
-                    settings: json!({
-                        "somevalue": true
-                    }),
-                },
-            )],
-            cx,
-        );
-
-        // Ensure that mcp-1 starts up
-        {
-            let _server_events = assert_server_events(
-                &store,
-                vec![
-                    (server_1_id.clone(), ContextServerStatus::Starting),
-                    (server_1_id.clone(), ContextServerStatus::Running),
-                ],
-                cx,
-            );
-            cx.run_until_parked();
-        }
-
-        // Ensure that mcp-1 is restarted when the configuration was changed
-        {
-            let _server_events = assert_server_events(
-                &store,
-                vec![
-                    (server_1_id.clone(), ContextServerStatus::Stopped),
-                    (server_1_id.clone(), ContextServerStatus::Starting),
-                    (server_1_id.clone(), ContextServerStatus::Running),
-                ],
-                cx,
-            );
-            set_context_server_configuration(
-                vec![(
-                    server_1_id.0.clone(),
-                    settings::ContextServerSettingsContent::Extension {
-                        enabled: true,
-                        remote: false,
-                        settings: json!({
-                            "somevalue": false
-                        }),
-                    },
-                )],
-                cx,
-            );
-
-            cx.run_until_parked();
-        }
-
-        // Ensure that mcp-1 is not restarted when the configuration was not changed
-        {
-            let _server_events = assert_server_events(&store, vec![], cx);
-            set_context_server_configuration(
-                vec![(
-                    server_1_id.0.clone(),
-                    settings::ContextServerSettingsContent::Extension {
-                        enabled: true,
-                        remote: false,
-                        settings: json!({
-                            "somevalue": false
-                        }),
-                    },
-                )],
-                cx,
-            );
-
-            cx.run_until_parked();
-        }
-
-        // Ensure that mcp-2 is started once it is added to the settings
-        {
-            let _server_events = assert_server_events(
-                &store,
-                vec![
-                    (server_2_id.clone(), ContextServerStatus::Starting),
-                    (server_2_id.clone(), ContextServerStatus::Running),
-                ],
-                cx,
-            );
-            set_context_server_configuration(
-                vec![
-                    (
-                        server_1_id.0.clone(),
-                        settings::ContextServerSettingsContent::Extension {
-                            enabled: true,
-                            remote: false,
-                            settings: json!({
-                                "somevalue": false
-                            }),
-                        },
-                    ),
-                    (
-                        server_2_id.0.clone(),
-                        settings::ContextServerSettingsContent::Stdio {
-                            enabled: true,
-                            remote: false,
-                            command: ContextServerCommand {
-                                path: "somebinary".into(),
-                                args: vec!["arg".to_string()],
-                                env: None,
-                                timeout: None,
-                            },
-                        },
-                    ),
-                ],
-                cx,
-            );
-
-            cx.run_until_parked();
-        }
-
-        // Ensure that mcp-2 is restarted once the args have changed
-        {
-            let _server_events = assert_server_events(
-                &store,
-                vec![
-                    (server_2_id.clone(), ContextServerStatus::Stopped),
-                    (server_2_id.clone(), ContextServerStatus::Starting),
-                    (server_2_id.clone(), ContextServerStatus::Running),
-                ],
-                cx,
-            );
-            set_context_server_configuration(
-                vec![
-                    (
-                        server_1_id.0.clone(),
-                        settings::ContextServerSettingsContent::Extension {
-                            enabled: true,
-                            remote: false,
-                            settings: json!({
-                                "somevalue": false
-                            }),
-                        },
-                    ),
-                    (
-                        server_2_id.0.clone(),
-                        settings::ContextServerSettingsContent::Stdio {
-                            enabled: true,
-                            remote: false,
-                            command: ContextServerCommand {
-                                path: "somebinary".into(),
-                                args: vec!["anotherArg".to_string()],
-                                env: None,
-                                timeout: None,
-                            },
-                        },
-                    ),
-                ],
-                cx,
-            );
-
-            cx.run_until_parked();
-        }
-
-        // Ensure that mcp-2 is removed once it is removed from the settings
-        {
-            let _server_events = assert_server_events(
-                &store,
-                vec![(server_2_id.clone(), ContextServerStatus::Stopped)],
-                cx,
-            );
-            set_context_server_configuration(
-                vec![(
-                    server_1_id.0.clone(),
-                    settings::ContextServerSettingsContent::Extension {
-                        enabled: true,
-                        remote: false,
-                        settings: json!({
-                            "somevalue": false
-                        }),
-                    },
-                )],
-                cx,
-            );
-
-            cx.run_until_parked();
-
-            cx.update(|cx| {
-                assert_eq!(store.read(cx).status_for_server(&server_2_id), None);
-            });
-        }
-
-        // Ensure that nothing happens if the settings do not change
-        {
-            let _server_events = assert_server_events(&store, vec![], cx);
-            set_context_server_configuration(
-                vec![(
-                    server_1_id.0.clone(),
-                    settings::ContextServerSettingsContent::Extension {
-                        enabled: true,
-                        remote: false,
-                        settings: json!({
-                            "somevalue": false
-                        }),
-                    },
-                )],
-                cx,
-            );
-
-            cx.run_until_parked();
-
-            cx.update(|cx| {
-                assert_eq!(
-                    store.read(cx).status_for_server(&server_1_id),
-                    Some(ContextServerStatus::Running)
-                );
-                assert_eq!(store.read(cx).status_for_server(&server_2_id), None);
-            });
-        }
-    }
-
-    #[gpui::test]
-    async fn test_context_server_enabled_disabled(cx: &mut TestAppContext) {
-        const SERVER_1_ID: &str = "mcp-1";
-
-        let server_1_id = ContextServerId(SERVER_1_ID.into());
-
-        let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
-
-        let executor = cx.executor();
-        let store = project.read_with(cx, |project, _| project.context_server_store());
-        store.update(cx, |store, _| {
-            store.set_context_server_factory(Box::new(move |id, _| {
-                Arc::new(ContextServer::new(
-                    id.clone(),
-                    Arc::new(create_fake_transport(id.0.to_string(), executor.clone())),
-                ))
-            }));
-        });
-
-        set_context_server_configuration(
-            vec![(
-                server_1_id.0.clone(),
-                settings::ContextServerSettingsContent::Stdio {
-                    enabled: true,
-                    remote: false,
-                    command: ContextServerCommand {
-                        path: "somebinary".into(),
-                        args: vec!["arg".to_string()],
-                        env: None,
-                        timeout: None,
-                    },
-                },
-            )],
-            cx,
-        );
-
-        // Ensure that mcp-1 starts up
-        {
-            let _server_events = assert_server_events(
-                &store,
-                vec![
-                    (server_1_id.clone(), ContextServerStatus::Starting),
-                    (server_1_id.clone(), ContextServerStatus::Running),
-                ],
-                cx,
-            );
-            cx.run_until_parked();
-        }
-
-        // Ensure that mcp-1 is stopped once it is disabled.
-        {
-            let _server_events = assert_server_events(
-                &store,
-                vec![(server_1_id.clone(), ContextServerStatus::Stopped)],
-                cx,
-            );
-            set_context_server_configuration(
-                vec![(
-                    server_1_id.0.clone(),
-                    settings::ContextServerSettingsContent::Stdio {
-                        enabled: false,
-                        remote: false,
-                        command: ContextServerCommand {
-                            path: "somebinary".into(),
-                            args: vec!["arg".to_string()],
-                            env: None,
-                            timeout: None,
-                        },
-                    },
-                )],
-                cx,
-            );
-
-            cx.run_until_parked();
-        }
-
-        // Ensure that mcp-1 is started once it is enabled again.
-        {
-            let _server_events = assert_server_events(
-                &store,
-                vec![
-                    (server_1_id.clone(), ContextServerStatus::Starting),
-                    (server_1_id.clone(), ContextServerStatus::Running),
-                ],
-                cx,
-            );
-            set_context_server_configuration(
-                vec![(
-                    server_1_id.0.clone(),
-                    settings::ContextServerSettingsContent::Stdio {
-                        enabled: true,
-                        remote: false,
-                        command: ContextServerCommand {
-                            path: "somebinary".into(),
-                            args: vec!["arg".to_string()],
-                            timeout: None,
-                            env: None,
-                        },
-                    },
-                )],
-                cx,
-            );
-
-            cx.run_until_parked();
-        }
-    }
-
-    fn set_context_server_configuration(
-        context_servers: Vec<(Arc<str>, settings::ContextServerSettingsContent)>,
-        cx: &mut TestAppContext,
-    ) {
-        cx.update(|cx| {
-            SettingsStore::update_global(cx, |store, cx| {
-                store.update_user_settings(cx, |content| {
-                    content.project.context_servers.clear();
-                    for (id, config) in context_servers {
-                        content.project.context_servers.insert(id, config);
-                    }
-                });
-            })
-        });
-    }
-
-    #[gpui::test]
-    async fn test_remote_context_server(cx: &mut TestAppContext) {
-        const SERVER_ID: &str = "remote-server";
-        let server_id = ContextServerId(SERVER_ID.into());
-        let server_url = "http://example.com/api";
-
-        let client = FakeHttpClient::create(|_| async move {
-            use http_client::AsyncBody;
-
-            let response = Response::builder()
-                .status(200)
-                .header("Content-Type", "application/json")
-                .body(AsyncBody::from(
-                    serde_json::to_string(&json!({
-                        "jsonrpc": "2.0",
-                        "id": 0,
-                        "result": {
-                            "protocolVersion": "2024-11-05",
-                            "capabilities": {},
-                            "serverInfo": {
-                                "name": "test-server",
-                                "version": "1.0.0"
-                            }
-                        }
-                    }))
-                    .unwrap(),
-                ))
-                .unwrap();
-            Ok(response)
-        });
-        cx.update(|cx| cx.set_http_client(client));
-
-        let (_fs, project) = setup_context_server_test(cx, json!({ "code.rs": "" }), vec![]).await;
-
-        let store = project.read_with(cx, |project, _| project.context_server_store());
-
-        set_context_server_configuration(
-            vec![(
-                server_id.0.clone(),
-                settings::ContextServerSettingsContent::Http {
-                    enabled: true,
-                    url: server_url.to_string(),
-                    headers: Default::default(),
-                    timeout: None,
-                },
-            )],
-            cx,
-        );
-
-        let _server_events = assert_server_events(
-            &store,
-            vec![
-                (server_id.clone(), ContextServerStatus::Starting),
-                (server_id.clone(), ContextServerStatus::Running),
-            ],
-            cx,
-        );
-        cx.run_until_parked();
-    }
-
-    struct ServerEvents {
-        received_event_count: Rc<RefCell<usize>>,
-        expected_event_count: usize,
-        _subscription: Subscription,
-    }
-
-    impl Drop for ServerEvents {
-        fn drop(&mut self) {
-            let actual_event_count = *self.received_event_count.borrow();
-            assert_eq!(
-                actual_event_count, self.expected_event_count,
-                "
-                Expected to receive {} context server store events, but received {} events",
-                self.expected_event_count, actual_event_count
-            );
-        }
-    }
-
-    #[gpui::test]
-    async fn test_context_server_global_timeout(cx: &mut TestAppContext) {
-        cx.update(|cx| {
-            let settings_store = SettingsStore::test(cx);
-            cx.set_global(settings_store);
-            SettingsStore::update_global(cx, |store, cx| {
-                store
-                    .set_user_settings(r#"{"context_server_timeout": 90}"#, cx)
-                    .expect("Failed to set test user settings");
-            });
-        });
-
-        let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
-
-        let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
-        let store = cx.new(|cx| {
-            ContextServerStore::test(
-                registry.clone(),
-                project.read(cx).worktree_store(),
-                Some(project.downgrade()),
-                cx,
-            )
-        });
-
-        let mut async_cx = cx.to_async();
-        let result = ContextServerStore::create_context_server(
-            store.downgrade(),
-            ContextServerId("test-server".into()),
-            Arc::new(ContextServerConfiguration::Http {
-                url: url::Url::parse("http://localhost:8080").expect("Failed to parse test URL"),
-                headers: Default::default(),
-                timeout: None,
-            }),
-            &mut async_cx,
-        )
-        .await;
-
-        assert!(
-            result.is_ok(),
-            "Server should be created successfully with global timeout"
-        );
-    }
-
-    #[gpui::test]
-    async fn test_context_server_per_server_timeout_override(cx: &mut TestAppContext) {
-        const SERVER_ID: &str = "test-server";
-
-        cx.update(|cx| {
-            let settings_store = SettingsStore::test(cx);
-            cx.set_global(settings_store);
-            SettingsStore::update_global(cx, |store, cx| {
-                store
-                    .set_user_settings(r#"{"context_server_timeout": 60}"#, cx)
-                    .expect("Failed to set test user settings");
-            });
-        });
-
-        let (_fs, project) = setup_context_server_test(
-            cx,
-            json!({"code.rs": ""}),
-            vec![(
-                SERVER_ID.into(),
-                ContextServerSettings::Http {
-                    enabled: true,
-                    url: "http://localhost:8080".to_string(),
-                    headers: Default::default(),
-                    timeout: Some(120),
-                },
-            )],
-        )
-        .await;
-
-        let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
-        let store = cx.new(|cx| {
-            ContextServerStore::test(
-                registry.clone(),
-                project.read(cx).worktree_store(),
-                Some(project.downgrade()),
-                cx,
-            )
-        });
-
-        let mut async_cx = cx.to_async();
-        let result = ContextServerStore::create_context_server(
-            store.downgrade(),
-            ContextServerId("test-server".into()),
-            Arc::new(ContextServerConfiguration::Http {
-                url: url::Url::parse("http://localhost:8080").expect("Failed to parse test URL"),
-                headers: Default::default(),
-                timeout: Some(120),
-            }),
-            &mut async_cx,
-        )
-        .await;
-
-        assert!(
-            result.is_ok(),
-            "Server should be created successfully with per-server timeout override"
-        );
-    }
-
-    #[gpui::test]
-    async fn test_context_server_stdio_timeout(cx: &mut TestAppContext) {
-        let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
-
-        let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
-        let store = cx.new(|cx| {
-            ContextServerStore::test(
-                registry.clone(),
-                project.read(cx).worktree_store(),
-                Some(project.downgrade()),
-                cx,
-            )
-        });
-
-        let mut async_cx = cx.to_async();
-        let result = ContextServerStore::create_context_server(
-            store.downgrade(),
-            ContextServerId("stdio-server".into()),
-            Arc::new(ContextServerConfiguration::Custom {
-                command: ContextServerCommand {
-                    path: "/usr/bin/node".into(),
-                    args: vec!["server.js".into()],
-                    env: None,
-                    timeout: Some(180000),
-                },
-                remote: false,
-            }),
-            &mut async_cx,
-        )
-        .await;
-
-        assert!(
-            result.is_ok(),
-            "Stdio server should be created successfully with timeout"
-        );
-    }
-
-    fn assert_server_events(
-        store: &Entity<ContextServerStore>,
-        expected_events: Vec<(ContextServerId, ContextServerStatus)>,
-        cx: &mut TestAppContext,
-    ) -> ServerEvents {
-        cx.update(|cx| {
-            let mut ix = 0;
-            let received_event_count = Rc::new(RefCell::new(0));
-            let expected_event_count = expected_events.len();
-            let subscription = cx.subscribe(store, {
-                let received_event_count = received_event_count.clone();
-                move |_, event, _| match event {
-                    Event::ServerStatusChanged {
-                        server_id: actual_server_id,
-                        status: actual_status,
-                    } => {
-                        let (expected_server_id, expected_status) = &expected_events[ix];
-
-                        assert_eq!(
-                            actual_server_id, expected_server_id,
-                            "Expected different server id at index {}",
-                            ix
-                        );
-                        assert_eq!(
-                            actual_status, expected_status,
-                            "Expected different status at index {}",
-                            ix
-                        );
-                        ix += 1;
-                        *received_event_count.borrow_mut() += 1;
-                    }
-                }
-            });
-            ServerEvents {
-                expected_event_count,
-                received_event_count,
-                _subscription: subscription,
-            }
-        })
-    }
-
-    async fn setup_context_server_test(
-        cx: &mut TestAppContext,
-        files: serde_json::Value,
-        context_server_configurations: Vec<(Arc<str>, ContextServerSettings)>,
-    ) -> (Arc<FakeFs>, Entity<Project>) {
-        cx.update(|cx| {
-            let settings_store = SettingsStore::test(cx);
-            cx.set_global(settings_store);
-            let mut settings = ProjectSettings::get_global(cx).clone();
-            for (id, config) in context_server_configurations {
-                settings.context_servers.insert(id, config);
-            }
-            ProjectSettings::override_global(settings, cx);
-        });
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(path!("/test"), files).await;
-        let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
-
-        (fs, project)
-    }
-
-    struct FakeContextServerDescriptor {
-        path: PathBuf,
-    }
-
-    impl FakeContextServerDescriptor {
-        fn new(path: impl Into<PathBuf>) -> Self {
-            Self { path: path.into() }
-        }
-    }
-
-    impl ContextServerDescriptor for FakeContextServerDescriptor {
-        fn command(
-            &self,
-            _worktree_store: Entity<WorktreeStore>,
-            _cx: &AsyncApp,
-        ) -> Task<Result<ContextServerCommand>> {
-            Task::ready(Ok(ContextServerCommand {
-                path: self.path.clone(),
-                args: vec!["arg1".to_string(), "arg2".to_string()],
-                env: None,
-                timeout: None,
-            }))
-        }
-
-        fn configuration(
-            &self,
-            _worktree_store: Entity<WorktreeStore>,
-            _cx: &AsyncApp,
-        ) -> Task<Result<Option<::extension::ContextServerConfiguration>>> {
-            Task::ready(Ok(None))
-        }
-    }
-}

crates/project/src/debugger.rs 🔗

@@ -15,7 +15,7 @@ pub mod breakpoint_store;
 pub mod dap_command;
 pub mod dap_store;
 pub mod locators;
-mod memory;
+pub mod memory;
 pub mod session;
 
 #[cfg(any(feature = "test-support", test))]

crates/project/src/debugger/locators/go.rs 🔗

@@ -6,19 +6,19 @@ use gpui::{BackgroundExecutor, SharedString};
 use serde::{Deserialize, Serialize};
 use task::{DebugScenario, SpawnInTerminal, TaskTemplate};
 
-pub(crate) struct GoLocator;
+pub struct GoLocator;
 
 #[derive(Serialize, Deserialize, Debug, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
-struct DelveLaunchRequest {
-    request: String,
-    mode: String,
-    program: String,
+pub struct DelveLaunchRequest {
+    pub request: String,
+    pub mode: String,
+    pub program: String,
     #[serde(skip_serializing_if = "Option::is_none")]
-    cwd: Option<String>,
-    args: Vec<String>,
-    build_flags: Vec<String>,
-    env: HashMap<String, String>,
+    pub cwd: Option<String>,
+    pub args: Vec<String>,
+    pub build_flags: Vec<String>,
+    pub env: HashMap<String, String>,
 }
 
 fn is_debug_flag(arg: &str) -> Option<bool> {
@@ -245,201 +245,3 @@ impl DapLocator for GoLocator {
         unreachable!()
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use gpui::TestAppContext;
-    use task::{HideStrategy, RevealStrategy, RevealTarget, Shell, TaskTemplate};
-
-    #[gpui::test]
-    async fn test_create_scenario_for_go_build(_: &mut TestAppContext) {
-        let locator = GoLocator;
-        let task = TaskTemplate {
-            label: "go build".into(),
-            command: "go".into(),
-            args: vec!["build".into(), ".".into()],
-            env: Default::default(),
-            cwd: Some("${ZED_WORKTREE_ROOT}".into()),
-            use_new_terminal: false,
-            allow_concurrent_runs: false,
-            reveal: RevealStrategy::Always,
-            reveal_target: RevealTarget::Dock,
-            hide: HideStrategy::Never,
-            shell: Shell::System,
-            tags: vec![],
-            show_summary: true,
-            show_command: true,
-        };
-
-        let scenario = locator
-            .create_scenario(&task, "test label", &DebugAdapterName("Delve".into()))
-            .await;
-
-        assert!(scenario.is_none());
-    }
-
-    #[gpui::test]
-    async fn test_skip_non_go_commands_with_non_delve_adapter(_: &mut TestAppContext) {
-        let locator = GoLocator;
-        let task = TaskTemplate {
-            label: "cargo build".into(),
-            command: "cargo".into(),
-            args: vec!["build".into()],
-            env: Default::default(),
-            cwd: Some("${ZED_WORKTREE_ROOT}".into()),
-            use_new_terminal: false,
-            allow_concurrent_runs: false,
-            reveal: RevealStrategy::Always,
-            reveal_target: RevealTarget::Dock,
-            hide: HideStrategy::Never,
-            shell: Shell::System,
-            tags: vec![],
-            show_summary: true,
-            show_command: true,
-        };
-
-        let scenario = locator
-            .create_scenario(
-                &task,
-                "test label",
-                &DebugAdapterName("SomeOtherAdapter".into()),
-            )
-            .await;
-        assert!(scenario.is_none());
-
-        let scenario = locator
-            .create_scenario(&task, "test label", &DebugAdapterName("Delve".into()))
-            .await;
-        assert!(scenario.is_none());
-    }
-    #[gpui::test]
-    async fn test_go_locator_run(_: &mut TestAppContext) {
-        let locator = GoLocator;
-        let delve = DebugAdapterName("Delve".into());
-
-        let task = TaskTemplate {
-            label: "go run with flags".into(),
-            command: "go".into(),
-            args: vec![
-                "run".to_string(),
-                "-race".to_string(),
-                "-ldflags".to_string(),
-                "-X main.version=1.0".to_string(),
-                "./cmd/myapp".to_string(),
-                "--config".to_string(),
-                "production.yaml".to_string(),
-                "--verbose".to_string(),
-            ],
-            env: {
-                let mut env = HashMap::default();
-                env.insert("GO_ENV".to_string(), "production".to_string());
-                env
-            },
-            cwd: Some("/project/root".into()),
-            ..Default::default()
-        };
-
-        let scenario = locator
-            .create_scenario(&task, "test run label", &delve)
-            .await
-            .unwrap();
-
-        let config: DelveLaunchRequest = serde_json::from_value(scenario.config).unwrap();
-
-        assert_eq!(
-            config,
-            DelveLaunchRequest {
-                request: "launch".to_string(),
-                mode: "debug".to_string(),
-                program: "./cmd/myapp".to_string(),
-                build_flags: vec![
-                    "-race".to_string(),
-                    "-ldflags".to_string(),
-                    "-X main.version=1.0".to_string()
-                ],
-                args: vec![
-                    "--config".to_string(),
-                    "production.yaml".to_string(),
-                    "--verbose".to_string(),
-                ],
-                env: {
-                    let mut env = HashMap::default();
-                    env.insert("GO_ENV".to_string(), "production".to_string());
-                    env
-                },
-                cwd: Some("/project/root".to_string()),
-            }
-        );
-    }
-
-    #[gpui::test]
-    async fn test_go_locator_test(_: &mut TestAppContext) {
-        let locator = GoLocator;
-        let delve = DebugAdapterName("Delve".into());
-
-        // Test with tags and run flag
-        let task_with_tags = TaskTemplate {
-            label: "test".into(),
-            command: "go".into(),
-            args: vec![
-                "test".to_string(),
-                "-tags".to_string(),
-                "integration,unit".to_string(),
-                "-run".to_string(),
-                "Foo".to_string(),
-                ".".to_string(),
-            ],
-            ..Default::default()
-        };
-        let result = locator
-            .create_scenario(&task_with_tags, "", &delve)
-            .await
-            .unwrap();
-
-        let config: DelveLaunchRequest = serde_json::from_value(result.config).unwrap();
-
-        assert_eq!(
-            config,
-            DelveLaunchRequest {
-                request: "launch".to_string(),
-                mode: "test".to_string(),
-                program: ".".to_string(),
-                build_flags: vec!["-tags".to_string(), "integration,unit".to_string(),],
-                args: vec![
-                    "-test.run".to_string(),
-                    "Foo".to_string(),
-                    "-test.v".to_string()
-                ],
-                env: HashMap::default(),
-                cwd: None,
-            }
-        );
-    }
-
-    #[gpui::test]
-    async fn test_skip_unsupported_go_commands(_: &mut TestAppContext) {
-        let locator = GoLocator;
-        let task = TaskTemplate {
-            label: "go clean".into(),
-            command: "go".into(),
-            args: vec!["clean".into()],
-            env: Default::default(),
-            cwd: Some("${ZED_WORKTREE_ROOT}".into()),
-            use_new_terminal: false,
-            allow_concurrent_runs: false,
-            reveal: RevealStrategy::Always,
-            reveal_target: RevealTarget::Dock,
-            hide: HideStrategy::Never,
-            shell: Shell::System,
-            tags: vec![],
-            show_summary: true,
-            show_command: true,
-        };
-
-        let scenario = locator
-            .create_scenario(&task, "test label", &DebugAdapterName("Delve".into()))
-            .await;
-        assert!(scenario.is_none());
-    }
-}

crates/project/src/debugger/locators/python.rs 🔗

@@ -7,7 +7,7 @@ use gpui::{BackgroundExecutor, SharedString};
 
 use task::{DebugScenario, SpawnInTerminal, TaskTemplate, VariableName};
 
-pub(crate) struct PythonLocator;
+pub struct PythonLocator;
 
 #[async_trait]
 impl DapLocator for PythonLocator {
@@ -94,53 +94,3 @@ impl DapLocator for PythonLocator {
         bail!("Python locator should not require DapLocator::run to be ran");
     }
 }
-
-#[cfg(test)]
-mod test {
-    use serde_json::json;
-
-    use super::*;
-
-    #[gpui::test]
-    async fn test_python_locator() {
-        let adapter = DebugAdapterName("Debugpy".into());
-        let build_task = TaskTemplate {
-            label: "run module '$ZED_FILE'".into(),
-            command: "$ZED_CUSTOM_PYTHON_ACTIVE_ZED_TOOLCHAIN".into(),
-            args: vec!["-m".into(), "$ZED_CUSTOM_PYTHON_MODULE_NAME".into()],
-            env: Default::default(),
-            cwd: Some("$ZED_WORKTREE_ROOT".into()),
-            use_new_terminal: false,
-            allow_concurrent_runs: false,
-            reveal: task::RevealStrategy::Always,
-            reveal_target: task::RevealTarget::Dock,
-            hide: task::HideStrategy::Never,
-            tags: vec!["python-module-main-method".into()],
-            shell: task::Shell::System,
-            show_summary: false,
-            show_command: false,
-        };
-
-        let expected_scenario = DebugScenario {
-            adapter: "Debugpy".into(),
-            label: "run module 'main.py'".into(),
-            build: None,
-            config: json!({
-                "request": "launch",
-                "python": "$ZED_CUSTOM_PYTHON_ACTIVE_ZED_TOOLCHAIN",
-                "args": [],
-                "cwd": "$ZED_WORKTREE_ROOT",
-                "module": "$ZED_CUSTOM_PYTHON_MODULE_NAME",
-            }),
-            tcp_connection: None,
-        };
-
-        assert_eq!(
-            PythonLocator
-                .create_scenario(&build_task, "run module 'main.py'", &adapter)
-                .await
-                .expect("Failed to create a scenario"),
-            expected_scenario
-        );
-    }
-}

crates/project/src/debugger/memory.rs 🔗

@@ -23,15 +23,15 @@ const PAGE_SIZE: u64 = 4096;
 /// Represents the contents of a single page. We special-case unmapped pages to be allocation-free,
 /// since they're going to make up the majority of the memory in a program space (even though the user might not even get to see them - ever).
 #[derive(Clone, Debug)]
-pub(super) enum PageContents {
+pub enum PageContents {
     /// Whole page is unreadable.
     Unmapped,
     Mapped(Arc<MappedPageContents>),
 }
 
 impl PageContents {
-    #[cfg(test)]
-    fn mapped(contents: Vec<u8>) -> Self {
+    #[cfg(feature = "test-support")]
+    pub fn mapped(contents: Vec<u8>) -> Self {
         PageContents::Mapped(Arc::new(MappedPageContents(
             vec![PageChunk::Mapped(contents.into())].into(),
         )))
@@ -68,7 +68,7 @@ impl MappedPageContents {
 /// of the memory of a debuggee.
 
 #[derive(Default, Debug)]
-pub(super) struct MappedPageContents(
+pub struct MappedPageContents(
     /// Most of the time there should be only one chunk (either mapped or unmapped),
     /// but we do leave the possibility open of having multiple regions of memory in a single page.
     SmallVec<[PageChunk; 1]>,
@@ -77,7 +77,7 @@ pub(super) struct MappedPageContents(
 type MemoryAddress = u64;
 #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Ord, Eq)]
 #[repr(transparent)]
-pub(super) struct PageAddress(u64);
+pub struct PageAddress(pub u64);
 
 impl PageAddress {
     pub(super) fn iter_range(
@@ -273,7 +273,7 @@ pub struct MemoryIterator {
 }
 
 impl MemoryIterator {
-    fn new(
+    pub fn new(
         range: RangeInclusive<MemoryAddress>,
         pages: std::vec::IntoIter<(PageAddress, PageContents)>,
     ) -> Self {
@@ -336,49 +336,3 @@ impl Iterator for MemoryIterator {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use crate::debugger::{
-        MemoryCell,
-        memory::{MemoryIterator, PageAddress, PageContents},
-    };
-
-    #[test]
-    fn iterate_over_unmapped_memory() {
-        let empty_iterator = MemoryIterator::new(0..=127, Default::default());
-        let actual = empty_iterator.collect::<Vec<_>>();
-        let expected = vec![MemoryCell(None); 128];
-        assert_eq!(actual.len(), expected.len());
-        assert_eq!(actual, expected);
-    }
-
-    #[test]
-    fn iterate_over_partially_mapped_memory() {
-        let it = MemoryIterator::new(
-            0..=127,
-            vec![(PageAddress(5), PageContents::mapped(vec![1]))].into_iter(),
-        );
-        let actual = it.collect::<Vec<_>>();
-        let expected = std::iter::repeat_n(MemoryCell(None), 5)
-            .chain(std::iter::once(MemoryCell(Some(1))))
-            .chain(std::iter::repeat_n(MemoryCell(None), 122))
-            .collect::<Vec<_>>();
-        assert_eq!(actual.len(), expected.len());
-        assert_eq!(actual, expected);
-    }
-
-    #[test]
-    fn reads_from_the_middle_of_a_page() {
-        let partial_iter = MemoryIterator::new(
-            20..=30,
-            vec![(PageAddress(0), PageContents::mapped((0..255).collect()))].into_iter(),
-        );
-        let actual = partial_iter.collect::<Vec<_>>();
-        let expected = (20..=30)
-            .map(|val| MemoryCell(Some(val)))
-            .collect::<Vec<_>>();
-        assert_eq!(actual.len(), expected.len());
-        assert_eq!(actual, expected);
-    }
-}

crates/project/src/git_store/conflict_set.rs 🔗

@@ -274,469 +274,3 @@ impl ConflictSet {
 }
 
 impl EventEmitter<ConflictSetUpdate> for ConflictSet {}
-
-#[cfg(test)]
-mod tests {
-    use std::sync::mpsc;
-
-    use crate::Project;
-
-    use super::*;
-    use fs::FakeFs;
-    use git::{
-        repository::{RepoPath, repo_path},
-        status::{UnmergedStatus, UnmergedStatusCode},
-    };
-    use gpui::{BackgroundExecutor, TestAppContext};
-    use serde_json::json;
-    use text::{Buffer, BufferId, Point, ReplicaId, ToOffset as _};
-    use unindent::Unindent as _;
-    use util::{path, rel_path::rel_path};
-
-    #[test]
-    fn test_parse_conflicts_in_buffer() {
-        // Create a buffer with conflict markers
-        let test_content = r#"
-            This is some text before the conflict.
-            <<<<<<< HEAD
-            This is our version
-            =======
-            This is their version
-            >>>>>>> branch-name
-
-            Another conflict:
-            <<<<<<< HEAD
-            Our second change
-            ||||||| merged common ancestors
-            Original content
-            =======
-            Their second change
-            >>>>>>> branch-name
-        "#
-        .unindent();
-
-        let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
-        let snapshot = buffer.snapshot();
-
-        let conflict_snapshot = ConflictSet::parse(&snapshot);
-        assert_eq!(conflict_snapshot.conflicts.len(), 2);
-
-        let first = &conflict_snapshot.conflicts[0];
-        assert!(first.base.is_none());
-        assert_eq!(first.ours_branch_name.as_ref(), "HEAD");
-        assert_eq!(first.theirs_branch_name.as_ref(), "branch-name");
-        let our_text = snapshot
-            .text_for_range(first.ours.clone())
-            .collect::<String>();
-        let their_text = snapshot
-            .text_for_range(first.theirs.clone())
-            .collect::<String>();
-        assert_eq!(our_text, "This is our version\n");
-        assert_eq!(their_text, "This is their version\n");
-
-        let second = &conflict_snapshot.conflicts[1];
-        assert!(second.base.is_some());
-        assert_eq!(second.ours_branch_name.as_ref(), "HEAD");
-        assert_eq!(second.theirs_branch_name.as_ref(), "branch-name");
-        let our_text = snapshot
-            .text_for_range(second.ours.clone())
-            .collect::<String>();
-        let their_text = snapshot
-            .text_for_range(second.theirs.clone())
-            .collect::<String>();
-        let base_text = snapshot
-            .text_for_range(second.base.as_ref().unwrap().clone())
-            .collect::<String>();
-        assert_eq!(our_text, "Our second change\n");
-        assert_eq!(their_text, "Their second change\n");
-        assert_eq!(base_text, "Original content\n");
-
-        // Test conflicts_in_range
-        let range = snapshot.anchor_before(0)..snapshot.anchor_before(snapshot.len());
-        let conflicts_in_range = conflict_snapshot.conflicts_in_range(range, &snapshot);
-        assert_eq!(conflicts_in_range.len(), 2);
-
-        // Test with a range that includes only the first conflict
-        let first_conflict_end = conflict_snapshot.conflicts[0].range.end;
-        let range = snapshot.anchor_before(0)..first_conflict_end;
-        let conflicts_in_range = conflict_snapshot.conflicts_in_range(range, &snapshot);
-        assert_eq!(conflicts_in_range.len(), 1);
-
-        // Test with a range that includes only the second conflict
-        let second_conflict_start = conflict_snapshot.conflicts[1].range.start;
-        let range = second_conflict_start..snapshot.anchor_before(snapshot.len());
-        let conflicts_in_range = conflict_snapshot.conflicts_in_range(range, &snapshot);
-        assert_eq!(conflicts_in_range.len(), 1);
-
-        // Test with a range that doesn't include any conflicts
-        let range = buffer.anchor_after(first_conflict_end.to_next_offset(&buffer))
-            ..buffer.anchor_before(second_conflict_start.to_previous_offset(&buffer));
-        let conflicts_in_range = conflict_snapshot.conflicts_in_range(range, &snapshot);
-        assert_eq!(conflicts_in_range.len(), 0);
-    }
-
-    #[test]
-    fn test_nested_conflict_markers() {
-        // Create a buffer with nested conflict markers
-        let test_content = r#"
-            This is some text before the conflict.
-            <<<<<<< HEAD
-            This is our version
-            <<<<<<< HEAD
-            This is a nested conflict marker
-            =======
-            This is their version in a nested conflict
-            >>>>>>> branch-nested
-            =======
-            This is their version
-            >>>>>>> branch-name
-        "#
-        .unindent();
-
-        let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
-        let snapshot = buffer.snapshot();
-
-        let conflict_snapshot = ConflictSet::parse(&snapshot);
-
-        assert_eq!(conflict_snapshot.conflicts.len(), 1);
-
-        // The conflict should have our version, their version, but no base
-        let conflict = &conflict_snapshot.conflicts[0];
-        assert!(conflict.base.is_none());
-        assert_eq!(conflict.ours_branch_name.as_ref(), "HEAD");
-        assert_eq!(conflict.theirs_branch_name.as_ref(), "branch-nested");
-
-        // Check that the nested conflict was detected correctly
-        let our_text = snapshot
-            .text_for_range(conflict.ours.clone())
-            .collect::<String>();
-        assert_eq!(our_text, "This is a nested conflict marker\n");
-        let their_text = snapshot
-            .text_for_range(conflict.theirs.clone())
-            .collect::<String>();
-        assert_eq!(their_text, "This is their version in a nested conflict\n");
-    }
-
-    #[test]
-    fn test_conflict_markers_at_eof() {
-        let test_content = r#"
-            <<<<<<< ours
-            =======
-            This is their version
-            >>>>>>> "#
-            .unindent();
-        let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
-        let snapshot = buffer.snapshot();
-
-        let conflict_snapshot = ConflictSet::parse(&snapshot);
-        assert_eq!(conflict_snapshot.conflicts.len(), 1);
-        assert_eq!(
-            conflict_snapshot.conflicts[0].ours_branch_name.as_ref(),
-            "ours"
-        );
-        assert_eq!(
-            conflict_snapshot.conflicts[0].theirs_branch_name.as_ref(),
-            "Origin" // default branch name if there is none
-        );
-    }
-
-    #[test]
-    fn test_conflicts_in_range() {
-        // Create a buffer with conflict markers
-        let test_content = r#"
-            one
-            <<<<<<< HEAD1
-            two
-            =======
-            three
-            >>>>>>> branch1
-            four
-            five
-            <<<<<<< HEAD2
-            six
-            =======
-            seven
-            >>>>>>> branch2
-            eight
-            nine
-            <<<<<<< HEAD3
-            ten
-            =======
-            eleven
-            >>>>>>> branch3
-            twelve
-            <<<<<<< HEAD4
-            thirteen
-            =======
-            fourteen
-            >>>>>>> branch4
-            fifteen
-        "#
-        .unindent();
-
-        let buffer_id = BufferId::new(1).unwrap();
-        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content.clone());
-        let snapshot = buffer.snapshot();
-
-        let conflict_snapshot = ConflictSet::parse(&snapshot);
-        assert_eq!(conflict_snapshot.conflicts.len(), 4);
-        assert_eq!(
-            conflict_snapshot.conflicts[0].ours_branch_name.as_ref(),
-            "HEAD1"
-        );
-        assert_eq!(
-            conflict_snapshot.conflicts[0].theirs_branch_name.as_ref(),
-            "branch1"
-        );
-        assert_eq!(
-            conflict_snapshot.conflicts[1].ours_branch_name.as_ref(),
-            "HEAD2"
-        );
-        assert_eq!(
-            conflict_snapshot.conflicts[1].theirs_branch_name.as_ref(),
-            "branch2"
-        );
-        assert_eq!(
-            conflict_snapshot.conflicts[2].ours_branch_name.as_ref(),
-            "HEAD3"
-        );
-        assert_eq!(
-            conflict_snapshot.conflicts[2].theirs_branch_name.as_ref(),
-            "branch3"
-        );
-        assert_eq!(
-            conflict_snapshot.conflicts[3].ours_branch_name.as_ref(),
-            "HEAD4"
-        );
-        assert_eq!(
-            conflict_snapshot.conflicts[3].theirs_branch_name.as_ref(),
-            "branch4"
-        );
-
-        let range = test_content.find("seven").unwrap()..test_content.find("eleven").unwrap();
-        let range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end);
-        assert_eq!(
-            conflict_snapshot.conflicts_in_range(range, &snapshot),
-            &conflict_snapshot.conflicts[1..=2]
-        );
-
-        let range = test_content.find("one").unwrap()..test_content.find("<<<<<<< HEAD2").unwrap();
-        let range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end);
-        assert_eq!(
-            conflict_snapshot.conflicts_in_range(range, &snapshot),
-            &conflict_snapshot.conflicts[0..=1]
-        );
-
-        let range =
-            test_content.find("eight").unwrap() - 1..test_content.find(">>>>>>> branch3").unwrap();
-        let range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end);
-        assert_eq!(
-            conflict_snapshot.conflicts_in_range(range, &snapshot),
-            &conflict_snapshot.conflicts[1..=2]
-        );
-
-        let range = test_content.find("thirteen").unwrap() - 1..test_content.len();
-        let range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end);
-        assert_eq!(
-            conflict_snapshot.conflicts_in_range(range, &snapshot),
-            &conflict_snapshot.conflicts[3..=3]
-        );
-    }
-
-    #[gpui::test]
-    async fn test_conflict_updates(executor: BackgroundExecutor, cx: &mut TestAppContext) {
-        zlog::init_test();
-        cx.update(|cx| {
-            settings::init(cx);
-        });
-        let initial_text = "
-            one
-            two
-            three
-            four
-            five
-        "
-        .unindent();
-        let fs = FakeFs::new(executor);
-        fs.insert_tree(
-            path!("/project"),
-            json!({
-                ".git": {},
-                "a.txt": initial_text,
-            }),
-        )
-        .await;
-        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
-        let (git_store, buffer) = project.update(cx, |project, cx| {
-            (
-                project.git_store().clone(),
-                project.open_local_buffer(path!("/project/a.txt"), cx),
-            )
-        });
-        let buffer = buffer.await.unwrap();
-        let conflict_set = git_store.update(cx, |git_store, cx| {
-            git_store.open_conflict_set(buffer.clone(), cx)
-        });
-        let (events_tx, events_rx) = mpsc::channel::<ConflictSetUpdate>();
-        let _conflict_set_subscription = cx.update(|cx| {
-            cx.subscribe(&conflict_set, move |_, event, _| {
-                events_tx.send(event.clone()).ok();
-            })
-        });
-        let conflicts_snapshot =
-            conflict_set.read_with(cx, |conflict_set, _| conflict_set.snapshot());
-        assert!(conflicts_snapshot.conflicts.is_empty());
-
-        buffer.update(cx, |buffer, cx| {
-            buffer.edit(
-                [
-                    (4..4, "<<<<<<< HEAD\n"),
-                    (14..14, "=======\nTWO\n>>>>>>> branch\n"),
-                ],
-                None,
-                cx,
-            );
-        });
-
-        cx.run_until_parked();
-        events_rx.try_recv().expect_err(
-            "no conflicts should be registered as long as the file's status is unchanged",
-        );
-
-        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
-            state.unmerged_paths.insert(
-                repo_path("a.txt"),
-                UnmergedStatus {
-                    first_head: UnmergedStatusCode::Updated,
-                    second_head: UnmergedStatusCode::Updated,
-                },
-            );
-            // Cause the repository to emit MergeHeadsChanged.
-            state.refs.insert("MERGE_HEAD".into(), "123".into())
-        })
-        .unwrap();
-
-        cx.run_until_parked();
-        let update = events_rx
-            .try_recv()
-            .expect("status change should trigger conflict parsing");
-        assert_eq!(update.old_range, 0..0);
-        assert_eq!(update.new_range, 0..1);
-
-        let conflict = conflict_set.read_with(cx, |conflict_set, _| {
-            conflict_set.snapshot().conflicts[0].clone()
-        });
-        cx.update(|cx| {
-            conflict.resolve(buffer.clone(), std::slice::from_ref(&conflict.theirs), cx);
-        });
-
-        cx.run_until_parked();
-        let update = events_rx
-            .try_recv()
-            .expect("conflicts should be removed after resolution");
-        assert_eq!(update.old_range, 0..1);
-        assert_eq!(update.new_range, 0..0);
-    }
-
-    #[gpui::test]
-    async fn test_conflict_updates_without_merge_head(
-        executor: BackgroundExecutor,
-        cx: &mut TestAppContext,
-    ) {
-        zlog::init_test();
-        cx.update(|cx| {
-            settings::init(cx);
-        });
-
-        let initial_text = "
-            zero
-            <<<<<<< HEAD
-            one
-            =======
-            two
-            >>>>>>> Stashed Changes
-            three
-        "
-        .unindent();
-
-        let fs = FakeFs::new(executor);
-        fs.insert_tree(
-            path!("/project"),
-            json!({
-                ".git": {},
-                "a.txt": initial_text,
-            }),
-        )
-        .await;
-
-        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
-        let (git_store, buffer) = project.update(cx, |project, cx| {
-            (
-                project.git_store().clone(),
-                project.open_local_buffer(path!("/project/a.txt"), cx),
-            )
-        });
-
-        cx.run_until_parked();
-        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
-            state.unmerged_paths.insert(
-                RepoPath::from_rel_path(rel_path("a.txt")),
-                UnmergedStatus {
-                    first_head: UnmergedStatusCode::Updated,
-                    second_head: UnmergedStatusCode::Updated,
-                },
-            )
-        })
-        .unwrap();
-
-        let buffer = buffer.await.unwrap();
-
-        // Open the conflict set for a file that currently has conflicts.
-        let conflict_set = git_store.update(cx, |git_store, cx| {
-            git_store.open_conflict_set(buffer.clone(), cx)
-        });
-
-        cx.run_until_parked();
-        conflict_set.update(cx, |conflict_set, cx| {
-            let conflict_range = conflict_set.snapshot().conflicts[0]
-                .range
-                .to_point(buffer.read(cx));
-            assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0));
-        });
-
-        // Simulate the conflict being removed by e.g. staging the file.
-        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
-            state.unmerged_paths.remove(&repo_path("a.txt"))
-        })
-        .unwrap();
-
-        cx.run_until_parked();
-        conflict_set.update(cx, |conflict_set, _| {
-            assert!(!conflict_set.has_conflict);
-            assert_eq!(conflict_set.snapshot.conflicts.len(), 0);
-        });
-
-        // Simulate the conflict being re-added.
-        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
-            state.unmerged_paths.insert(
-                repo_path("a.txt"),
-                UnmergedStatus {
-                    first_head: UnmergedStatusCode::Updated,
-                    second_head: UnmergedStatusCode::Updated,
-                },
-            )
-        })
-        .unwrap();
-
-        cx.run_until_parked();
-        conflict_set.update(cx, |conflict_set, cx| {
-            let conflict_range = conflict_set.snapshot().conflicts[0]
-                .range
-                .to_point(buffer.read(cx));
-            assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0));
-        });
-    }
-}

crates/project/src/git_store/git_traversal.rs 🔗

@@ -250,544 +250,3 @@ impl AsRef<Entry> for GitEntry {
         &self.entry
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use std::time::Duration;
-
-    use crate::Project;
-
-    use super::*;
-    use fs::FakeFs;
-    use git::status::{FileStatus, StatusCode, TrackedSummary, UnmergedStatus, UnmergedStatusCode};
-    use gpui::TestAppContext;
-    use serde_json::json;
-    use settings::SettingsStore;
-    use util::{path, rel_path::rel_path};
-
-    const CONFLICT: FileStatus = FileStatus::Unmerged(UnmergedStatus {
-        first_head: UnmergedStatusCode::Updated,
-        second_head: UnmergedStatusCode::Updated,
-    });
-    const ADDED: GitSummary = GitSummary {
-        index: TrackedSummary::ADDED,
-        count: 1,
-        ..GitSummary::UNCHANGED
-    };
-    const MODIFIED: GitSummary = GitSummary {
-        index: TrackedSummary::MODIFIED,
-        count: 1,
-        ..GitSummary::UNCHANGED
-    };
-
-    #[gpui::test]
-    async fn test_git_traversal_with_one_repo(cx: &mut TestAppContext) {
-        init_test(cx);
-        let fs = FakeFs::new(cx.background_executor.clone());
-        fs.insert_tree(
-            path!("/root"),
-            json!({
-                "x": {
-                    ".git": {},
-                    "x1.txt": "foo",
-                    "x2.txt": "bar",
-                    "y": {
-                        ".git": {},
-                        "y1.txt": "baz",
-                        "y2.txt": "qux"
-                    },
-                    "z.txt": "sneaky..."
-                },
-                "z": {
-                    ".git": {},
-                    "z1.txt": "quux",
-                    "z2.txt": "quuux"
-                }
-            }),
-        )
-        .await;
-
-        fs.set_status_for_repo(
-            Path::new(path!("/root/x/.git")),
-            &[
-                ("x2.txt", StatusCode::Modified.index()),
-                ("z.txt", StatusCode::Added.index()),
-            ],
-        );
-        fs.set_status_for_repo(Path::new(path!("/root/x/y/.git")), &[("y1.txt", CONFLICT)]);
-        fs.set_status_for_repo(
-            Path::new(path!("/root/z/.git")),
-            &[("z2.txt", StatusCode::Added.index())],
-        );
-
-        let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
-        cx.executor().run_until_parked();
-
-        let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
-            (
-                project.git_store().read(cx).repo_snapshots(cx),
-                project.worktrees(cx).next().unwrap().read(cx).snapshot(),
-            )
-        });
-
-        let traversal = GitTraversal::new(
-            &repo_snapshots,
-            worktree_snapshot.traverse_from_path(true, false, true, RelPath::unix("x").unwrap()),
-        );
-        let entries = traversal
-            .map(|entry| (entry.path.clone(), entry.git_summary))
-            .collect::<Vec<_>>();
-        pretty_assertions::assert_eq!(
-            entries,
-            [
-                (rel_path("x/x1.txt").into(), GitSummary::UNCHANGED),
-                (rel_path("x/x2.txt").into(), MODIFIED),
-                (rel_path("x/y/y1.txt").into(), GitSummary::CONFLICT),
-                (rel_path("x/y/y2.txt").into(), GitSummary::UNCHANGED),
-                (rel_path("x/z.txt").into(), ADDED),
-                (rel_path("z/z1.txt").into(), GitSummary::UNCHANGED),
-                (rel_path("z/z2.txt").into(), ADDED),
-            ]
-        )
-    }
-
-    #[gpui::test]
-    async fn test_git_traversal_with_nested_repos(cx: &mut TestAppContext) {
-        init_test(cx);
-        let fs = FakeFs::new(cx.background_executor.clone());
-        fs.insert_tree(
-            path!("/root"),
-            json!({
-                "x": {
-                    ".git": {},
-                    "x1.txt": "foo",
-                    "x2.txt": "bar",
-                    "y": {
-                        ".git": {},
-                        "y1.txt": "baz",
-                        "y2.txt": "qux"
-                    },
-                    "z.txt": "sneaky..."
-                },
-                "z": {
-                    ".git": {},
-                    "z1.txt": "quux",
-                    "z2.txt": "quuux"
-                }
-            }),
-        )
-        .await;
-
-        fs.set_status_for_repo(
-            Path::new(path!("/root/x/.git")),
-            &[
-                ("x2.txt", StatusCode::Modified.index()),
-                ("z.txt", StatusCode::Added.index()),
-            ],
-        );
-        fs.set_status_for_repo(Path::new(path!("/root/x/y/.git")), &[("y1.txt", CONFLICT)]);
-
-        fs.set_status_for_repo(
-            Path::new(path!("/root/z/.git")),
-            &[("z2.txt", StatusCode::Added.index())],
-        );
-
-        let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
-        cx.executor().run_until_parked();
-
-        let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
-            (
-                project.git_store().read(cx).repo_snapshots(cx),
-                project.worktrees(cx).next().unwrap().read(cx).snapshot(),
-            )
-        });
-
-        // Sanity check the propagation for x/y and z
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("x/y", GitSummary::CONFLICT),
-                ("x/y/y1.txt", GitSummary::CONFLICT),
-                ("x/y/y2.txt", GitSummary::UNCHANGED),
-            ],
-        );
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("z", ADDED),
-                ("z/z1.txt", GitSummary::UNCHANGED),
-                ("z/z2.txt", ADDED),
-            ],
-        );
-
-        // Test one of the fundamental cases of propagation blocking, the transition from one git repository to another
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("x", MODIFIED + ADDED),
-                ("x/y", GitSummary::CONFLICT),
-                ("x/y/y1.txt", GitSummary::CONFLICT),
-            ],
-        );
-
-        // Sanity check everything around it
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("x", MODIFIED + ADDED),
-                ("x/x1.txt", GitSummary::UNCHANGED),
-                ("x/x2.txt", MODIFIED),
-                ("x/y", GitSummary::CONFLICT),
-                ("x/y/y1.txt", GitSummary::CONFLICT),
-                ("x/y/y2.txt", GitSummary::UNCHANGED),
-                ("x/z.txt", ADDED),
-            ],
-        );
-
-        // Test the other fundamental case, transitioning from git repository to non-git repository
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("", GitSummary::UNCHANGED),
-                ("x", MODIFIED + ADDED),
-                ("x/x1.txt", GitSummary::UNCHANGED),
-            ],
-        );
-
-        // And all together now
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("", GitSummary::UNCHANGED),
-                ("x", MODIFIED + ADDED),
-                ("x/x1.txt", GitSummary::UNCHANGED),
-                ("x/x2.txt", MODIFIED),
-                ("x/y", GitSummary::CONFLICT),
-                ("x/y/y1.txt", GitSummary::CONFLICT),
-                ("x/y/y2.txt", GitSummary::UNCHANGED),
-                ("x/z.txt", ADDED),
-                ("z", ADDED),
-                ("z/z1.txt", GitSummary::UNCHANGED),
-                ("z/z2.txt", ADDED),
-            ],
-        );
-    }
-
-    #[gpui::test]
-    async fn test_git_traversal_simple(cx: &mut TestAppContext) {
-        init_test(cx);
-        let fs = FakeFs::new(cx.background_executor.clone());
-        fs.insert_tree(
-            path!("/root"),
-            json!({
-                ".git": {},
-                "a": {
-                    "b": {
-                        "c1.txt": "",
-                        "c2.txt": "",
-                    },
-                    "d": {
-                        "e1.txt": "",
-                        "e2.txt": "",
-                        "e3.txt": "",
-                    }
-                },
-                "f": {
-                    "no-status.txt": ""
-                },
-                "g": {
-                    "h1.txt": "",
-                    "h2.txt": ""
-                },
-            }),
-        )
-        .await;
-
-        fs.set_status_for_repo(
-            Path::new(path!("/root/.git")),
-            &[
-                ("a/b/c1.txt", StatusCode::Added.index()),
-                ("a/d/e2.txt", StatusCode::Modified.index()),
-                ("g/h2.txt", CONFLICT),
-            ],
-        );
-
-        let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
-        cx.executor().run_until_parked();
-
-        let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
-            (
-                project.git_store().read(cx).repo_snapshots(cx),
-                project.worktrees(cx).next().unwrap().read(cx).snapshot(),
-            )
-        });
-
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("", GitSummary::CONFLICT + MODIFIED + ADDED),
-                ("g", GitSummary::CONFLICT),
-                ("g/h2.txt", GitSummary::CONFLICT),
-            ],
-        );
-
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("", GitSummary::CONFLICT + ADDED + MODIFIED),
-                ("a", ADDED + MODIFIED),
-                ("a/b", ADDED),
-                ("a/b/c1.txt", ADDED),
-                ("a/b/c2.txt", GitSummary::UNCHANGED),
-                ("a/d", MODIFIED),
-                ("a/d/e2.txt", MODIFIED),
-                ("f", GitSummary::UNCHANGED),
-                ("f/no-status.txt", GitSummary::UNCHANGED),
-                ("g", GitSummary::CONFLICT),
-                ("g/h2.txt", GitSummary::CONFLICT),
-            ],
-        );
-
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("a/b", ADDED),
-                ("a/b/c1.txt", ADDED),
-                ("a/b/c2.txt", GitSummary::UNCHANGED),
-                ("a/d", MODIFIED),
-                ("a/d/e1.txt", GitSummary::UNCHANGED),
-                ("a/d/e2.txt", MODIFIED),
-                ("f", GitSummary::UNCHANGED),
-                ("f/no-status.txt", GitSummary::UNCHANGED),
-                ("g", GitSummary::CONFLICT),
-            ],
-        );
-
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("a/b/c1.txt", ADDED),
-                ("a/b/c2.txt", GitSummary::UNCHANGED),
-                ("a/d/e1.txt", GitSummary::UNCHANGED),
-                ("a/d/e2.txt", MODIFIED),
-                ("f/no-status.txt", GitSummary::UNCHANGED),
-            ],
-        );
-    }
-
-    #[gpui::test]
-    async fn test_git_traversal_with_repos_under_project(cx: &mut TestAppContext) {
-        init_test(cx);
-        let fs = FakeFs::new(cx.background_executor.clone());
-        fs.insert_tree(
-            path!("/root"),
-            json!({
-                "x": {
-                    ".git": {},
-                    "x1.txt": "foo",
-                    "x2.txt": "bar"
-                },
-                "y": {
-                    ".git": {},
-                    "y1.txt": "baz",
-                    "y2.txt": "qux"
-                },
-                "z": {
-                    ".git": {},
-                    "z1.txt": "quux",
-                    "z2.txt": "quuux"
-                }
-            }),
-        )
-        .await;
-
-        fs.set_status_for_repo(
-            Path::new(path!("/root/x/.git")),
-            &[("x1.txt", StatusCode::Added.index())],
-        );
-        fs.set_status_for_repo(
-            Path::new(path!("/root/y/.git")),
-            &[
-                ("y1.txt", CONFLICT),
-                ("y2.txt", StatusCode::Modified.index()),
-            ],
-        );
-        fs.set_status_for_repo(
-            Path::new(path!("/root/z/.git")),
-            &[("z2.txt", StatusCode::Modified.index())],
-        );
-
-        let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
-        cx.executor().run_until_parked();
-
-        let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
-            (
-                project.git_store().read(cx).repo_snapshots(cx),
-                project.worktrees(cx).next().unwrap().read(cx).snapshot(),
-            )
-        });
-
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[("x", ADDED), ("x/x1.txt", ADDED)],
-        );
-
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("y", GitSummary::CONFLICT + MODIFIED),
-                ("y/y1.txt", GitSummary::CONFLICT),
-                ("y/y2.txt", MODIFIED),
-            ],
-        );
-
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[("z", MODIFIED), ("z/z2.txt", MODIFIED)],
-        );
-
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[("x", ADDED), ("x/x1.txt", ADDED)],
-        );
-
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("x", ADDED),
-                ("x/x1.txt", ADDED),
-                ("x/x2.txt", GitSummary::UNCHANGED),
-                ("y", GitSummary::CONFLICT + MODIFIED),
-                ("y/y1.txt", GitSummary::CONFLICT),
-                ("y/y2.txt", MODIFIED),
-                ("z", MODIFIED),
-                ("z/z1.txt", GitSummary::UNCHANGED),
-                ("z/z2.txt", MODIFIED),
-            ],
-        );
-    }
-
-    fn init_test(cx: &mut gpui::TestAppContext) {
-        zlog::init_test();
-
-        cx.update(|cx| {
-            let settings_store = SettingsStore::test(cx);
-            cx.set_global(settings_store);
-        });
-    }
-
-    #[gpui::test]
-    async fn test_bump_mtime_of_git_repo_workdir(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        // Create a worktree with a git directory.
-        let fs = FakeFs::new(cx.background_executor.clone());
-        fs.insert_tree(
-            path!("/root"),
-            json!({
-                ".git": {},
-                "a.txt": "",
-                "b": {
-                    "c.txt": "",
-                },
-            }),
-        )
-        .await;
-        fs.set_head_and_index_for_repo(
-            path!("/root/.git").as_ref(),
-            &[("a.txt", "".into()), ("b/c.txt", "".into())],
-        );
-        cx.run_until_parked();
-
-        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
-        cx.executor().run_until_parked();
-
-        let (old_entry_ids, old_mtimes) = project.read_with(cx, |project, cx| {
-            let tree = project.worktrees(cx).next().unwrap().read(cx);
-            (
-                tree.entries(true, 0).map(|e| e.id).collect::<Vec<_>>(),
-                tree.entries(true, 0).map(|e| e.mtime).collect::<Vec<_>>(),
-            )
-        });
-
-        // Regression test: after the directory is scanned, touch the git repo's
-        // working directory, bumping its mtime. That directory keeps its project
-        // entry id after the directories are re-scanned.
-        fs.touch_path(path!("/root")).await;
-        cx.executor().run_until_parked();
-
-        let (new_entry_ids, new_mtimes) = project.read_with(cx, |project, cx| {
-            let tree = project.worktrees(cx).next().unwrap().read(cx);
-            (
-                tree.entries(true, 0).map(|e| e.id).collect::<Vec<_>>(),
-                tree.entries(true, 0).map(|e| e.mtime).collect::<Vec<_>>(),
-            )
-        });
-        assert_eq!(new_entry_ids, old_entry_ids);
-        assert_ne!(new_mtimes, old_mtimes);
-
-        // Regression test: changes to the git repository should still be
-        // detected.
-        fs.set_head_for_repo(
-            path!("/root/.git").as_ref(),
-            &[("a.txt", "".into()), ("b/c.txt", "something-else".into())],
-            "deadbeef",
-        );
-        cx.executor().run_until_parked();
-        cx.executor().advance_clock(Duration::from_secs(1));
-
-        let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
-            (
-                project.git_store().read(cx).repo_snapshots(cx),
-                project.worktrees(cx).next().unwrap().read(cx).snapshot(),
-            )
-        });
-
-        check_git_statuses(
-            &repo_snapshots,
-            &worktree_snapshot,
-            &[
-                ("", MODIFIED),
-                ("a.txt", GitSummary::UNCHANGED),
-                ("b/c.txt", MODIFIED),
-            ],
-        );
-    }
-
-    #[track_caller]
-    fn check_git_statuses(
-        repo_snapshots: &HashMap<RepositoryId, RepositorySnapshot>,
-        worktree_snapshot: &worktree::Snapshot,
-        expected_statuses: &[(&str, GitSummary)],
-    ) {
-        let mut traversal = GitTraversal::new(
-            repo_snapshots,
-            worktree_snapshot.traverse_from_path(true, true, false, RelPath::empty()),
-        );
-        let found_statuses = expected_statuses
-            .iter()
-            .map(|&(path, _)| {
-                let git_entry = traversal
-                    .find(|git_entry| git_entry.path.as_ref() == rel_path(path))
-                    .unwrap_or_else(|| panic!("Traversal has no entry for {path:?}"));
-                (path, git_entry.git_summary)
-            })
-            .collect::<Vec<_>>();
-        pretty_assertions::assert_eq!(found_statuses, expected_statuses);
-    }
-}
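The moved git-status tests now reach `GitTraversal` through the crate's public surface from `tests/integration/git_store.rs`. A minimal sketch of the pattern the `check_git_statuses` helper above wraps, assuming `repo_snapshots` and `worktree_snapshot` are read off a `Project` exactly as in the removed test:

    // Join the repository snapshots with a worktree traversal and read the
    // aggregated summary for one path; ADDED is the same GitSummary constant
    // asserted on above.
    let mut traversal = GitTraversal::new(
        &repo_snapshots,
        worktree_snapshot.traverse_from_path(true, true, false, RelPath::empty()),
    );
    let entry = traversal
        .find(|entry| entry.path.as_ref() == rel_path("x/x1.txt"))
        .expect("traversal should yield an entry for x/x1.txt");
    assert_eq!(entry.git_summary, ADDED);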

crates/project/src/image_store.rs 🔗

@@ -108,7 +108,7 @@ pub struct ImageItem {
 }
 
 impl ImageItem {
-    fn compute_metadata_from_bytes(image_bytes: &[u8]) -> Result<ImageMetadata> {
+    pub fn compute_metadata_from_bytes(image_bytes: &[u8]) -> Result<ImageMetadata> {
         let image_format = image::guess_format(image_bytes)?;
 
         let mut image_reader = ImageReader::new(std::io::Cursor::new(image_bytes));
@@ -904,84 +904,3 @@ fn create_gpui_image(content: Vec<u8>) -> anyhow::Result<Arc<gpui::Image>> {
         content,
     )))
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use fs::FakeFs;
-    use gpui::TestAppContext;
-    use serde_json::json;
-    use settings::SettingsStore;
-    use util::rel_path::rel_path;
-
-    pub fn init_test(cx: &mut TestAppContext) {
-        zlog::init_test();
-
-        cx.update(|cx| {
-            let settings_store = SettingsStore::test(cx);
-            cx.set_global(settings_store);
-        });
-    }
-
-    #[gpui::test]
-    async fn test_image_not_loaded_twice(cx: &mut TestAppContext) {
-        init_test(cx);
-        let fs = FakeFs::new(cx.executor());
-
-        fs.insert_tree("/root", json!({})).await;
-        // Create a png file that consists of a single white pixel
-        fs.insert_file(
-            "/root/image_1.png",
-            vec![
-                0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48,
-                0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x06, 0x00, 0x00,
-                0x00, 0x1F, 0x15, 0xC4, 0x89, 0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, 0x78,
-                0x9C, 0x63, 0x00, 0x01, 0x00, 0x00, 0x05, 0x00, 0x01, 0x0D, 0x0A, 0x2D, 0xB4, 0x00,
-                0x00, 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82,
-            ],
-        )
-        .await;
-
-        let project = Project::test(fs, ["/root".as_ref()], cx).await;
-
-        let worktree_id =
-            cx.update(|cx| project.read(cx).worktrees(cx).next().unwrap().read(cx).id());
-
-        let project_path = ProjectPath {
-            worktree_id,
-            path: rel_path("image_1.png").into(),
-        };
-
-        let (task1, task2) = project.update(cx, |project, cx| {
-            (
-                project.open_image(project_path.clone(), cx),
-                project.open_image(project_path.clone(), cx),
-            )
-        });
-
-        let image1 = task1.await.unwrap();
-        let image2 = task2.await.unwrap();
-
-        assert_eq!(image1, image2);
-    }
-
-    #[gpui::test]
-    fn test_compute_metadata_from_bytes() {
-        // Single white pixel PNG
-        let png_bytes = vec![
-            0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48,
-            0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x06, 0x00, 0x00,
-            0x00, 0x1F, 0x15, 0xC4, 0x89, 0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, 0x78,
-            0x9C, 0x63, 0x00, 0x01, 0x00, 0x00, 0x05, 0x00, 0x01, 0x0D, 0x0A, 0x2D, 0xB4, 0x00,
-            0x00, 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82,
-        ];
-
-        let metadata = ImageItem::compute_metadata_from_bytes(&png_bytes).unwrap();
-
-        assert_eq!(metadata.width, 1);
-        assert_eq!(metadata.height, 1);
-        assert_eq!(metadata.file_size, png_bytes.len() as u64);
-        assert_eq!(metadata.format, image::ImageFormat::Png);
-        assert!(metadata.colors.is_some());
-    }
-}
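`ImageItem::compute_metadata_from_bytes` is promoted to `pub` so the metadata test can call it from the new integration crate. A minimal sketch of that call, reusing the one-pixel PNG fixture from the removed test and assuming the type stays reachable as `project::image_store::ImageItem`:

    use project::image_store::ImageItem;

    // Single white-pixel PNG, identical to the fixture in the moved test.
    let png_bytes: Vec<u8> = vec![
        0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48,
        0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x06, 0x00, 0x00,
        0x00, 0x1F, 0x15, 0xC4, 0x89, 0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, 0x78,
        0x9C, 0x63, 0x00, 0x01, 0x00, 0x00, 0x05, 0x00, 0x01, 0x0D, 0x0A, 0x2D, 0xB4, 0x00,
        0x00, 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82,
    ];
    let metadata = ImageItem::compute_metadata_from_bytes(&png_bytes)
        .expect("valid PNG bytes should decode");
    assert_eq!((metadata.width, metadata.height), (1, 1));
    assert_eq!(metadata.file_size, png_bytes.len() as u64);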

crates/project/src/lsp_command.rs 🔗

@@ -1,4 +1,4 @@
-mod signature_help;
+pub mod signature_help;
 
 use crate::{
     CodeAction, CompletionSource, CoreCompletion, CoreCompletionResponse, DocumentColor,
@@ -273,7 +273,7 @@ pub(crate) struct LinkedEditingRange {
 }
 
 #[derive(Clone, Debug)]
-pub(crate) struct GetDocumentDiagnostics {
+pub struct GetDocumentDiagnostics {
     /// We cannot blindly rely on server's capabilities.diagnostic_provider, as they're a singular field, whereas
     /// a server can register multiple diagnostic providers post-mortem.
     pub registration_id: Option<SharedString>,
@@ -3797,7 +3797,7 @@ impl GetDocumentDiagnostics {
             .collect()
     }
 
-    fn deserialize_lsp_diagnostic(diagnostic: proto::LspDiagnostic) -> Result<lsp::Diagnostic> {
+    pub fn deserialize_lsp_diagnostic(diagnostic: proto::LspDiagnostic) -> Result<lsp::Diagnostic> {
         let start = diagnostic.start.context("invalid start range")?;
         let end = diagnostic.end.context("invalid end range")?;
 
@@ -3871,7 +3871,7 @@ impl GetDocumentDiagnostics {
         })
     }
 
-    fn serialize_lsp_diagnostic(diagnostic: lsp::Diagnostic) -> Result<proto::LspDiagnostic> {
+    pub fn serialize_lsp_diagnostic(diagnostic: lsp::Diagnostic) -> Result<proto::LspDiagnostic> {
         let range = language::range_from_lsp(diagnostic.range);
         let related_information = diagnostic
             .related_information
@@ -4527,132 +4527,3 @@ fn process_full_diagnostics_report(
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use lsp::{DiagnosticSeverity, DiagnosticTag};
-    use serde_json::json;
-
-    #[test]
-    fn test_serialize_lsp_diagnostic() {
-        let lsp_diagnostic = lsp::Diagnostic {
-            range: lsp::Range {
-                start: lsp::Position::new(0, 1),
-                end: lsp::Position::new(2, 3),
-            },
-            severity: Some(DiagnosticSeverity::ERROR),
-            code: Some(lsp::NumberOrString::String("E001".to_string())),
-            source: Some("test-source".to_string()),
-            message: "Test error message".to_string(),
-            related_information: None,
-            tags: Some(vec![DiagnosticTag::DEPRECATED]),
-            code_description: None,
-            data: Some(json!({"detail": "test detail"})),
-        };
-
-        let proto_diagnostic = GetDocumentDiagnostics::serialize_lsp_diagnostic(lsp_diagnostic)
-            .expect("Failed to serialize diagnostic");
-
-        let start = proto_diagnostic.start.unwrap();
-        let end = proto_diagnostic.end.unwrap();
-        assert_eq!(start.row, 0);
-        assert_eq!(start.column, 1);
-        assert_eq!(end.row, 2);
-        assert_eq!(end.column, 3);
-        assert_eq!(
-            proto_diagnostic.severity,
-            proto::lsp_diagnostic::Severity::Error as i32
-        );
-        assert_eq!(proto_diagnostic.code, Some("E001".to_string()));
-        assert_eq!(proto_diagnostic.source, Some("test-source".to_string()));
-        assert_eq!(proto_diagnostic.message, "Test error message");
-    }
-
-    #[test]
-    fn test_deserialize_lsp_diagnostic() {
-        let proto_diagnostic = proto::LspDiagnostic {
-            start: Some(proto::PointUtf16 { row: 0, column: 1 }),
-            end: Some(proto::PointUtf16 { row: 2, column: 3 }),
-            severity: proto::lsp_diagnostic::Severity::Warning as i32,
-            code: Some("ERR".to_string()),
-            source: Some("Prism".to_string()),
-            message: "assigned but unused variable - a".to_string(),
-            related_information: vec![],
-            tags: vec![],
-            code_description: None,
-            data: None,
-        };
-
-        let lsp_diagnostic = GetDocumentDiagnostics::deserialize_lsp_diagnostic(proto_diagnostic)
-            .expect("Failed to deserialize diagnostic");
-
-        assert_eq!(lsp_diagnostic.range.start.line, 0);
-        assert_eq!(lsp_diagnostic.range.start.character, 1);
-        assert_eq!(lsp_diagnostic.range.end.line, 2);
-        assert_eq!(lsp_diagnostic.range.end.character, 3);
-        assert_eq!(lsp_diagnostic.severity, Some(DiagnosticSeverity::WARNING));
-        assert_eq!(
-            lsp_diagnostic.code,
-            Some(lsp::NumberOrString::String("ERR".to_string()))
-        );
-        assert_eq!(lsp_diagnostic.source, Some("Prism".to_string()));
-        assert_eq!(lsp_diagnostic.message, "assigned but unused variable - a");
-    }
-
-    #[test]
-    fn test_related_information() {
-        let related_info = lsp::DiagnosticRelatedInformation {
-            location: lsp::Location {
-                uri: lsp::Uri::from_str("file:///test.rs").unwrap(),
-                range: lsp::Range {
-                    start: lsp::Position::new(1, 1),
-                    end: lsp::Position::new(1, 5),
-                },
-            },
-            message: "Related info message".to_string(),
-        };
-
-        let lsp_diagnostic = lsp::Diagnostic {
-            range: lsp::Range {
-                start: lsp::Position::new(0, 0),
-                end: lsp::Position::new(0, 1),
-            },
-            severity: Some(DiagnosticSeverity::INFORMATION),
-            code: None,
-            source: Some("Prism".to_string()),
-            message: "assigned but unused variable - a".to_string(),
-            related_information: Some(vec![related_info]),
-            tags: None,
-            code_description: None,
-            data: None,
-        };
-
-        let proto_diagnostic = GetDocumentDiagnostics::serialize_lsp_diagnostic(lsp_diagnostic)
-            .expect("Failed to serialize diagnostic");
-
-        assert_eq!(proto_diagnostic.related_information.len(), 1);
-        let related = &proto_diagnostic.related_information[0];
-        assert_eq!(related.location_url, Some("file:///test.rs".to_string()));
-        assert_eq!(related.message, "Related info message");
-    }
-
-    #[test]
-    fn test_invalid_ranges() {
-        let proto_diagnostic = proto::LspDiagnostic {
-            start: None,
-            end: Some(proto::PointUtf16 { row: 2, column: 3 }),
-            severity: proto::lsp_diagnostic::Severity::Error as i32,
-            code: None,
-            source: None,
-            message: "Test message".to_string(),
-            related_information: vec![],
-            tags: vec![],
-            code_description: None,
-            data: None,
-        };
-
-        let result = GetDocumentDiagnostics::deserialize_lsp_diagnostic(proto_diagnostic);
-        assert!(result.is_err());
-    }
-}
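Making `GetDocumentDiagnostics` and its (de)serialization helpers `pub` lets the moved tests drive them from outside the crate. A sketch of the round trip they cover, assuming the type is reachable as `project::lsp_command::GetDocumentDiagnostics`:

    use lsp::DiagnosticSeverity;
    use project::lsp_command::GetDocumentDiagnostics;

    // Serialize an lsp::Diagnostic into the proto representation and back,
    // mirroring the removed serialize/deserialize tests.
    let diagnostic = lsp::Diagnostic {
        range: lsp::Range {
            start: lsp::Position::new(0, 1),
            end: lsp::Position::new(2, 3),
        },
        severity: Some(DiagnosticSeverity::ERROR),
        code: None,
        source: Some("test-source".to_string()),
        message: "Test error message".to_string(),
        related_information: None,
        tags: None,
        code_description: None,
        data: None,
    };
    let proto = GetDocumentDiagnostics::serialize_lsp_diagnostic(diagnostic).unwrap();
    let round_tripped = GetDocumentDiagnostics::deserialize_lsp_diagnostic(proto).unwrap();
    assert_eq!(round_tripped.range.start.character, 1);
    assert_eq!(round_tripped.severity, Some(DiagnosticSeverity::ERROR));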

crates/project/src/lsp_command/signature_help.rs 🔗

@@ -269,525 +269,3 @@ fn proto_to_lsp_documentation(documentation: proto::Documentation) -> Option<lsp
         })
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use gpui::{FontWeight, HighlightStyle, SharedString, TestAppContext};
-    use lsp::{Documentation, MarkupContent, MarkupKind};
-
-    use crate::lsp_command::signature_help::SignatureHelp;
-
-    fn current_parameter() -> HighlightStyle {
-        HighlightStyle {
-            font_weight: Some(FontWeight::EXTRA_BOLD),
-            ..Default::default()
-        }
-    }
-
-    #[gpui::test]
-    fn test_create_signature_help_markdown_string_1(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![lsp::SignatureInformation {
-                label: "fn test(foo: u8, bar: &str)".to_string(),
-                documentation: Some(Documentation::String(
-                    "This is a test documentation".to_string(),
-                )),
-                parameters: Some(vec![
-                    lsp::ParameterInformation {
-                        label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
-                        documentation: None,
-                    },
-                    lsp::ParameterInformation {
-                        label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
-                        documentation: None,
-                    },
-                ]),
-                active_parameter: None,
-            }],
-            active_signature: Some(0),
-            active_parameter: Some(0),
-        };
-        let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(maybe_markdown.is_some());
-
-        let markdown = maybe_markdown.unwrap();
-        let signature = markdown.signatures[markdown.active_signature].clone();
-        let markdown = (signature.label, signature.highlights);
-        assert_eq!(
-            markdown,
-            (
-                SharedString::new("fn test(foo: u8, bar: &str)"),
-                vec![(8..15, current_parameter())]
-            )
-        );
-        assert_eq!(
-            signature
-                .documentation
-                .unwrap()
-                .update(cx, |documentation, _| documentation.source().to_owned()),
-            "This is a test documentation",
-        )
-    }
-
-    #[gpui::test]
-    fn test_create_signature_help_markdown_string_2(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![lsp::SignatureInformation {
-                label: "fn test(foo: u8, bar: &str)".to_string(),
-                documentation: Some(Documentation::MarkupContent(MarkupContent {
-                    kind: MarkupKind::Markdown,
-                    value: "This is a test documentation".to_string(),
-                })),
-                parameters: Some(vec![
-                    lsp::ParameterInformation {
-                        label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
-                        documentation: None,
-                    },
-                    lsp::ParameterInformation {
-                        label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
-                        documentation: None,
-                    },
-                ]),
-                active_parameter: None,
-            }],
-            active_signature: Some(0),
-            active_parameter: Some(1),
-        };
-        let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(maybe_markdown.is_some());
-
-        let markdown = maybe_markdown.unwrap();
-        let signature = markdown.signatures[markdown.active_signature].clone();
-        let markdown = (signature.label, signature.highlights);
-        assert_eq!(
-            markdown,
-            (
-                SharedString::new("fn test(foo: u8, bar: &str)"),
-                vec![(17..26, current_parameter())]
-            )
-        );
-        assert_eq!(
-            signature
-                .documentation
-                .unwrap()
-                .update(cx, |documentation, _| documentation.source().to_owned()),
-            "This is a test documentation",
-        )
-    }
-
-    #[gpui::test]
-    fn test_create_signature_help_markdown_string_3(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![
-                lsp::SignatureInformation {
-                    label: "fn test1(foo: u8, bar: &str)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-                lsp::SignatureInformation {
-                    label: "fn test2(hoge: String, fuga: bool)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("hoge: String".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("fuga: bool".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-            ],
-            active_signature: Some(0),
-            active_parameter: Some(0),
-        };
-        let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(maybe_markdown.is_some());
-
-        let markdown = maybe_markdown.unwrap();
-        let signature = markdown.signatures[markdown.active_signature].clone();
-        let markdown = (signature.label, signature.highlights);
-        assert_eq!(
-            markdown,
-            (
-                SharedString::new("fn test1(foo: u8, bar: &str)"),
-                vec![(9..16, current_parameter())]
-            )
-        );
-    }
-
-    #[gpui::test]
-    fn test_create_signature_help_markdown_string_4(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![
-                lsp::SignatureInformation {
-                    label: "fn test1(foo: u8, bar: &str)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-                lsp::SignatureInformation {
-                    label: "fn test2(hoge: String, fuga: bool)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("hoge: String".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("fuga: bool".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-            ],
-            active_signature: Some(1),
-            active_parameter: Some(0),
-        };
-        let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(maybe_markdown.is_some());
-
-        let markdown = maybe_markdown.unwrap();
-        let signature = markdown.signatures[markdown.active_signature].clone();
-        let markdown = (signature.label, signature.highlights);
-        assert_eq!(
-            markdown,
-            (
-                SharedString::new("fn test2(hoge: String, fuga: bool)"),
-                vec![(9..21, current_parameter())]
-            )
-        );
-    }
-
-    #[gpui::test]
-    fn test_create_signature_help_markdown_string_5(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![
-                lsp::SignatureInformation {
-                    label: "fn test1(foo: u8, bar: &str)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-                lsp::SignatureInformation {
-                    label: "fn test2(hoge: String, fuga: bool)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("hoge: String".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("fuga: bool".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-            ],
-            active_signature: Some(1),
-            active_parameter: Some(1),
-        };
-        let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(maybe_markdown.is_some());
-
-        let markdown = maybe_markdown.unwrap();
-        let signature = markdown.signatures[markdown.active_signature].clone();
-        let markdown = (signature.label, signature.highlights);
-        assert_eq!(
-            markdown,
-            (
-                SharedString::new("fn test2(hoge: String, fuga: bool)"),
-                vec![(23..33, current_parameter())]
-            )
-        );
-    }
-
-    #[gpui::test]
-    fn test_create_signature_help_markdown_string_6(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![
-                lsp::SignatureInformation {
-                    label: "fn test1(foo: u8, bar: &str)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-                lsp::SignatureInformation {
-                    label: "fn test2(hoge: String, fuga: bool)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("hoge: String".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("fuga: bool".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-            ],
-            active_signature: Some(1),
-            active_parameter: None,
-        };
-        let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(maybe_markdown.is_some());
-
-        let markdown = maybe_markdown.unwrap();
-        let signature = markdown.signatures[markdown.active_signature].clone();
-        let markdown = (signature.label, signature.highlights);
-        assert_eq!(
-            markdown,
-            (
-                SharedString::new("fn test2(hoge: String, fuga: bool)"),
-                vec![(9..21, current_parameter())]
-            )
-        );
-    }
-
-    #[gpui::test]
-    fn test_create_signature_help_markdown_string_7(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![
-                lsp::SignatureInformation {
-                    label: "fn test1(foo: u8, bar: &str)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-                lsp::SignatureInformation {
-                    label: "fn test2(hoge: String, fuga: bool)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("hoge: String".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("fuga: bool".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-                lsp::SignatureInformation {
-                    label: "fn test3(one: usize, two: u32)".to_string(),
-                    documentation: None,
-                    parameters: Some(vec![
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("one: usize".to_string()),
-                            documentation: None,
-                        },
-                        lsp::ParameterInformation {
-                            label: lsp::ParameterLabel::Simple("two: u32".to_string()),
-                            documentation: None,
-                        },
-                    ]),
-                    active_parameter: None,
-                },
-            ],
-            active_signature: Some(2),
-            active_parameter: Some(1),
-        };
-        let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(maybe_markdown.is_some());
-
-        let markdown = maybe_markdown.unwrap();
-        let signature = markdown.signatures[markdown.active_signature].clone();
-        let markdown = (signature.label, signature.highlights);
-        assert_eq!(
-            markdown,
-            (
-                SharedString::new("fn test3(one: usize, two: u32)"),
-                vec![(21..29, current_parameter())]
-            )
-        );
-    }
-
-    #[gpui::test]
-    fn test_create_signature_help_markdown_string_8(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![],
-            active_signature: None,
-            active_parameter: None,
-        };
-        let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(maybe_markdown.is_none());
-    }
-
-    #[gpui::test]
-    fn test_create_signature_help_markdown_string_9(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![lsp::SignatureInformation {
-                label: "fn test(foo: u8, bar: &str)".to_string(),
-                documentation: None,
-                parameters: Some(vec![
-                    lsp::ParameterInformation {
-                        label: lsp::ParameterLabel::LabelOffsets([8, 15]),
-                        documentation: None,
-                    },
-                    lsp::ParameterInformation {
-                        label: lsp::ParameterLabel::LabelOffsets([17, 26]),
-                        documentation: None,
-                    },
-                ]),
-                active_parameter: None,
-            }],
-            active_signature: Some(0),
-            active_parameter: Some(0),
-        };
-        let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(maybe_markdown.is_some());
-
-        let markdown = maybe_markdown.unwrap();
-        let signature = markdown.signatures[markdown.active_signature].clone();
-        let markdown = (signature.label, signature.highlights);
-        assert_eq!(
-            markdown,
-            (
-                SharedString::new("fn test(foo: u8, bar: &str)"),
-                vec![(8..15, current_parameter())]
-            )
-        );
-    }
-
-    #[gpui::test]
-    fn test_parameter_documentation(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![lsp::SignatureInformation {
-                label: "fn test(foo: u8, bar: &str)".to_string(),
-                documentation: Some(Documentation::String(
-                    "This is a test documentation".to_string(),
-                )),
-                parameters: Some(vec![
-                    lsp::ParameterInformation {
-                        label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
-                        documentation: Some(Documentation::String("The foo parameter".to_string())),
-                    },
-                    lsp::ParameterInformation {
-                        label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
-                        documentation: Some(Documentation::String("The bar parameter".to_string())),
-                    },
-                ]),
-                active_parameter: None,
-            }],
-            active_signature: Some(0),
-            active_parameter: Some(0),
-        };
-        let maybe_signature_help =
-            cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(maybe_signature_help.is_some());
-
-        let signature_help = maybe_signature_help.unwrap();
-        let signature = &signature_help.signatures[signature_help.active_signature];
-
-        // Check that parameter documentation is extracted
-        assert_eq!(signature.parameters.len(), 2);
-        assert_eq!(
-            signature.parameters[0]
-                .documentation
-                .as_ref()
-                .unwrap()
-                .update(cx, |documentation, _| documentation.source().to_owned()),
-            "The foo parameter",
-        );
-        assert_eq!(
-            signature.parameters[1]
-                .documentation
-                .as_ref()
-                .unwrap()
-                .update(cx, |documentation, _| documentation.source().to_owned()),
-            "The bar parameter",
-        );
-
-        // Check that the active parameter is correct
-        assert_eq!(signature.active_parameter, Some(0));
-    }
-
-    #[gpui::test]
-    fn test_create_signature_help_implements_utf16_spec(cx: &mut TestAppContext) {
-        let signature_help = lsp::SignatureHelp {
-            signatures: vec![lsp::SignatureInformation {
-                label: "fn test(🦀: u8, 🦀: &str)".to_string(),
-                documentation: None,
-                parameters: Some(vec![
-                    lsp::ParameterInformation {
-                        label: lsp::ParameterLabel::LabelOffsets([8, 10]),
-                        documentation: None,
-                    },
-                    lsp::ParameterInformation {
-                        label: lsp::ParameterLabel::LabelOffsets([16, 18]),
-                        documentation: None,
-                    },
-                ]),
-                active_parameter: None,
-            }],
-            active_signature: Some(0),
-            active_parameter: Some(0),
-        };
-        let signature_help = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
-        assert!(signature_help.is_some());
-
-        let markdown = signature_help.unwrap();
-        let signature = markdown.signatures[markdown.active_signature].clone();
-        let markdown = (signature.label, signature.highlights);
-        assert_eq!(
-            markdown,
-            (
-                SharedString::new("fn test(🦀: u8, 🦀: &str)"),
-                vec![(8..12, current_parameter())]
-            )
-        );
-    }
-}

crates/project/src/lsp_store.rs 🔗

@@ -3012,7 +3012,7 @@ impl LocalLspStore {
     }
 
     #[allow(clippy::type_complexity)]
-    pub(crate) fn edits_from_lsp(
+    pub fn edits_from_lsp(
         &mut self,
         buffer: &Entity<Buffer>,
         lsp_edits: impl 'static + Send + IntoIterator<Item = lsp::TextEdit>,
@@ -8474,7 +8474,7 @@ impl LspStore {
             .collect();
     }
 
-    #[cfg(test)]
+    #[cfg(feature = "test-support")]
     pub fn update_diagnostic_entries(
         &mut self,
         server_id: LanguageServerId,
@@ -14107,7 +14107,7 @@ pub enum ResolvedHint {
     Resolving(Shared<Task<()>>),
 }
 
-fn glob_literal_prefix(glob: &Path) -> PathBuf {
+pub fn glob_literal_prefix(glob: &Path) -> PathBuf {
     glob.components()
         .take_while(|component| match component {
             path::Component::Normal(part) => !part.to_string_lossy().contains(['*', '?', '{', '}']),
@@ -14515,7 +14515,7 @@ fn include_text(server: &lsp::LanguageServer) -> Option<bool> {
 /// breaking the completions menu presentation.
 ///
 /// Sanitize the text to ensure there are no newlines, or, if there are some, remove them and also remove long space sequences if there were newlines.
-fn ensure_uniform_list_compatible_label(label: &mut CodeLabel) {
+pub fn ensure_uniform_list_compatible_label(label: &mut CodeLabel) {
     let mut new_text = String::with_capacity(label.text.len());
     let mut offset_map = vec![0; label.text.len() + 1];
     let mut last_char_was_space = false;
@@ -14613,80 +14613,3 @@ fn ensure_uniform_list_compatible_label(label: &mut CodeLabel) {
 
     label.text = new_text;
 }
-
-#[cfg(test)]
-mod tests {
-    use language::HighlightId;
-
-    use super::*;
-
-    #[test]
-    fn test_glob_literal_prefix() {
-        assert_eq!(glob_literal_prefix(Path::new("**/*.js")), Path::new(""));
-        assert_eq!(
-            glob_literal_prefix(Path::new("node_modules/**/*.js")),
-            Path::new("node_modules")
-        );
-        assert_eq!(
-            glob_literal_prefix(Path::new("foo/{bar,baz}.js")),
-            Path::new("foo")
-        );
-        assert_eq!(
-            glob_literal_prefix(Path::new("foo/bar/baz.js")),
-            Path::new("foo/bar/baz.js")
-        );
-
-        #[cfg(target_os = "windows")]
-        {
-            assert_eq!(glob_literal_prefix(Path::new("**\\*.js")), Path::new(""));
-            assert_eq!(
-                glob_literal_prefix(Path::new("node_modules\\**/*.js")),
-                Path::new("node_modules")
-            );
-            assert_eq!(
-                glob_literal_prefix(Path::new("foo/{bar,baz}.js")),
-                Path::new("foo")
-            );
-            assert_eq!(
-                glob_literal_prefix(Path::new("foo\\bar\\baz.js")),
-                Path::new("foo/bar/baz.js")
-            );
-        }
-    }
-
-    #[test]
-    fn test_multi_len_chars_normalization() {
-        let mut label = CodeLabel::new(
-            "myElˇ (parameter) myElˇ: {\n    foo: string;\n}".to_string(),
-            0..6,
-            vec![(0..6, HighlightId(1))],
-        );
-        ensure_uniform_list_compatible_label(&mut label);
-        assert_eq!(
-            label,
-            CodeLabel::new(
-                "myElˇ (parameter) myElˇ: { foo: string; }".to_string(),
-                0..6,
-                vec![(0..6, HighlightId(1))],
-            )
-        );
-    }
-
-    #[test]
-    fn test_trailing_newline_in_completion_documentation() {
-        let doc = lsp::Documentation::String(
-            "Inappropriate argument value (of correct type).\n".to_string(),
-        );
-        let completion_doc: CompletionDocumentation = doc.into();
-        assert!(
-            matches!(completion_doc, CompletionDocumentation::SingleLine(s) if s == "Inappropriate argument value (of correct type).")
-        );
-
-        let doc = lsp::Documentation::String("  some value  \n".to_string());
-        let completion_doc: CompletionDocumentation = doc.into();
-        assert!(matches!(
-            completion_doc,
-            CompletionDocumentation::SingleLine(s) if s == "some value"
-        ));
-    }
-}
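`glob_literal_prefix` and `ensure_uniform_list_compatible_label` are exported so the moved tests can keep exercising them, and `update_diagnostic_entries` moves behind the `test-support` feature for the same reason. A minimal sketch of the glob-prefix behaviour checked above, assuming the function is reachable as `project::lsp_store::glob_literal_prefix`:

    use std::path::Path;

    use project::lsp_store::glob_literal_prefix;

    // The literal (non-glob) prefix is the part that can be scanned directly.
    assert_eq!(
        glob_literal_prefix(Path::new("node_modules/**/*.js")),
        Path::new("node_modules")
    );
    assert_eq!(glob_literal_prefix(Path::new("**/*.js")), Path::new(""));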

crates/project/src/manifest_tree.rs 🔗

@@ -4,7 +4,7 @@
 //! This then is used to provide those locations to language servers & determine locations eligible for toolchain selection.
 
 mod manifest_store;
-mod path_trie;
+pub mod path_trie;
 mod server_tree;
 
 use std::{borrow::Borrow, collections::hash_map::Entry, ops::ControlFlow, sync::Arc};

crates/project/src/manifest_tree/path_trie.rs 🔗

@@ -13,7 +13,7 @@ use util::rel_path::RelPath;
 /// A path is unexplored when the closest ancestor of a path is not the path itself; that means that we have not yet ran the scan on that path.
 /// For example, if there's a project root at path `python/project` and we query for a path `python/project/subdir/another_subdir/file.py`, there is
 /// a known root at `python/project` and the unexplored part is `subdir/another_subdir` - we need to run a scan on these 2 directories.
-pub(super) struct RootPathTrie<Label> {
+pub struct RootPathTrie<Label> {
     worktree_relative_path: Arc<RelPath>,
     labels: BTreeMap<Label, LabelPresence>,
     children: BTreeMap<Arc<str>, RootPathTrie<Label>>,
@@ -32,13 +32,13 @@ pub(super) struct RootPathTrie<Label> {
 /// Storing absent nodes allows us to recognize which paths have already been scanned for a project root unsuccessfully. This way we don't need to run
 /// such scan more than once.
 #[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Ord, Eq)]
-pub(super) enum LabelPresence {
+pub enum LabelPresence {
     KnownAbsent,
     Present,
 }
 
 impl<Label: Ord + Clone> RootPathTrie<Label> {
-    pub(super) fn new() -> Self {
+    pub fn new() -> Self {
         Self::new_with_key(Arc::from(RelPath::empty()))
     }
 
@@ -74,11 +74,11 @@ impl<Label: Ord + Clone> RootPathTrie<Label> {
         current
     }
 
-    pub(super) fn insert(&mut self, path: &TriePath, value: Label, presence: LabelPresence) {
+    pub fn insert(&mut self, path: &TriePath, value: Label, presence: LabelPresence) {
         self.insert_inner(path, value, presence);
     }
 
-    pub(super) fn walk<'a>(
+    pub fn walk<'a>(
         &'a self,
         path: &TriePath,
         callback: &mut dyn for<'b> FnMut(
@@ -103,7 +103,7 @@ impl<Label: Ord + Clone> RootPathTrie<Label> {
         }
     }
 
-    pub(super) fn remove(&mut self, path: &TriePath) {
+    pub fn remove(&mut self, path: &TriePath) {
         let mut current = self;
         for path in path.0.iter().take(path.0.len().saturating_sub(1)) {
             current = match current.children.get_mut(path) {
@@ -119,10 +119,10 @@ impl<Label: Ord + Clone> RootPathTrie<Label> {
 
 /// [TriePath] is a [Path] preprocessed for amortizing the cost of doing multiple lookups in distinct [RootPathTrie]s.
 #[derive(Clone)]
-pub(super) struct TriePath(Arc<[Arc<str>]>);
+pub struct TriePath(Arc<[Arc<str>]>);
 
 impl TriePath {
-    fn new(value: &RelPath) -> Self {
+    pub fn new(value: &RelPath) -> Self {
         TriePath(
             value
                 .components()
@@ -137,129 +137,3 @@ impl From<&RelPath> for TriePath {
         Self::new(value)
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use std::collections::BTreeSet;
-
-    use util::rel_path::rel_path;
-
-    use super::*;
-
-    #[test]
-    fn test_insert_and_lookup() {
-        let mut trie = RootPathTrie::<()>::new();
-        trie.insert(
-            &TriePath::new(rel_path("a/b/c")),
-            (),
-            LabelPresence::Present,
-        );
-
-        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
-            assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
-            assert_eq!(path.as_unix_str(), "a/b/c");
-            ControlFlow::Continue(())
-        });
-        // Now let's annotate a parent with "Known missing" node.
-        trie.insert(
-            &TriePath::new(rel_path("a")),
-            (),
-            LabelPresence::KnownAbsent,
-        );
-
-        // Ensure that we walk from the root to the leaf.
-        let mut visited_paths = BTreeSet::new();
-        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
-            if path.as_unix_str() == "a/b/c" {
-                assert_eq!(visited_paths, BTreeSet::from_iter([rel_path("a").into()]));
-                assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
-            } else if path.as_unix_str() == "a" {
-                assert!(visited_paths.is_empty());
-                assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
-            } else {
-                panic!("Unknown path");
-            }
-            // Assert that we only ever visit a path once.
-            assert!(visited_paths.insert(path.clone()));
-            ControlFlow::Continue(())
-        });
-
-        // One can also pass a path whose prefix is in the tree, but not that path itself.
-        let mut visited_paths = BTreeSet::new();
-        trie.walk(
-            &TriePath::new(rel_path("a/b/c/d/e/f/g")),
-            &mut |path, nodes| {
-                if path.as_unix_str() == "a/b/c" {
-                    assert_eq!(visited_paths, BTreeSet::from_iter([rel_path("a").into()]));
-                    assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
-                } else if path.as_unix_str() == "a" {
-                    assert!(visited_paths.is_empty());
-                    assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
-                } else {
-                    panic!("Unknown path");
-                }
-                // Assert that we only ever visit a path once.
-                assert!(visited_paths.insert(path.clone()));
-                ControlFlow::Continue(())
-            },
-        );
-
-        // Test breaking from the tree-walk.
-        let mut visited_paths = BTreeSet::new();
-        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
-            if path.as_unix_str() == "a" {
-                assert!(visited_paths.is_empty());
-                assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
-            } else {
-                panic!("Unknown path");
-            }
-            // Assert that we only ever visit a path once.
-            assert!(visited_paths.insert(path.clone()));
-            ControlFlow::Break(())
-        });
-        assert_eq!(visited_paths.len(), 1);
-
-        // Entry removal.
-        trie.insert(
-            &TriePath::new(rel_path("a/b")),
-            (),
-            LabelPresence::KnownAbsent,
-        );
-        let mut visited_paths = BTreeSet::new();
-        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, _nodes| {
-            // Assert that we only ever visit a path once.
-            assert!(visited_paths.insert(path.clone()));
-            ControlFlow::Continue(())
-        });
-        assert_eq!(visited_paths.len(), 3);
-        trie.remove(&TriePath::new(rel_path("a/b")));
-        let mut visited_paths = BTreeSet::new();
-        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, _nodes| {
-            // Assert that we only ever visit a path once.
-            assert!(visited_paths.insert(path.clone()));
-            ControlFlow::Continue(())
-        });
-        assert_eq!(visited_paths.len(), 1);
-        assert_eq!(
-            visited_paths.into_iter().next().unwrap(),
-            rel_path("a").into()
-        );
-    }
-
-    #[test]
-    fn path_to_a_root_can_contain_multiple_known_nodes() {
-        let mut trie = RootPathTrie::<()>::new();
-        trie.insert(&TriePath::new(rel_path("a/b")), (), LabelPresence::Present);
-        trie.insert(&TriePath::new(rel_path("a")), (), LabelPresence::Present);
-        let mut visited_paths = BTreeSet::new();
-        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
-            assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
-            if path.as_unix_str() != "a" && path.as_unix_str() != "a/b" {
-                panic!("Unexpected path: {}", path.as_unix_str());
-            }
-            assert!(visited_paths.insert(path.clone()));
-            ControlFlow::Continue(())
-        });
-        assert_eq!(visited_paths.len(), 2);
-    }
-}
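`RootPathTrie`, `TriePath`, and `LabelPresence` become `pub` (with `manifest_tree::path_trie` exposed as a public module) so the trie tests above can move out of the crate. A compressed sketch of the insert-then-walk pattern those tests rely on, assuming the path `project::manifest_tree::path_trie`:

    use std::ops::ControlFlow;

    use project::manifest_tree::path_trie::{LabelPresence, RootPathTrie, TriePath};
    use util::rel_path::rel_path;

    // Record a known project root at a/b/c, then walk a deeper path; the walk
    // visits the known ancestors from the root toward the leaf.
    let mut trie = RootPathTrie::<()>::new();
    trie.insert(&TriePath::new(rel_path("a/b/c")), (), LabelPresence::Present);
    trie.walk(&TriePath::new(rel_path("a/b/c/d")), &mut |path, labels| {
        assert_eq!(labels.get(&()), Some(&LabelPresence::Present));
        assert_eq!(path.as_unix_str(), "a/b/c");
        ControlFlow::Continue(())
    });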

crates/project/src/project.rs 🔗

@@ -1,7 +1,7 @@
 pub mod agent_registry_store;
 pub mod agent_server_store;
 pub mod buffer_store;
-mod color_extractor;
+pub mod color_extractor;
 pub mod connection_manager;
 pub mod context_server_store;
 pub mod debounced_delay;
@@ -10,12 +10,12 @@ pub mod git_store;
 pub mod image_store;
 pub mod lsp_command;
 pub mod lsp_store;
-mod manifest_tree;
+pub mod manifest_tree;
 pub mod prettier_store;
 pub mod project_search;
 pub mod project_settings;
 pub mod search;
-mod task_inventory;
+pub mod task_inventory;
 pub mod task_store;
 pub mod telemetry_snapshot;
 pub mod terminals;
@@ -23,9 +23,6 @@ pub mod toolchain_store;
 pub mod trusted_worktrees;
 pub mod worktree_store;
 
-#[cfg(test)]
-mod project_tests;
-
 mod environment;
 use buffer_diff::BufferDiff;
 use context_server_store::ContextServerStore;
@@ -33,7 +30,7 @@ pub use environment::ProjectEnvironmentEvent;
 use git::repository::get_git_committer;
 use git_store::{Repository, RepositoryId};
 pub mod search_history;
-mod yarn;
+pub mod yarn;
 
 use dap::inline_value::{InlineValueLocation, VariableLookupKind, VariableScope};
 use itertools::Either;
@@ -74,8 +71,7 @@ use debugger::{
 };
 
 pub use environment::ProjectEnvironment;
-#[cfg(test)]
-use futures::future::join_all;
+
 use futures::{
     StreamExt,
     channel::mpsc::{self, UnboundedReceiver},
@@ -722,7 +718,7 @@ impl LspAction {
         }
     }
 
-    fn action_kind(&self) -> Option<lsp::CodeActionKind> {
+    pub fn action_kind(&self) -> Option<lsp::CodeActionKind> {
         match self {
             Self::Action(action) => action.kind.clone(),
             Self::Command(_) => Some(lsp::CodeActionKind::new("command")),
@@ -1010,7 +1006,7 @@ impl DirectoryLister {
     }
 }
 
-#[cfg(any(test, feature = "test-support"))]
+#[cfg(feature = "test-support")]
 pub const DEFAULT_COMPLETION_CONTEXT: CompletionContext = CompletionContext {
     trigger_kind: lsp::CompletionTriggerKind::INVOKED,
     trigger_character: None,
@@ -1711,7 +1707,7 @@ impl Project {
             )
         });
 
-        let agent_server_store = cx.new(|cx| AgentServerStore::collab(cx));
+        let agent_server_store = cx.new(|_cx| AgentServerStore::collab());
         let replica_id = ReplicaId::new(response.payload.replica_id as u16);
 
         let project = cx.new(|cx| {
@@ -1896,7 +1892,7 @@ impl Project {
         }
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub async fn example(
         root_paths: impl IntoIterator<Item = &Path>,
         cx: &mut AsyncApp,
@@ -1937,7 +1933,7 @@ impl Project {
         project
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub async fn test(
         fs: Arc<dyn Fs>,
         root_paths: impl IntoIterator<Item = &Path>,
@@ -1946,7 +1942,7 @@ impl Project {
         Self::test_project(fs, root_paths, false, cx).await
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub async fn test_with_worktree_trust(
         fs: Arc<dyn Fs>,
         root_paths: impl IntoIterator<Item = &Path>,
@@ -1955,7 +1951,7 @@ impl Project {
         Self::test_project(fs, root_paths, true, cx).await
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     async fn test_project(
         fs: Arc<dyn Fs>,
         root_paths: impl IntoIterator<Item = &Path>,
@@ -2089,7 +2085,7 @@ impl Project {
         });
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     #[inline]
     pub fn has_open_buffer(&self, path: impl Into<ProjectPath>, cx: &App) -> bool {
         self.buffer_store
@@ -2858,7 +2854,7 @@ impl Project {
         })
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub fn open_local_buffer_with_lsp(
         &mut self,
         abs_path: impl AsRef<Path>,
@@ -2886,7 +2882,7 @@ impl Project {
         })
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub fn open_buffer_with_lsp(
         &mut self,
         path: impl Into<ProjectPath>,
@@ -5520,7 +5516,7 @@ impl Project {
             })
     }
 
-    #[cfg(any(test, feature = "test-support"))]
+    #[cfg(feature = "test-support")]
     pub fn has_language_servers_for(&self, buffer: &Buffer, cx: &mut App) -> bool {
         self.lsp_store.update(cx, |this, cx| {
             this.running_language_servers_for_local_buffer(buffer, cx)
@@ -5552,8 +5548,9 @@ impl Project {
         &self.agent_server_store
     }
 
-    #[cfg(test)]
-    fn git_scans_complete(&self, cx: &Context<Self>) -> Task<()> {
+    #[cfg(feature = "test-support")]
+    pub fn git_scans_complete(&self, cx: &Context<Self>) -> Task<()> {
+        use futures::future::join_all;
         cx.spawn(async move |this, cx| {
             let scans_complete = this
                 .read_with(cx, |this, cx| {
@@ -5973,55 +5970,3 @@ fn provide_inline_values(
 
     variables
 }
-
-#[cfg(test)]
-mod disable_ai_settings_tests {
-    use super::*;
-    use gpui::TestAppContext;
-    use settings::Settings;
-
-    #[gpui::test]
-    async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
-        cx.update(|cx| {
-            settings::init(cx);
-
-            // Test 1: Default is false (AI enabled)
-            assert!(
-                !DisableAiSettings::get_global(cx).disable_ai,
-                "Default should allow AI"
-            );
-        });
-
-        let disable_true = serde_json::json!({
-            "disable_ai": true
-        })
-        .to_string();
-        let disable_false = serde_json::json!({
-            "disable_ai": false
-        })
-        .to_string();
-
-        cx.update_global::<SettingsStore, _>(|store, cx| {
-            store.set_user_settings(&disable_false, cx).unwrap();
-            store.set_global_settings(&disable_true, cx).unwrap();
-        });
-        cx.update(|cx| {
-            assert!(
-                DisableAiSettings::get_global(cx).disable_ai,
-                "Local false cannot override global true"
-            );
-        });
-
-        cx.update_global::<SettingsStore, _>(|store, cx| {
-            store.set_global_settings(&disable_false, cx).unwrap();
-            store.set_user_settings(&disable_true, cx).unwrap();
-        });
-
-        cx.update(|cx| {
-            assert!(
-                DisableAiSettings::get_global(cx).disable_ai,
-                "Local false cannot override global true"
-            );
-        });
-    }
-}

crates/project/src/project_search.rs 🔗

@@ -824,13 +824,13 @@ struct MatchingEntry {
 /// scanned based on include/exclude patterns of a search query (as include/exclude parameters may match paths inside it).
 /// It is kind-of doing an inverse of glob. Given a glob pattern like `src/**/` and a parent path like `src`, we need to decide whether the parent
 /// may contain glob hits.
-struct PathInclusionMatcher {
+pub struct PathInclusionMatcher {
     included: BTreeSet<PathBuf>,
     query: Arc<SearchQuery>,
 }
 
 impl PathInclusionMatcher {
-    fn new(query: Arc<SearchQuery>) -> Self {
+    pub fn new(query: Arc<SearchQuery>) -> Self {
         let mut included = BTreeSet::new();
         // To do an inverse glob match, we split each glob into it's prefix and the glob part.
         // For example, `src/**/*.rs` becomes `src/` and `**/*.rs`. The glob part gets dropped.
@@ -846,7 +846,7 @@ impl PathInclusionMatcher {
         Self { included, query }
     }
 
-    fn should_scan_gitignored_dir(
+    pub fn should_scan_gitignored_dir(
         &self,
         entry: &Entry,
         snapshot: &Snapshot,
@@ -1024,123 +1024,3 @@ impl<T: 'static + Send> AdaptiveBatcher<T> {
         self._batch_task.await;
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use fs::FakeFs;
-    use serde_json::json;
-    use settings::Settings;
-    use util::{
-        path,
-        paths::{PathMatcher, PathStyle},
-        rel_path::RelPath,
-    };
-    use worktree::{Entry, EntryKind, WorktreeSettings};
-
-    use crate::{
-        Project, project_search::PathInclusionMatcher, project_tests::init_test,
-        search::SearchQuery,
-    };
-
-    #[gpui::test]
-    async fn test_path_inclusion_matcher(cx: &mut gpui::TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.background_executor.clone());
-        fs.insert_tree(
-            "/root",
-            json!({
-                ".gitignore": "src/data/\n",
-                "src": {
-                    "data": {
-                        "main.csv": "field_1,field_2,field_3",
-                    },
-                    "lib": {
-                        "main.txt": "Are you familiar with fields?",
-                    },
-                },
-            }),
-        )
-        .await;
-
-        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
-        let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
-        let (worktree_settings, worktree_snapshot) = worktree.update(cx, |worktree, cx| {
-            let settings_location = worktree.settings_location(cx);
-            return (
-                WorktreeSettings::get(Some(settings_location), cx).clone(),
-                worktree.snapshot(),
-            );
-        });
-
-        // Manually create a test entry for the gitignored directory since it won't
-        // be loaded by the worktree
-        let entry = Entry {
-            id: ProjectEntryId::from_proto(1),
-            kind: EntryKind::UnloadedDir,
-            path: Arc::from(RelPath::unix(Path::new("src/data")).unwrap()),
-            inode: 0,
-            mtime: None,
-            canonical_path: None,
-            is_ignored: true,
-            is_hidden: false,
-            is_always_included: false,
-            is_external: false,
-            is_private: false,
-            size: 0,
-            char_bag: Default::default(),
-            is_fifo: false,
-        };
-
-        // 1. Test searching for `field`, including ignored files without any
-        // inclusion and exclusion filters.
-        let include_ignored = true;
-        let files_to_include = PathMatcher::default();
-        let files_to_exclude = PathMatcher::default();
-        let match_full_paths = false;
-        let search_query = SearchQuery::text(
-            "field",
-            false,
-            false,
-            include_ignored,
-            files_to_include,
-            files_to_exclude,
-            match_full_paths,
-            None,
-        )
-        .unwrap();
-
-        let path_matcher = PathInclusionMatcher::new(Arc::new(search_query));
-        assert!(path_matcher.should_scan_gitignored_dir(
-            &entry,
-            &worktree_snapshot,
-            &worktree_settings
-        ));
-
-        // 2. Test searching for `field`, including ignored files but updating
-        // `files_to_include` to only include files under `src/lib`.
-        let include_ignored = true;
-        let files_to_include = PathMatcher::new(vec!["src/lib"], PathStyle::Posix).unwrap();
-        let files_to_exclude = PathMatcher::default();
-        let match_full_paths = false;
-        let search_query = SearchQuery::text(
-            "field",
-            false,
-            false,
-            include_ignored,
-            files_to_include,
-            files_to_exclude,
-            match_full_paths,
-            None,
-        )
-        .unwrap();
-
-        let path_matcher = PathInclusionMatcher::new(Arc::new(search_query));
-        assert!(!path_matcher.should_scan_gitignored_dir(
-            &entry,
-            &worktree_snapshot,
-            &worktree_settings
-        ));
-    }
-}
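
The `PathInclusionMatcher` doc comment above describes the inverse-glob trick: each include pattern is split into its literal prefix and its glob part, and the glob part is dropped; a directory is worth scanning when it sits on one of those prefixes. A small, self-contained sketch of that idea (an illustration only, not the crate's actual implementation; `glob_prefix` and `dir_may_contain_matches` are hypothetical helpers):

```rust
use std::collections::BTreeSet;
use std::path::{Path, PathBuf};

// Keep only the leading components of a pattern that contain no glob syntax,
// e.g. "src/**/*.rs" -> "src" and "docs/*.md" -> "docs".
fn glob_prefix(pattern: &str) -> PathBuf {
    pattern
        .split('/')
        .take_while(|component| !component.contains(|c| matches!(c, '*' | '?' | '[' | '{')))
        .collect()
}

// A directory may contain matches if it lies on (or under) one of the prefixes.
fn dir_may_contain_matches(included_prefixes: &BTreeSet<PathBuf>, dir: &Path) -> bool {
    included_prefixes
        .iter()
        .any(|prefix| prefix.starts_with(dir) || dir.starts_with(prefix))
}

fn main() {
    let prefixes: BTreeSet<PathBuf> = ["src/**/*.rs", "docs/*.md"]
        .iter()
        .map(|pattern| glob_prefix(pattern))
        .collect();
    assert!(dir_may_contain_matches(&prefixes, Path::new("src")));
    assert!(dir_may_contain_matches(&prefixes, Path::new("src/lib")));
    assert!(!dir_may_contain_matches(&prefixes, Path::new("target")));
}
```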

crates/project/src/search.rs 🔗

@@ -627,161 +627,3 @@ impl SearchQuery {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn path_matcher_creation_for_valid_paths() {
-        for valid_path in [
-            "file",
-            "Cargo.toml",
-            ".DS_Store",
-            "~/dir/another_dir/",
-            "./dir/file",
-            "dir/[a-z].txt",
-        ] {
-            let path_matcher = PathMatcher::new(&[valid_path.to_owned()], PathStyle::local())
-                .unwrap_or_else(|e| {
-                    panic!("Valid path {valid_path} should be accepted, but got: {e}")
-                });
-            assert!(
-                path_matcher
-                    .is_match(&RelPath::new(valid_path.as_ref(), PathStyle::local()).unwrap()),
-                "Path matcher for valid path {valid_path} should match itself"
-            )
-        }
-    }
-
-    #[test]
-    fn path_matcher_creation_for_globs() {
-        for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
-            match PathMatcher::new(&[invalid_glob.to_owned()], PathStyle::local()) {
-                Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
-                Err(_expected) => {}
-            }
-        }
-
-        for valid_glob in [
-            "dir/?ile",
-            "dir/*.txt",
-            "dir/**/file",
-            "dir/[a-z].txt",
-            "{dir,file}",
-        ] {
-            match PathMatcher::new(&[valid_glob.to_owned()], PathStyle::local()) {
-                Ok(_expected) => {}
-                Err(e) => panic!("Valid glob should be accepted, but got: {e}"),
-            }
-        }
-    }
-
-    #[test]
-    fn test_case_sensitive_pattern_items() {
-        let case_sensitive = false;
-        let search_query = SearchQuery::regex(
-            "test\\C",
-            false,
-            case_sensitive,
-            false,
-            false,
-            Default::default(),
-            Default::default(),
-            false,
-            None,
-        )
-        .expect("Should be able to create a regex SearchQuery");
-
-        assert_eq!(
-            search_query.case_sensitive(),
-            true,
-            "Case sensitivity should be enabled when \\C pattern item is present in the query."
-        );
-
-        let case_sensitive = true;
-        let search_query = SearchQuery::regex(
-            "test\\c",
-            true,
-            case_sensitive,
-            false,
-            false,
-            Default::default(),
-            Default::default(),
-            false,
-            None,
-        )
-        .expect("Should be able to create a regex SearchQuery");
-
-        assert_eq!(
-            search_query.case_sensitive(),
-            false,
-            "Case sensitivity should be disabled when \\c pattern item is present, even if initially set to true."
-        );
-
-        let case_sensitive = false;
-        let search_query = SearchQuery::regex(
-            "test\\c\\C",
-            false,
-            case_sensitive,
-            false,
-            false,
-            Default::default(),
-            Default::default(),
-            false,
-            None,
-        )
-        .expect("Should be able to create a regex SearchQuery");
-
-        assert_eq!(
-            search_query.case_sensitive(),
-            true,
-            "Case sensitivity should be enabled when \\C is the last pattern item, even after a \\c."
-        );
-
-        let case_sensitive = false;
-        let search_query = SearchQuery::regex(
-            "tests\\\\C",
-            false,
-            case_sensitive,
-            false,
-            false,
-            Default::default(),
-            Default::default(),
-            false,
-            None,
-        )
-        .expect("Should be able to create a regex SearchQuery");
-
-        assert_eq!(
-            search_query.case_sensitive(),
-            false,
-            "Case sensitivity should not be enabled when \\C pattern item is preceded by a backslash."
-        );
-    }
-
-    #[gpui::test]
-    async fn test_multiline_regex(cx: &mut gpui::TestAppContext) {
-        let search_query = SearchQuery::regex(
-            "^hello$\n",
-            false,
-            false,
-            false,
-            false,
-            Default::default(),
-            Default::default(),
-            false,
-            None,
-        )
-        .expect("Should be able to create a regex SearchQuery");
-
-        use language::Buffer;
-        let text = crate::Rope::from("hello\nworld\nhello\nworld");
-        let snapshot = cx
-            .update(|app| Buffer::build_snapshot(text, None, None, app))
-            .await;
-
-        let results = search_query.search(&snapshot, None).await;
-        assert_eq!(results, vec![0..6, 12..18]);
-    }
-}
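
The `test_case_sensitive_pattern_items` cases above pin down a small rule: an unescaped `\C` in the query forces case-sensitive search, an unescaped `\c` forces case-insensitive search, the last such item wins, and a `\C` preceded by another backslash is a literal and changes nothing. A minimal sketch of that scan (an illustration, not `SearchQuery`'s actual parsing; `case_sensitivity_override` is a hypothetical helper):

```rust
/// Returns `Some(true)` / `Some(false)` when the pattern carries an unescaped
/// `\C` / `\c` item, `None` otherwise; the last item encountered wins.
fn case_sensitivity_override(pattern: &str) -> Option<bool> {
    let mut override_value = None;
    let mut chars = pattern.chars();
    while let Some(ch) = chars.next() {
        if ch == '\\' {
            match chars.next() {
                Some('C') => override_value = Some(true),
                Some('c') => override_value = Some(false),
                // Any other escape (including `\\`) consumes the next character,
                // so the `C` in `\\C` is just a literal after an escaped backslash.
                _ => {}
            }
        }
    }
    override_value
}

fn main() {
    assert_eq!(case_sensitivity_override(r"test\C"), Some(true));
    assert_eq!(case_sensitivity_override(r"test\c"), Some(false));
    assert_eq!(case_sensitivity_override(r"test\c\C"), Some(true));
    assert_eq!(case_sensitivity_override(r"tests\\C"), None);
}
```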

crates/project/src/search_history.rs 🔗

@@ -91,160 +91,8 @@ impl SearchHistory {
         cursor.selection = Some(prev_index);
         Some(previous)
     }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_add() {
-        const MAX_HISTORY_LEN: usize = 20;
-        let mut search_history = SearchHistory::new(
-            Some(MAX_HISTORY_LEN),
-            QueryInsertionBehavior::ReplacePreviousIfContains,
-        );
-        let mut cursor = SearchHistoryCursor::default();
-
-        assert_eq!(
-            search_history.current(&cursor),
-            None,
-            "No current selection should be set for the default search history"
-        );
-
-        search_history.add(&mut cursor, "rust".to_string());
-        assert_eq!(
-            search_history.current(&cursor),
-            Some("rust"),
-            "Newly added item should be selected"
-        );
-
-        // check if duplicates are not added
-        search_history.add(&mut cursor, "rust".to_string());
-        assert_eq!(
-            search_history.history.len(),
-            1,
-            "Should not add a duplicate"
-        );
-        assert_eq!(search_history.current(&cursor), Some("rust"));
-
-        // check if new string containing the previous string replaces it
-        search_history.add(&mut cursor, "rustlang".to_string());
-        assert_eq!(
-            search_history.history.len(),
-            1,
-            "Should replace previous item if it's a substring"
-        );
-        assert_eq!(search_history.current(&cursor), Some("rustlang"));
-
-        // add item when it equals to current item if it's not the last one
-        search_history.add(&mut cursor, "php".to_string());
-        search_history.previous(&mut cursor);
-        assert_eq!(search_history.current(&cursor), Some("rustlang"));
-        search_history.add(&mut cursor, "rustlang".to_string());
-        assert_eq!(search_history.history.len(), 3, "Should add item");
-        assert_eq!(search_history.current(&cursor), Some("rustlang"));
-
-        // push enough items to test SEARCH_HISTORY_LIMIT
-        for i in 0..MAX_HISTORY_LEN * 2 {
-            search_history.add(&mut cursor, format!("item{i}"));
-        }
-        assert!(search_history.history.len() <= MAX_HISTORY_LEN);
-    }
-
-    #[test]
-    fn test_next_and_previous() {
-        let mut search_history = SearchHistory::new(None, QueryInsertionBehavior::AlwaysInsert);
-        let mut cursor = SearchHistoryCursor::default();
-
-        assert_eq!(
-            search_history.next(&mut cursor),
-            None,
-            "Default search history should not have a next item"
-        );
-
-        search_history.add(&mut cursor, "Rust".to_string());
-        assert_eq!(search_history.next(&mut cursor), None);
-        search_history.add(&mut cursor, "JavaScript".to_string());
-        assert_eq!(search_history.next(&mut cursor), None);
-        search_history.add(&mut cursor, "TypeScript".to_string());
-        assert_eq!(search_history.next(&mut cursor), None);
-
-        assert_eq!(search_history.current(&cursor), Some("TypeScript"));
-
-        assert_eq!(search_history.previous(&mut cursor), Some("JavaScript"));
-        assert_eq!(search_history.current(&cursor), Some("JavaScript"));
-
-        assert_eq!(search_history.previous(&mut cursor), Some("Rust"));
-        assert_eq!(search_history.current(&cursor), Some("Rust"));
-
-        assert_eq!(search_history.previous(&mut cursor), None);
-        assert_eq!(search_history.current(&cursor), Some("Rust"));
-
-        assert_eq!(search_history.next(&mut cursor), Some("JavaScript"));
-        assert_eq!(search_history.current(&cursor), Some("JavaScript"));
-
-        assert_eq!(search_history.next(&mut cursor), Some("TypeScript"));
-        assert_eq!(search_history.current(&cursor), Some("TypeScript"));
-
-        assert_eq!(search_history.next(&mut cursor), None);
-        assert_eq!(search_history.current(&cursor), Some("TypeScript"));
-    }
-
-    #[test]
-    fn test_reset_selection() {
-        let mut search_history = SearchHistory::new(None, QueryInsertionBehavior::AlwaysInsert);
-        let mut cursor = SearchHistoryCursor::default();
-
-        search_history.add(&mut cursor, "Rust".to_string());
-        search_history.add(&mut cursor, "JavaScript".to_string());
-        search_history.add(&mut cursor, "TypeScript".to_string());
-
-        assert_eq!(search_history.current(&cursor), Some("TypeScript"));
-        cursor.reset();
-        assert_eq!(search_history.current(&cursor), None);
-        assert_eq!(
-            search_history.previous(&mut cursor),
-            Some("TypeScript"),
-            "Should start from the end after reset on previous item query"
-        );
-
-        search_history.previous(&mut cursor);
-        assert_eq!(search_history.current(&cursor), Some("JavaScript"));
-        search_history.previous(&mut cursor);
-        assert_eq!(search_history.current(&cursor), Some("Rust"));
-
-        cursor.reset();
-        assert_eq!(search_history.current(&cursor), None);
-    }
-
-    #[test]
-    fn test_multiple_cursors() {
-        let mut search_history = SearchHistory::new(None, QueryInsertionBehavior::AlwaysInsert);
-        let mut cursor1 = SearchHistoryCursor::default();
-        let mut cursor2 = SearchHistoryCursor::default();
-
-        search_history.add(&mut cursor1, "Rust".to_string());
-        search_history.add(&mut cursor1, "JavaScript".to_string());
-        search_history.add(&mut cursor1, "TypeScript".to_string());
-
-        search_history.add(&mut cursor2, "Python".to_string());
-        search_history.add(&mut cursor2, "Java".to_string());
-        search_history.add(&mut cursor2, "C++".to_string());
-
-        assert_eq!(search_history.current(&cursor1), Some("TypeScript"));
-        assert_eq!(search_history.current(&cursor2), Some("C++"));
-
-        assert_eq!(search_history.previous(&mut cursor1), Some("JavaScript"));
-        assert_eq!(search_history.previous(&mut cursor2), Some("Java"));
-
-        assert_eq!(search_history.next(&mut cursor1), Some("TypeScript"));
-        assert_eq!(search_history.next(&mut cursor1), Some("Python"));
-
-        cursor1.reset();
-        cursor2.reset();
 
-        assert_eq!(search_history.current(&cursor1), None);
-        assert_eq!(search_history.current(&cursor2), None);
+    pub fn len(&self) -> usize {
+        self.history.len()
     }
 }
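
The `test_add` case above documents the `ReplacePreviousIfContains` behavior: exact duplicates of the latest entry are skipped, and a new query that contains the latest entry replaces it in place instead of growing the history. A simplified, self-contained sketch of that rule over a plain `Vec` (the crate's `SearchHistory` additionally tracks cursors, usage order, and insertion behavior variants):

```rust
fn add_query(history: &mut Vec<String>, query: String, max_len: usize) {
    if let Some(last) = history.last_mut() {
        if *last == query {
            return; // skip exact duplicates of the most recent entry
        }
        if query.contains(last.as_str()) {
            *last = query; // a query containing the previous one replaces it in place
            return;
        }
    }
    history.push(query);
    if history.len() > max_len {
        history.remove(0); // drop the oldest entry once over the limit
    }
}

fn main() {
    let mut history = Vec::new();
    add_query(&mut history, "rust".into(), 20);
    add_query(&mut history, "rust".into(), 20); // duplicate: ignored
    add_query(&mut history, "rustlang".into(), 20); // contains "rust": replaces it
    add_query(&mut history, "php".into(), 20);
    assert_eq!(history, ["rustlang".to_string(), "php".to_string()]);
}
```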

crates/project/src/task_inventory.rs 🔗

@@ -639,7 +639,7 @@ impl Inventory {
     /// Will fail if the JSON is not a valid array of objects, but will continue if any object will not parse into a [`TaskTemplate`].
     ///
     /// Global tasks are updated for no worktree provided, otherwise the worktree metadata for a given path will be updated.
-    pub(crate) fn update_file_based_tasks(
+    pub fn update_file_based_tasks(
         &mut self,
         location: TaskSettingsLocation<'_>,
         raw_tasks_json: Option<&str>,
@@ -717,7 +717,7 @@ impl Inventory {
     /// Will fail if the JSON is not a valid array of objects, but will continue if any object will not parse into a [`TaskTemplate`].
     ///
     /// Global tasks are updated for no worktree provided, otherwise the worktree metadata for a given path will be updated.
-    pub(crate) fn update_file_based_scenarios(
+    pub fn update_file_based_scenarios(
         &mut self,
         location: TaskSettingsLocation<'_>,
         raw_tasks_json: Option<&str>,
@@ -825,7 +825,7 @@ fn task_lru_comparator(
         })
 }
 
-fn task_source_kind_preference(kind: &TaskSourceKind) -> u32 {
+pub fn task_source_kind_preference(kind: &TaskSourceKind) -> u32 {
     match kind {
         TaskSourceKind::Lsp { .. } => 0,
         TaskSourceKind::Language { .. } => 1,
@@ -844,114 +844,6 @@ fn task_variables_preference(task: &ResolvedTask) -> Reverse<usize> {
     })
 }
 
-#[cfg(test)]
-mod test_inventory {
-    use gpui::{AppContext as _, Entity, Task, TestAppContext};
-    use itertools::Itertools;
-    use task::TaskContext;
-    use worktree::WorktreeId;
-
-    use crate::Inventory;
-
-    use super::TaskSourceKind;
-
-    pub(super) fn task_template_names(
-        inventory: &Entity<Inventory>,
-        worktree: Option<WorktreeId>,
-        cx: &mut TestAppContext,
-    ) -> Task<Vec<String>> {
-        let new_tasks = inventory.update(cx, |inventory, cx| {
-            inventory.list_tasks(None, None, worktree, cx)
-        });
-        cx.background_spawn(async move {
-            new_tasks
-                .await
-                .into_iter()
-                .map(|(_, task)| task.label)
-                .sorted()
-                .collect()
-        })
-    }
-
-    pub(super) fn register_task_used(
-        inventory: &Entity<Inventory>,
-        task_name: &str,
-        cx: &mut TestAppContext,
-    ) -> Task<()> {
-        let tasks = inventory.update(cx, |inventory, cx| {
-            inventory.list_tasks(None, None, None, cx)
-        });
-
-        let task_name = task_name.to_owned();
-        let inventory = inventory.clone();
-        cx.spawn(|mut cx| async move {
-            let (task_source_kind, task) = tasks
-                .await
-                .into_iter()
-                .find(|(_, task)| task.label == task_name)
-                .unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
-
-            let id_base = task_source_kind.to_id_base();
-            inventory.update(&mut cx, |inventory, _| {
-                inventory.task_scheduled(
-                    task_source_kind.clone(),
-                    task.resolve_task(&id_base, &TaskContext::default())
-                        .unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")),
-                )
-            });
-        })
-    }
-
-    pub(super) fn register_worktree_task_used(
-        inventory: &Entity<Inventory>,
-        worktree_id: WorktreeId,
-        task_name: &str,
-        cx: &mut TestAppContext,
-    ) -> Task<()> {
-        let tasks = inventory.update(cx, |inventory, cx| {
-            inventory.list_tasks(None, None, Some(worktree_id), cx)
-        });
-
-        let inventory = inventory.clone();
-        let task_name = task_name.to_owned();
-        cx.spawn(|mut cx| async move {
-            let (task_source_kind, task) = tasks
-                .await
-                .into_iter()
-                .find(|(_, task)| task.label == task_name)
-                .unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
-            let id_base = task_source_kind.to_id_base();
-            inventory.update(&mut cx, |inventory, _| {
-                inventory.task_scheduled(
-                    task_source_kind.clone(),
-                    task.resolve_task(&id_base, &TaskContext::default())
-                        .unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")),
-                );
-            });
-        })
-    }
-
-    pub(super) async fn list_tasks(
-        inventory: &Entity<Inventory>,
-        worktree: Option<WorktreeId>,
-        cx: &mut TestAppContext,
-    ) -> Vec<(TaskSourceKind, String)> {
-        let task_context = &TaskContext::default();
-        inventory
-            .update(cx, |inventory, cx| {
-                inventory.list_tasks(None, None, worktree, cx)
-            })
-            .await
-            .into_iter()
-            .filter_map(|(source_kind, task)| {
-                let id_base = source_kind.to_id_base();
-                Some((source_kind, task.resolve_task(&id_base, task_context)?))
-            })
-            .map(|(source_kind, resolved_task)| (source_kind, resolved_task.resolved_label))
-            .collect()
-    }
-}
-
 /// A context provider that tries to provide values for all non-custom [`VariableName`] variants for a currently opened file.
 /// Applied as a base for every custom [`ContextProvider`] unless explicitly opted out.
 pub struct BasicContextProvider {
@@ -1079,524 +971,3 @@ impl ContextProvider for ContextProviderWithTasks {
         Task::ready(Some(self.templates.clone()))
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use gpui::TestAppContext;
-    use paths::tasks_file;
-    use pretty_assertions::assert_eq;
-    use serde_json::json;
-    use settings::SettingsLocation;
-    use std::path::Path;
-    use util::rel_path::rel_path;
-
-    use crate::task_store::TaskStore;
-
-    use super::test_inventory::*;
-    use super::*;
-
-    #[gpui::test]
-    async fn test_task_list_sorting(cx: &mut TestAppContext) {
-        init_test(cx);
-        let inventory = cx.update(|cx| Inventory::new(cx));
-        let initial_tasks = resolved_task_names(&inventory, None, cx).await;
-        assert!(
-            initial_tasks.is_empty(),
-            "No tasks expected for empty inventory, but got {initial_tasks:?}"
-        );
-        let initial_tasks = task_template_names(&inventory, None, cx).await;
-        assert!(
-            initial_tasks.is_empty(),
-            "No tasks expected for empty inventory, but got {initial_tasks:?}"
-        );
-        cx.run_until_parked();
-        let expected_initial_state = [
-            "1_a_task".to_string(),
-            "1_task".to_string(),
-            "2_task".to_string(),
-            "3_task".to_string(),
-        ];
-
-        inventory.update(cx, |inventory, _| {
-            inventory
-                .update_file_based_tasks(
-                    TaskSettingsLocation::Global(tasks_file()),
-                    Some(&mock_tasks_from_names(
-                        expected_initial_state.iter().map(|name| name.as_str()),
-                    )),
-                )
-                .unwrap();
-        });
-        assert_eq!(
-            task_template_names(&inventory, None, cx).await,
-            &expected_initial_state,
-        );
-        assert_eq!(
-            resolved_task_names(&inventory, None, cx).await,
-            &expected_initial_state,
-            "Tasks with equal amount of usages should be sorted alphanumerically"
-        );
-
-        register_task_used(&inventory, "2_task", cx).await;
-        assert_eq!(
-            task_template_names(&inventory, None, cx).await,
-            &expected_initial_state,
-        );
-        assert_eq!(
-            resolved_task_names(&inventory, None, cx).await,
-            vec![
-                "2_task".to_string(),
-                "1_a_task".to_string(),
-                "1_task".to_string(),
-                "3_task".to_string()
-            ],
-        );
-
-        register_task_used(&inventory, "1_task", cx).await;
-        register_task_used(&inventory, "1_task", cx).await;
-        register_task_used(&inventory, "1_task", cx).await;
-        register_task_used(&inventory, "3_task", cx).await;
-        assert_eq!(
-            task_template_names(&inventory, None, cx).await,
-            &expected_initial_state,
-        );
-        assert_eq!(
-            resolved_task_names(&inventory, None, cx).await,
-            vec![
-                "3_task".to_string(),
-                "1_task".to_string(),
-                "2_task".to_string(),
-                "1_a_task".to_string(),
-            ],
-            "Most recently used task should be at the top"
-        );
-
-        let worktree_id = WorktreeId::from_usize(0);
-        let local_worktree_location = SettingsLocation {
-            worktree_id,
-            path: rel_path("foo"),
-        };
-        inventory.update(cx, |inventory, _| {
-            inventory
-                .update_file_based_tasks(
-                    TaskSettingsLocation::Worktree(local_worktree_location),
-                    Some(&mock_tasks_from_names(["worktree_task_1"])),
-                )
-                .unwrap();
-        });
-        assert_eq!(
-            resolved_task_names(&inventory, None, cx).await,
-            vec![
-                "3_task".to_string(),
-                "1_task".to_string(),
-                "2_task".to_string(),
-                "1_a_task".to_string(),
-            ],
-            "Most recently used task should be at the top"
-        );
-        assert_eq!(
-            resolved_task_names(&inventory, Some(worktree_id), cx).await,
-            vec![
-                "3_task".to_string(),
-                "1_task".to_string(),
-                "2_task".to_string(),
-                "worktree_task_1".to_string(),
-                "1_a_task".to_string(),
-            ],
-        );
-        register_worktree_task_used(&inventory, worktree_id, "worktree_task_1", cx).await;
-        assert_eq!(
-            resolved_task_names(&inventory, Some(worktree_id), cx).await,
-            vec![
-                "worktree_task_1".to_string(),
-                "3_task".to_string(),
-                "1_task".to_string(),
-                "2_task".to_string(),
-                "1_a_task".to_string(),
-            ],
-            "Most recently used worktree task should be at the top"
-        );
-
-        inventory.update(cx, |inventory, _| {
-            inventory
-                .update_file_based_tasks(
-                    TaskSettingsLocation::Global(tasks_file()),
-                    Some(&mock_tasks_from_names(
-                        ["10_hello", "11_hello"]
-                            .into_iter()
-                            .chain(expected_initial_state.iter().map(|name| name.as_str())),
-                    )),
-                )
-                .unwrap();
-        });
-        cx.run_until_parked();
-        let expected_updated_state = [
-            "10_hello".to_string(),
-            "11_hello".to_string(),
-            "1_a_task".to_string(),
-            "1_task".to_string(),
-            "2_task".to_string(),
-            "3_task".to_string(),
-        ];
-        assert_eq!(
-            task_template_names(&inventory, None, cx).await,
-            &expected_updated_state,
-        );
-        assert_eq!(
-            resolved_task_names(&inventory, None, cx).await,
-            vec![
-                "worktree_task_1".to_string(),
-                "1_a_task".to_string(),
-                "1_task".to_string(),
-                "2_task".to_string(),
-                "3_task".to_string(),
-                "10_hello".to_string(),
-                "11_hello".to_string(),
-            ],
-            "After global tasks update, worktree task usage is not erased and it's the first still; global task is back to regular order as its file was updated"
-        );
-
-        register_task_used(&inventory, "11_hello", cx).await;
-        assert_eq!(
-            task_template_names(&inventory, None, cx).await,
-            &expected_updated_state,
-        );
-        assert_eq!(
-            resolved_task_names(&inventory, None, cx).await,
-            vec![
-                "11_hello".to_string(),
-                "worktree_task_1".to_string(),
-                "1_a_task".to_string(),
-                "1_task".to_string(),
-                "2_task".to_string(),
-                "3_task".to_string(),
-                "10_hello".to_string(),
-            ],
-        );
-    }
-
-    #[gpui::test]
-    async fn test_reloading_debug_scenarios(cx: &mut TestAppContext) {
-        init_test(cx);
-        let inventory = cx.update(|cx| Inventory::new(cx));
-        inventory.update(cx, |inventory, _| {
-            inventory
-                .update_file_based_scenarios(
-                    TaskSettingsLocation::Global(Path::new("")),
-                    Some(
-                        r#"
-                        [{
-                            "label": "test scenario",
-                            "adapter": "CodeLLDB",
-                            "request": "launch",
-                            "program": "wowzer",
-                        }]
-                        "#,
-                    ),
-                )
-                .unwrap();
-        });
-
-        let (_, scenario) = inventory
-            .update(cx, |this, cx| {
-                this.list_debug_scenarios(&TaskContexts::default(), vec![], vec![], false, cx)
-            })
-            .await
-            .1
-            .first()
-            .unwrap()
-            .clone();
-
-        inventory.update(cx, |this, _| {
-            this.scenario_scheduled(scenario.clone(), TaskContext::default(), None, None);
-        });
-
-        assert_eq!(
-            inventory
-                .update(cx, |this, cx| {
-                    this.list_debug_scenarios(&TaskContexts::default(), vec![], vec![], false, cx)
-                })
-                .await
-                .0
-                .first()
-                .unwrap()
-                .clone()
-                .0,
-            scenario
-        );
-
-        inventory.update(cx, |this, _| {
-            this.update_file_based_scenarios(
-                TaskSettingsLocation::Global(Path::new("")),
-                Some(
-                    r#"
-                        [{
-                            "label": "test scenario",
-                            "adapter": "Delve",
-                            "request": "launch",
-                            "program": "wowzer",
-                        }]
-                        "#,
-                ),
-            )
-            .unwrap();
-        });
-
-        assert_eq!(
-            inventory
-                .update(cx, |this, cx| {
-                    this.list_debug_scenarios(&TaskContexts::default(), vec![], vec![], false, cx)
-                })
-                .await
-                .0
-                .first()
-                .unwrap()
-                .0
-                .adapter,
-            "Delve",
-        );
-
-        inventory.update(cx, |this, _| {
-            this.update_file_based_scenarios(
-                TaskSettingsLocation::Global(Path::new("")),
-                Some(
-                    r#"
-                        [{
-                            "label": "testing scenario",
-                            "adapter": "Delve",
-                            "request": "launch",
-                            "program": "wowzer",
-                        }]
-                        "#,
-                ),
-            )
-            .unwrap();
-        });
-
-        assert!(
-            inventory
-                .update(cx, |this, cx| {
-                    this.list_debug_scenarios(&TaskContexts::default(), vec![], vec![], false, cx)
-                })
-                .await
-                .0
-                .is_empty(),
-        );
-    }
-
-    #[gpui::test]
-    async fn test_inventory_static_task_filters(cx: &mut TestAppContext) {
-        init_test(cx);
-        let inventory = cx.update(|cx| Inventory::new(cx));
-        let common_name = "common_task_name";
-        let worktree_1 = WorktreeId::from_usize(1);
-        let worktree_2 = WorktreeId::from_usize(2);
-
-        cx.run_until_parked();
-        let worktree_independent_tasks = vec![
-            (
-                TaskSourceKind::AbsPath {
-                    id_base: "global tasks.json".into(),
-                    abs_path: paths::tasks_file().clone(),
-                },
-                common_name.to_string(),
-            ),
-            (
-                TaskSourceKind::AbsPath {
-                    id_base: "global tasks.json".into(),
-                    abs_path: paths::tasks_file().clone(),
-                },
-                "static_source_1".to_string(),
-            ),
-            (
-                TaskSourceKind::AbsPath {
-                    id_base: "global tasks.json".into(),
-                    abs_path: paths::tasks_file().clone(),
-                },
-                "static_source_2".to_string(),
-            ),
-        ];
-        let worktree_1_tasks = [
-            (
-                TaskSourceKind::Worktree {
-                    id: worktree_1,
-                    directory_in_worktree: rel_path(".zed").into(),
-                    id_base: "local worktree tasks from directory \".zed\"".into(),
-                },
-                common_name.to_string(),
-            ),
-            (
-                TaskSourceKind::Worktree {
-                    id: worktree_1,
-                    directory_in_worktree: rel_path(".zed").into(),
-                    id_base: "local worktree tasks from directory \".zed\"".into(),
-                },
-                "worktree_1".to_string(),
-            ),
-        ];
-        let worktree_2_tasks = [
-            (
-                TaskSourceKind::Worktree {
-                    id: worktree_2,
-                    directory_in_worktree: rel_path(".zed").into(),
-                    id_base: "local worktree tasks from directory \".zed\"".into(),
-                },
-                common_name.to_string(),
-            ),
-            (
-                TaskSourceKind::Worktree {
-                    id: worktree_2,
-                    directory_in_worktree: rel_path(".zed").into(),
-                    id_base: "local worktree tasks from directory \".zed\"".into(),
-                },
-                "worktree_2".to_string(),
-            ),
-        ];
-
-        inventory.update(cx, |inventory, _| {
-            inventory
-                .update_file_based_tasks(
-                    TaskSettingsLocation::Global(tasks_file()),
-                    Some(&mock_tasks_from_names(
-                        worktree_independent_tasks
-                            .iter()
-                            .map(|(_, name)| name.as_str()),
-                    )),
-                )
-                .unwrap();
-            inventory
-                .update_file_based_tasks(
-                    TaskSettingsLocation::Worktree(SettingsLocation {
-                        worktree_id: worktree_1,
-                        path: rel_path(".zed"),
-                    }),
-                    Some(&mock_tasks_from_names(
-                        worktree_1_tasks.iter().map(|(_, name)| name.as_str()),
-                    )),
-                )
-                .unwrap();
-            inventory
-                .update_file_based_tasks(
-                    TaskSettingsLocation::Worktree(SettingsLocation {
-                        worktree_id: worktree_2,
-                        path: rel_path(".zed"),
-                    }),
-                    Some(&mock_tasks_from_names(
-                        worktree_2_tasks.iter().map(|(_, name)| name.as_str()),
-                    )),
-                )
-                .unwrap();
-        });
-
-        assert_eq!(
-            list_tasks_sorted_by_last_used(&inventory, None, cx).await,
-            worktree_independent_tasks,
-            "Without a worktree, only worktree-independent tasks should be listed"
-        );
-        assert_eq!(
-            list_tasks_sorted_by_last_used(&inventory, Some(worktree_1), cx).await,
-            worktree_1_tasks
-                .iter()
-                .chain(worktree_independent_tasks.iter())
-                .cloned()
-                .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
-                .collect::<Vec<_>>(),
-        );
-        assert_eq!(
-            list_tasks_sorted_by_last_used(&inventory, Some(worktree_2), cx).await,
-            worktree_2_tasks
-                .iter()
-                .chain(worktree_independent_tasks.iter())
-                .cloned()
-                .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
-                .collect::<Vec<_>>(),
-        );
-
-        assert_eq!(
-            list_tasks(&inventory, None, cx).await,
-            worktree_independent_tasks,
-            "Without a worktree, only worktree-independent tasks should be listed"
-        );
-        assert_eq!(
-            list_tasks(&inventory, Some(worktree_1), cx).await,
-            worktree_1_tasks
-                .iter()
-                .chain(worktree_independent_tasks.iter())
-                .cloned()
-                .collect::<Vec<_>>(),
-        );
-        assert_eq!(
-            list_tasks(&inventory, Some(worktree_2), cx).await,
-            worktree_2_tasks
-                .iter()
-                .chain(worktree_independent_tasks.iter())
-                .cloned()
-                .collect::<Vec<_>>(),
-        );
-    }
-
-    fn init_test(_cx: &mut TestAppContext) {
-        zlog::init_test();
-        TaskStore::init(None);
-    }
-
-    fn resolved_task_names(
-        inventory: &Entity<Inventory>,
-        worktree: Option<WorktreeId>,
-        cx: &mut TestAppContext,
-    ) -> Task<Vec<String>> {
-        let tasks = inventory.update(cx, |inventory, cx| {
-            let mut task_contexts = TaskContexts::default();
-            task_contexts.active_worktree_context =
-                worktree.map(|worktree| (worktree, TaskContext::default()));
-
-            inventory.used_and_current_resolved_tasks(Arc::new(task_contexts), cx)
-        });
-
-        cx.background_spawn(async move {
-            let (used, current) = tasks.await;
-            used.into_iter()
-                .chain(current)
-                .map(|(_, task)| task.original_task().label.clone())
-                .collect()
-        })
-    }
-
-    fn mock_tasks_from_names<'a>(task_names: impl IntoIterator<Item = &'a str> + 'a) -> String {
-        serde_json::to_string(&serde_json::Value::Array(
-            task_names
-                .into_iter()
-                .map(|task_name| {
-                    json!({
-                        "label": task_name,
-                        "command": "echo",
-                        "args": vec![task_name],
-                    })
-                })
-                .collect::<Vec<_>>(),
-        ))
-        .unwrap()
-    }
-
-    async fn list_tasks_sorted_by_last_used(
-        inventory: &Entity<Inventory>,
-        worktree: Option<WorktreeId>,
-        cx: &mut TestAppContext,
-    ) -> Vec<(TaskSourceKind, String)> {
-        let (used, current) = inventory
-            .update(cx, |inventory, cx| {
-                let mut task_contexts = TaskContexts::default();
-                task_contexts.active_worktree_context =
-                    worktree.map(|worktree| (worktree, TaskContext::default()));
-
-                inventory.used_and_current_resolved_tasks(Arc::new(task_contexts), cx)
-            })
-            .await;
-        let mut all = used;
-        all.extend(current);
-        all.into_iter()
-            .map(|(source_kind, task)| (source_kind, task.resolved_label))
-            .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
-            .collect()
-    }
-}
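
The doc comments on `update_file_based_tasks` and `update_file_based_scenarios` above describe the parsing contract these tests rely on: the call fails when the JSON is not an array, but individual entries that fail to deserialize are skipped rather than aborting the whole update. A rough sketch of that contract, using a hypothetical `Template` stand-in instead of the crate's `TaskTemplate` (note that the JSON in the tests above includes trailing commas, so the real parser is evidently more lenient than plain `serde_json`):

```rust
use serde::Deserialize;

// A stand-in for the crate's TaskTemplate, used only for this illustration.
#[derive(Debug, Deserialize)]
struct Template {
    label: String,
    command: String,
    #[serde(default)]
    args: Vec<String>,
}

// Error out if the input is not a JSON array, but skip entries that do not
// deserialize into a Template.
fn parse_templates(raw: &str) -> Result<Vec<Template>, serde_json::Error> {
    let entries: Vec<serde_json::Value> = serde_json::from_str(raw)?;
    Ok(entries
        .into_iter()
        .filter_map(|entry| serde_json::from_value(entry).ok())
        .collect())
}

fn main() {
    let raw = r#"[
        {"label": "1_task", "command": "echo", "args": ["1_task"]},
        {"label": "missing_command"}
    ]"#;
    let templates = parse_templates(raw).unwrap();
    assert_eq!(templates.len(), 1); // the malformed entry is skipped, not fatal
    assert!(parse_templates(r#"{"not": "an array"}"#).is_err());
}
```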

crates/project/src/trusted_worktrees.rs 🔗

@@ -160,7 +160,7 @@ impl From<RemoteConnectionOptions> for RemoteHostLocation {
                 Some(SharedString::new(docker_connection_options.name)),
                 SharedString::new(docker_connection_options.container_id),
             ),
-            #[cfg(any(test, feature = "test-support"))]
+            #[cfg(feature = "test-support")]
             RemoteConnectionOptions::Mock(mock) => {
                 (None, SharedString::new(format!("mock-{}", mock.id)))
             }
@@ -252,6 +252,17 @@ impl TrustedWorktreesStore {
             })
     }
 
+    #[cfg(feature = "test-support")]
+    pub fn restricted_worktrees_for_store(
+        &self,
+        worktree_store: &Entity<WorktreeStore>,
+    ) -> HashSet<WorktreeId> {
+        self.restricted
+            .get(&worktree_store.downgrade())
+            .unwrap()
+            .clone()
+    }
+
     /// Adds certain entities on this host to the trusted list.
     /// This will emit [`TrustedWorktreesEvent::Trusted`] event for all passed entries
     /// and the ones that got auto trusted based on trust hierarchy (see module-level docs).
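
The doc comment above notes that trusting an entry also trusts entries that follow from the trust hierarchy, and the moved tests below exercise this by trusting a parent directory and checking that worktrees beneath it become trusted too. A minimal sketch of that ancestor-based idea (an illustration only, not `TrustedWorktreesStore`'s actual logic; `is_trusted` is a hypothetical helper):

```rust
use std::path::{Path, PathBuf};

// A path counts as trusted when any of its ancestors (including itself)
// is an explicitly trusted root.
fn is_trusted(trusted_roots: &[PathBuf], path: &Path) -> bool {
    path.ancestors()
        .any(|ancestor| trusted_roots.iter().any(|root| root.as_path() == ancestor))
}

fn main() {
    let trusted = vec![PathBuf::from("/project")];
    assert!(is_trusted(&trusted, Path::new("/project")));
    assert!(is_trusted(&trusted, Path::new("/project/main.rs")));
    assert!(!is_trusted(&trusted, Path::new("/standalone.rs")));
}
```
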
@@ -685,973 +696,3 @@ fn find_worktree_in_store(
         None
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use std::{cell::RefCell, path::PathBuf, rc::Rc};
-
-    use collections::HashSet;
-    use gpui::TestAppContext;
-    use serde_json::json;
-    use settings::SettingsStore;
-    use util::path;
-
-    use crate::{FakeFs, Project};
-
-    use super::*;
-
-    fn init_test(cx: &mut TestAppContext) {
-        cx.update(|cx| {
-            if cx.try_global::<SettingsStore>().is_none() {
-                let settings_store = SettingsStore::test(cx);
-                cx.set_global(settings_store);
-            }
-            if cx.try_global::<TrustedWorktrees>().is_some() {
-                cx.remove_global::<TrustedWorktrees>();
-            }
-        });
-    }
-
-    fn init_trust_global(
-        worktree_store: Entity<WorktreeStore>,
-        cx: &mut TestAppContext,
-    ) -> Entity<TrustedWorktreesStore> {
-        cx.update(|cx| {
-            init(HashMap::default(), cx);
-            track_worktree_trust(worktree_store, None, None, None, cx);
-            TrustedWorktrees::try_get_global(cx).expect("global should be set")
-        })
-    }
-
-    #[gpui::test]
-    async fn test_single_worktree_trust(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(path!("/root"), json!({ "main.rs": "fn main() {}" }))
-            .await;
-
-        let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let worktree_id = worktree_store.read_with(cx, |store, cx| {
-            store.worktrees().next().unwrap().read(cx).id()
-        });
-
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        let events: Rc<RefCell<Vec<TrustedWorktreesEvent>>> = Rc::default();
-        cx.update({
-            let events = events.clone();
-            |cx| {
-                cx.subscribe(&trusted_worktrees, move |_, event, _| {
-                    events.borrow_mut().push(match event {
-                        TrustedWorktreesEvent::Trusted(host, paths) => {
-                            TrustedWorktreesEvent::Trusted(host.clone(), paths.clone())
-                        }
-                        TrustedWorktreesEvent::Restricted(host, paths) => {
-                            TrustedWorktreesEvent::Restricted(host.clone(), paths.clone())
-                        }
-                    });
-                })
-            }
-        })
-        .detach();
-
-        let can_trust = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_id, cx)
-        });
-        assert!(!can_trust, "worktree should be restricted by default");
-
-        {
-            let events = events.borrow();
-            assert_eq!(events.len(), 1);
-            match &events[0] {
-                TrustedWorktreesEvent::Restricted(event_worktree_store, paths) => {
-                    assert_eq!(event_worktree_store, &worktree_store.downgrade());
-                    assert!(paths.contains(&PathTrust::Worktree(worktree_id)));
-                }
-                _ => panic!("expected Restricted event"),
-            }
-        }
-
-        let has_restricted = trusted_worktrees.read_with(cx, |store, cx| {
-            store.has_restricted_worktrees(&worktree_store, cx)
-        });
-        assert!(has_restricted, "should have restricted worktrees");
-
-        let restricted = trusted_worktrees.read_with(cx, |trusted_worktrees, cx| {
-            trusted_worktrees.restricted_worktrees(&worktree_store, cx)
-        });
-        assert!(restricted.iter().any(|(id, _)| *id == worktree_id));
-
-        events.borrow_mut().clear();
-
-        let can_trust_again = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_id, cx)
-        });
-        assert!(!can_trust_again, "worktree should still be restricted");
-        assert!(
-            events.borrow().is_empty(),
-            "no duplicate Restricted event on repeated can_trust"
-        );
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
-                cx,
-            );
-        });
-
-        {
-            let events = events.borrow();
-            assert_eq!(events.len(), 1);
-            match &events[0] {
-                TrustedWorktreesEvent::Trusted(event_worktree_store, paths) => {
-                    assert_eq!(event_worktree_store, &worktree_store.downgrade());
-                    assert!(paths.contains(&PathTrust::Worktree(worktree_id)));
-                }
-                _ => panic!("expected Trusted event"),
-            }
-        }
-
-        let can_trust_after = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_id, cx)
-        });
-        assert!(can_trust_after, "worktree should be trusted after trust()");
-
-        let has_restricted_after = trusted_worktrees.read_with(cx, |store, cx| {
-            store.has_restricted_worktrees(&worktree_store, cx)
-        });
-        assert!(
-            !has_restricted_after,
-            "should have no restricted worktrees after trust"
-        );
-
-        let restricted_after = trusted_worktrees.read_with(cx, |trusted_worktrees, cx| {
-            trusted_worktrees.restricted_worktrees(&worktree_store, cx)
-        });
-        assert!(
-            restricted_after.is_empty(),
-            "restricted set should be empty"
-        );
-    }
-
-    #[gpui::test]
-    async fn test_single_file_worktree_trust(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(path!("/root"), json!({ "foo.rs": "fn foo() {}" }))
-            .await;
-
-        let project = Project::test(fs, [path!("/root/foo.rs").as_ref()], cx).await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let worktree_id = worktree_store.read_with(cx, |store, cx| {
-            let worktree = store.worktrees().next().unwrap();
-            let worktree = worktree.read(cx);
-            assert!(worktree.is_single_file(), "expected single-file worktree");
-            worktree.id()
-        });
-
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        let events: Rc<RefCell<Vec<TrustedWorktreesEvent>>> = Rc::default();
-        cx.update({
-            let events = events.clone();
-            |cx| {
-                cx.subscribe(&trusted_worktrees, move |_, event, _| {
-                    events.borrow_mut().push(match event {
-                        TrustedWorktreesEvent::Trusted(host, paths) => {
-                            TrustedWorktreesEvent::Trusted(host.clone(), paths.clone())
-                        }
-                        TrustedWorktreesEvent::Restricted(host, paths) => {
-                            TrustedWorktreesEvent::Restricted(host.clone(), paths.clone())
-                        }
-                    });
-                })
-            }
-        })
-        .detach();
-
-        let can_trust = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_id, cx)
-        });
-        assert!(
-            !can_trust,
-            "single-file worktree should be restricted by default"
-        );
-
-        {
-            let events = events.borrow();
-            assert_eq!(events.len(), 1);
-            match &events[0] {
-                TrustedWorktreesEvent::Restricted(event_worktree_store, paths) => {
-                    assert_eq!(event_worktree_store, &worktree_store.downgrade());
-                    assert!(paths.contains(&PathTrust::Worktree(worktree_id)));
-                }
-                _ => panic!("expected Restricted event"),
-            }
-        }
-
-        events.borrow_mut().clear();
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
-                cx,
-            );
-        });
-
-        {
-            let events = events.borrow();
-            assert_eq!(events.len(), 1);
-            match &events[0] {
-                TrustedWorktreesEvent::Trusted(event_worktree_store, paths) => {
-                    assert_eq!(event_worktree_store, &worktree_store.downgrade());
-                    assert!(paths.contains(&PathTrust::Worktree(worktree_id)));
-                }
-                _ => panic!("expected Trusted event"),
-            }
-        }
-
-        let can_trust_after = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_id, cx)
-        });
-        assert!(
-            can_trust_after,
-            "single-file worktree should be trusted after trust()"
-        );
-    }
-
-    #[gpui::test]
-    async fn test_multiple_single_file_worktrees_trust_one(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(
-            path!("/root"),
-            json!({
-                "a.rs": "fn a() {}",
-                "b.rs": "fn b() {}",
-                "c.rs": "fn c() {}"
-            }),
-        )
-        .await;
-
-        let project = Project::test(
-            fs,
-            [
-                path!("/root/a.rs").as_ref(),
-                path!("/root/b.rs").as_ref(),
-                path!("/root/c.rs").as_ref(),
-            ],
-            cx,
-        )
-        .await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let worktree_ids: Vec<_> = worktree_store.read_with(cx, |store, cx| {
-            store
-                .worktrees()
-                .map(|worktree| {
-                    let worktree = worktree.read(cx);
-                    assert!(worktree.is_single_file());
-                    worktree.id()
-                })
-                .collect()
-        });
-        assert_eq!(worktree_ids.len(), 3);
-
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        for &worktree_id in &worktree_ids {
-            let can_trust = trusted_worktrees.update(cx, |store, cx| {
-                store.can_trust(&worktree_store, worktree_id, cx)
-            });
-            assert!(
-                !can_trust,
-                "worktree {worktree_id:?} should be restricted initially"
-            );
-        }
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::Worktree(worktree_ids[1])]),
-                cx,
-            );
-        });
-
-        let can_trust_0 = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_ids[0], cx)
-        });
-        let can_trust_1 = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_ids[1], cx)
-        });
-        let can_trust_2 = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_ids[2], cx)
-        });
-
-        assert!(!can_trust_0, "worktree 0 should still be restricted");
-        assert!(can_trust_1, "worktree 1 should be trusted");
-        assert!(!can_trust_2, "worktree 2 should still be restricted");
-    }
-
-    #[gpui::test]
-    async fn test_two_directory_worktrees_separate_trust(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(
-            path!("/projects"),
-            json!({
-                "project_a": { "main.rs": "fn main() {}" },
-                "project_b": { "lib.rs": "pub fn lib() {}" }
-            }),
-        )
-        .await;
-
-        let project = Project::test(
-            fs,
-            [
-                path!("/projects/project_a").as_ref(),
-                path!("/projects/project_b").as_ref(),
-            ],
-            cx,
-        )
-        .await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let worktree_ids: Vec<_> = worktree_store.read_with(cx, |store, cx| {
-            store
-                .worktrees()
-                .map(|worktree| {
-                    let worktree = worktree.read(cx);
-                    assert!(!worktree.is_single_file());
-                    worktree.id()
-                })
-                .collect()
-        });
-        assert_eq!(worktree_ids.len(), 2);
-
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        let can_trust_a = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_ids[0], cx)
-        });
-        let can_trust_b = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_ids[1], cx)
-        });
-        assert!(!can_trust_a, "project_a should be restricted initially");
-        assert!(!can_trust_b, "project_b should be restricted initially");
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::Worktree(worktree_ids[0])]),
-                cx,
-            );
-        });
-
-        let can_trust_a = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_ids[0], cx)
-        });
-        let can_trust_b = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_ids[1], cx)
-        });
-        assert!(can_trust_a, "project_a should be trusted after trust()");
-        assert!(!can_trust_b, "project_b should still be restricted");
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::Worktree(worktree_ids[1])]),
-                cx,
-            );
-        });
-
-        let can_trust_a = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_ids[0], cx)
-        });
-        let can_trust_b = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_ids[1], cx)
-        });
-        assert!(can_trust_a, "project_a should remain trusted");
-        assert!(can_trust_b, "project_b should now be trusted");
-    }
-
-    #[gpui::test]
-    async fn test_directory_worktree_trust_enables_single_file(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(
-            path!("/"),
-            json!({
-                "project": { "main.rs": "fn main() {}" },
-                "standalone.rs": "fn standalone() {}"
-            }),
-        )
-        .await;
-
-        let project = Project::test(
-            fs,
-            [path!("/project").as_ref(), path!("/standalone.rs").as_ref()],
-            cx,
-        )
-        .await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let (dir_worktree_id, file_worktree_id) = worktree_store.read_with(cx, |store, cx| {
-            let worktrees: Vec<_> = store.worktrees().collect();
-            assert_eq!(worktrees.len(), 2);
-            let (dir_worktree, file_worktree) = if worktrees[0].read(cx).is_single_file() {
-                (&worktrees[1], &worktrees[0])
-            } else {
-                (&worktrees[0], &worktrees[1])
-            };
-            assert!(!dir_worktree.read(cx).is_single_file());
-            assert!(file_worktree.read(cx).is_single_file());
-            (dir_worktree.read(cx).id(), file_worktree.read(cx).id())
-        });
-
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        let can_trust_file = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, file_worktree_id, cx)
-        });
-        assert!(
-            !can_trust_file,
-            "single-file worktree should be restricted initially"
-        );
-
-        let can_trust_directory = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, dir_worktree_id, cx)
-        });
-        assert!(
-            !can_trust_directory,
-            "directory worktree should be restricted initially"
-        );
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::Worktree(dir_worktree_id)]),
-                cx,
-            );
-        });
-
-        let can_trust_dir = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, dir_worktree_id, cx)
-        });
-        let can_trust_file_after = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, file_worktree_id, cx)
-        });
-        assert!(can_trust_dir, "directory worktree should be trusted");
-        assert!(
-            can_trust_file_after,
-            "single-file worktree should be trusted after directory worktree trust"
-        );
-    }
-
-    #[gpui::test]
-    async fn test_parent_path_trust_enables_single_file(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(
-            path!("/"),
-            json!({
-                "project": { "main.rs": "fn main() {}" },
-                "standalone.rs": "fn standalone() {}"
-            }),
-        )
-        .await;
-
-        let project = Project::test(
-            fs,
-            [path!("/project").as_ref(), path!("/standalone.rs").as_ref()],
-            cx,
-        )
-        .await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let (dir_worktree_id, file_worktree_id) = worktree_store.read_with(cx, |store, cx| {
-            let worktrees: Vec<_> = store.worktrees().collect();
-            assert_eq!(worktrees.len(), 2);
-            let (dir_worktree, file_worktree) = if worktrees[0].read(cx).is_single_file() {
-                (&worktrees[1], &worktrees[0])
-            } else {
-                (&worktrees[0], &worktrees[1])
-            };
-            assert!(!dir_worktree.read(cx).is_single_file());
-            assert!(file_worktree.read(cx).is_single_file());
-            (dir_worktree.read(cx).id(), file_worktree.read(cx).id())
-        });
-
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        let can_trust_file = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, file_worktree_id, cx)
-        });
-        assert!(
-            !can_trust_file,
-            "single-file worktree should be restricted initially"
-        );
-
-        let can_trust_directory = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, dir_worktree_id, cx)
-        });
-        assert!(
-            !can_trust_directory,
-            "directory worktree should be restricted initially"
-        );
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::AbsPath(PathBuf::from(path!("/project")))]),
-                cx,
-            );
-        });
-
-        let can_trust_dir = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, dir_worktree_id, cx)
-        });
-        let can_trust_file_after = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, file_worktree_id, cx)
-        });
-        assert!(
-            can_trust_dir,
-            "directory worktree should be trusted after its parent is trusted"
-        );
-        assert!(
-            can_trust_file_after,
-            "single-file worktree should be trusted after directory worktree trust via its parent directory trust"
-        );
-    }
-
-    #[gpui::test]
-    async fn test_abs_path_trust_covers_multiple_worktrees(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(
-            path!("/root"),
-            json!({
-                "project_a": { "main.rs": "fn main() {}" },
-                "project_b": { "lib.rs": "pub fn lib() {}" }
-            }),
-        )
-        .await;
-
-        let project = Project::test(
-            fs,
-            [
-                path!("/root/project_a").as_ref(),
-                path!("/root/project_b").as_ref(),
-            ],
-            cx,
-        )
-        .await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let worktree_ids: Vec<_> = worktree_store.read_with(cx, |store, cx| {
-            store
-                .worktrees()
-                .map(|worktree| worktree.read(cx).id())
-                .collect()
-        });
-        assert_eq!(worktree_ids.len(), 2);
-
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        for &worktree_id in &worktree_ids {
-            let can_trust = trusted_worktrees.update(cx, |store, cx| {
-                store.can_trust(&worktree_store, worktree_id, cx)
-            });
-            assert!(!can_trust, "worktree should be restricted initially");
-        }
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::AbsPath(PathBuf::from(path!("/root")))]),
-                cx,
-            );
-        });
-
-        for &worktree_id in &worktree_ids {
-            let can_trust = trusted_worktrees.update(cx, |store, cx| {
-                store.can_trust(&worktree_store, worktree_id, cx)
-            });
-            assert!(
-                can_trust,
-                "worktree should be trusted after parent path trust"
-            );
-        }
-    }
-
-    #[gpui::test]
-    async fn test_auto_trust_all(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(
-            path!("/"),
-            json!({
-                "project_a": { "main.rs": "fn main() {}" },
-                "project_b": { "lib.rs": "pub fn lib() {}" },
-                "single.rs": "fn single() {}"
-            }),
-        )
-        .await;
-
-        let project = Project::test(
-            fs,
-            [
-                path!("/project_a").as_ref(),
-                path!("/project_b").as_ref(),
-                path!("/single.rs").as_ref(),
-            ],
-            cx,
-        )
-        .await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let worktree_ids: Vec<_> = worktree_store.read_with(cx, |store, cx| {
-            store
-                .worktrees()
-                .map(|worktree| worktree.read(cx).id())
-                .collect()
-        });
-        assert_eq!(worktree_ids.len(), 3);
-
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        let events: Rc<RefCell<Vec<TrustedWorktreesEvent>>> = Rc::default();
-        cx.update({
-            let events = events.clone();
-            |cx| {
-                cx.subscribe(&trusted_worktrees, move |_, event, _| {
-                    events.borrow_mut().push(match event {
-                        TrustedWorktreesEvent::Trusted(host, paths) => {
-                            TrustedWorktreesEvent::Trusted(host.clone(), paths.clone())
-                        }
-                        TrustedWorktreesEvent::Restricted(host, paths) => {
-                            TrustedWorktreesEvent::Restricted(host.clone(), paths.clone())
-                        }
-                    });
-                })
-            }
-        })
-        .detach();
-
-        for &worktree_id in &worktree_ids {
-            let can_trust = trusted_worktrees.update(cx, |store, cx| {
-                store.can_trust(&worktree_store, worktree_id, cx)
-            });
-            assert!(!can_trust, "worktree should be restricted initially");
-        }
-
-        let has_restricted = trusted_worktrees.read_with(cx, |store, cx| {
-            store.has_restricted_worktrees(&worktree_store, cx)
-        });
-        assert!(has_restricted, "should have restricted worktrees");
-
-        events.borrow_mut().clear();
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.auto_trust_all(cx);
-        });
-
-        for &worktree_id in &worktree_ids {
-            let can_trust = trusted_worktrees.update(cx, |store, cx| {
-                store.can_trust(&worktree_store, worktree_id, cx)
-            });
-            assert!(
-                can_trust,
-                "worktree {worktree_id:?} should be trusted after auto_trust_all"
-            );
-        }
-
-        let has_restricted_after = trusted_worktrees.read_with(cx, |store, cx| {
-            store.has_restricted_worktrees(&worktree_store, cx)
-        });
-        assert!(
-            !has_restricted_after,
-            "should have no restricted worktrees after auto_trust_all"
-        );
-
-        let trusted_event_count = events
-            .borrow()
-            .iter()
-            .filter(|e| matches!(e, TrustedWorktreesEvent::Trusted(..)))
-            .count();
-        assert!(
-            trusted_event_count > 0,
-            "should have emitted Trusted events"
-        );
-    }
-
-    #[gpui::test]
-    async fn test_trust_restrict_trust_cycle(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(path!("/root"), json!({ "main.rs": "fn main() {}" }))
-            .await;
-
-        let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let worktree_id = worktree_store.read_with(cx, |store, cx| {
-            store.worktrees().next().unwrap().read(cx).id()
-        });
-
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        let events: Rc<RefCell<Vec<TrustedWorktreesEvent>>> = Rc::default();
-        cx.update({
-            let events = events.clone();
-            |cx| {
-                cx.subscribe(&trusted_worktrees, move |_, event, _| {
-                    events.borrow_mut().push(match event {
-                        TrustedWorktreesEvent::Trusted(host, paths) => {
-                            TrustedWorktreesEvent::Trusted(host.clone(), paths.clone())
-                        }
-                        TrustedWorktreesEvent::Restricted(host, paths) => {
-                            TrustedWorktreesEvent::Restricted(host.clone(), paths.clone())
-                        }
-                    });
-                })
-            }
-        })
-        .detach();
-
-        let can_trust = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_id, cx)
-        });
-        assert!(!can_trust, "should be restricted initially");
-        assert_eq!(events.borrow().len(), 1);
-        events.borrow_mut().clear();
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
-                cx,
-            );
-        });
-        let can_trust = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_id, cx)
-        });
-        assert!(can_trust, "should be trusted after trust()");
-        assert_eq!(events.borrow().len(), 1);
-        assert!(matches!(
-            &events.borrow()[0],
-            TrustedWorktreesEvent::Trusted(..)
-        ));
-        events.borrow_mut().clear();
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.restrict(
-                worktree_store.downgrade(),
-                HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
-                cx,
-            );
-        });
-        let can_trust = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_id, cx)
-        });
-        assert!(!can_trust, "should be restricted after restrict()");
-        assert_eq!(events.borrow().len(), 1);
-        assert!(matches!(
-            &events.borrow()[0],
-            TrustedWorktreesEvent::Restricted(..)
-        ));
-
-        let has_restricted = trusted_worktrees.read_with(cx, |store, cx| {
-            store.has_restricted_worktrees(&worktree_store, cx)
-        });
-        assert!(has_restricted);
-        events.borrow_mut().clear();
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
-                cx,
-            );
-        });
-        let can_trust = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, worktree_id, cx)
-        });
-        assert!(can_trust, "should be trusted again after second trust()");
-        assert_eq!(events.borrow().len(), 1);
-        assert!(matches!(
-            &events.borrow()[0],
-            TrustedWorktreesEvent::Trusted(..)
-        ));
-
-        let has_restricted = trusted_worktrees.read_with(cx, |store, cx| {
-            store.has_restricted_worktrees(&worktree_store, cx)
-        });
-        assert!(!has_restricted);
-    }
-
-    #[gpui::test]
-    async fn test_multi_host_trust_isolation(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(
-            path!("/"),
-            json!({
-                "local_project": { "main.rs": "fn main() {}" },
-                "remote_project": { "lib.rs": "pub fn lib() {}" }
-            }),
-        )
-        .await;
-
-        let project = Project::test(
-            fs,
-            [
-                path!("/local_project").as_ref(),
-                path!("/remote_project").as_ref(),
-            ],
-            cx,
-        )
-        .await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let worktree_ids: Vec<_> = worktree_store.read_with(cx, |store, cx| {
-            store
-                .worktrees()
-                .map(|worktree| worktree.read(cx).id())
-                .collect()
-        });
-        assert_eq!(worktree_ids.len(), 2);
-        let local_worktree = worktree_ids[0];
-        let _remote_worktree = worktree_ids[1];
-
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        let can_trust_local = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, local_worktree, cx)
-        });
-        assert!(!can_trust_local, "local worktree restricted on host_a");
-
-        trusted_worktrees.update(cx, |store, cx| {
-            store.trust(
-                &worktree_store,
-                HashSet::from_iter([PathTrust::Worktree(local_worktree)]),
-                cx,
-            );
-        });
-
-        let can_trust_local_after = trusted_worktrees.update(cx, |store, cx| {
-            store.can_trust(&worktree_store, local_worktree, cx)
-        });
-        assert!(
-            can_trust_local_after,
-            "local worktree should be trusted on local host"
-        );
-    }
-
-    #[gpui::test]
-    async fn test_invisible_worktree_stores_do_not_affect_trust(cx: &mut TestAppContext) {
-        init_test(cx);
-
-        let fs = FakeFs::new(cx.executor());
-        fs.insert_tree(
-            path!("/"),
-            json!({
-                "visible": { "main.rs": "fn main() {}" },
-                "other": { "a.rs": "fn other() {}" },
-                "invisible": { "b.rs": "fn invisible() {}" }
-            }),
-        )
-        .await;
-
-        let project = Project::test(fs, [path!("/visible").as_ref()], cx).await;
-        let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
-        let visible_worktree_id = worktree_store.read_with(cx, |store, cx| {
-            store
-                .worktrees()
-                .find(|worktree| worktree.read(cx).root_dir().unwrap().ends_with("visible"))
-                .expect("visible worktree")
-                .read(cx)
-                .id()
-        });
-        let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
-
-        let events: Rc<RefCell<Vec<TrustedWorktreesEvent>>> = Rc::default();
-        cx.update({
-            let events = events.clone();
-            |cx| {
-                cx.subscribe(&trusted_worktrees, move |_, event, _| {
-                    events.borrow_mut().push(match event {
-                        TrustedWorktreesEvent::Trusted(host, paths) => {
-                            TrustedWorktreesEvent::Trusted(host.clone(), paths.clone())
-                        }
-                        TrustedWorktreesEvent::Restricted(host, paths) => {
-                            TrustedWorktreesEvent::Restricted(host.clone(), paths.clone())
-                        }
-                    });
-                })
-            }
-        })
-        .detach();
-
-        assert!(
-            !trusted_worktrees.update(cx, |store, cx| {
-                store.can_trust(&worktree_store, visible_worktree_id, cx)
-            }),
-            "visible worktree should be restricted initially"
-        );
-        assert_eq!(
-            HashSet::from_iter([(visible_worktree_id)]),
-            trusted_worktrees.read_with(cx, |store, _| {
-                store
-                    .restricted
-                    .get(&worktree_store.downgrade())
-                    .unwrap()
-                    .clone()
-            }),
-            "only visible worktree should be restricted",
-        );
-
-        let (new_visible_worktree, new_invisible_worktree) =
-            worktree_store.update(cx, |worktree_store, cx| {
-                let new_visible_worktree = worktree_store.create_worktree("/other", true, cx);
-                let new_invisible_worktree =
-                    worktree_store.create_worktree("/invisible", false, cx);
-                (new_visible_worktree, new_invisible_worktree)
-            });
-        let (new_visible_worktree, new_invisible_worktree) = (
-            new_visible_worktree.await.unwrap(),
-            new_invisible_worktree.await.unwrap(),
-        );
-
-        let new_visible_worktree_id =
-            new_visible_worktree.read_with(cx, |new_visible_worktree, _| new_visible_worktree.id());
-        assert!(
-            !trusted_worktrees.update(cx, |store, cx| {
-                store.can_trust(&worktree_store, new_visible_worktree_id, cx)
-            }),
-            "new visible worktree should be restricted initially",
-        );
-        assert!(
-            trusted_worktrees.update(cx, |store, cx| {
-                store.can_trust(&worktree_store, new_invisible_worktree.read(cx).id(), cx)
-            }),
-            "invisible worktree should be skipped",
-        );
-        assert_eq!(
-            HashSet::from_iter([visible_worktree_id, new_visible_worktree_id]),
-            trusted_worktrees.read_with(cx, |store, _| {
-                store
-                    .restricted
-                    .get(&worktree_store.downgrade())
-                    .unwrap()
-                    .clone()
-            }),
-            "only visible worktrees should be restricted"
-        );
-    }
-}

crates/project/src/yarn.rs 🔗

@@ -23,7 +23,7 @@ pub(crate) struct YarnPathStore {
 }
 
 /// Returns `None` when passed path is a malformed virtual path or it's not a virtual path at all.
-fn resolve_virtual(path: &Path) -> Option<Arc<Path>> {
+pub fn resolve_virtual(path: &Path) -> Option<Arc<Path>> {
     let components: Vec<_> = path.components().collect();
     let mut non_virtual_path = PathBuf::new();
 
@@ -136,44 +136,3 @@ async fn dump_zip(path: Arc<Path>, fs: Arc<dyn Fs>) -> Result<tempfile::TempDir>
     extract_zip(dir.path(), futures::io::Cursor::new(contents)).await?;
     Ok(dir)
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use std::path::Path;
-
-    #[test]
-    fn test_resolve_virtual() {
-        let test_cases = vec![
-            (
-                "/path/to/some/folder/__virtual__/a0b1c2d3/0/subpath/to/file.dat",
-                Some(Path::new("/path/to/some/folder/subpath/to/file.dat")),
-            ),
-            (
-                "/path/to/some/folder/__virtual__/e4f5a0b1/0/subpath/to/file.dat",
-                Some(Path::new("/path/to/some/folder/subpath/to/file.dat")),
-            ),
-            (
-                "/path/to/some/folder/__virtual__/a0b1c2d3/1/subpath/to/file.dat",
-                Some(Path::new("/path/to/some/subpath/to/file.dat")),
-            ),
-            (
-                "/path/to/some/folder/__virtual__/a0b1c2d3/3/subpath/to/file.dat",
-                Some(Path::new("/path/subpath/to/file.dat")),
-            ),
-            ("/path/to/nonvirtual/", None),
-            ("/path/to/malformed/__virtual__", None),
-            ("/path/to/malformed/__virtual__/a0b1c2d3", None),
-            (
-                "/path/to/malformed/__virtual__/a0b1c2d3/this-should-be-a-number",
-                None,
-            ),
-        ];
-
-        for (input, expected) in test_cases {
-            let input_path = Path::new(input);
-            let resolved_path = resolve_virtual(input_path);
-            assert_eq!(resolved_path.as_deref(), expected);
-        }
-    }
-}

crates/project/tests/integration/color_extractor.rs 🔗

@@ -0,0 +1,155 @@
+use gpui::{Hsla, rgba};
+use lsp::{CompletionItem, CompletionItemKind, Documentation};
+use project::color_extractor::*;
+
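+/// Completion strings paired with the RGBA value `extract_color` is expected to
+/// produce for them; `None` marks strings that must not parse as colors.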
+pub const COLOR_TABLE: &[(&str, Option<u32>)] = &[
+    // -- Invalid --
+    // Invalid hex
+    ("f0f", None),
+    ("#fof", None),
+    // Extra field
+    ("rgb(255, 0, 0, 0.0)", None),
+    ("hsl(120, 0, 0, 0.0)", None),
+    // Missing field
+    ("rgba(255, 0, 0)", None),
+    ("hsla(120, 0, 0)", None),
+    // No decimal after zero
+    ("rgba(255, 0, 0, 0)", None),
+    ("hsla(120, 0, 0, 0)", None),
+    // Decimal after one
+    ("rgba(255, 0, 0, 1.0)", None),
+    ("hsla(120, 0, 0, 1.0)", None),
+    // HEX (sRGB)
+    ("#f0f", Some(0xFF00FFFF)),
+    ("#ff0000", Some(0xFF0000FF)),
+    // RGB / RGBA (sRGB)
+    ("rgb(255, 0, 0)", Some(0xFF0000FF)),
+    ("rgba(255, 0, 0, 0.4)", Some(0xFF000066)),
+    ("rgba(255, 0, 0, 1)", Some(0xFF0000FF)),
+    ("rgb(20%, 0%, 0%)", Some(0x330000FF)),
+    ("rgba(20%, 0%, 0%, 1)", Some(0x330000FF)),
+    ("rgb(0%, 20%, 0%)", Some(0x003300FF)),
+    ("rgba(0%, 20%, 0%, 1)", Some(0x003300FF)),
+    ("rgb(0%, 0%, 20%)", Some(0x000033FF)),
+    ("rgba(0%, 0%, 20%, 1)", Some(0x000033FF)),
+    // HSL / HSLA (sRGB)
+    ("hsl(0, 100%, 50%)", Some(0xFF0000FF)),
+    ("hsl(120, 100%, 50%)", Some(0x00FF00FF)),
+    ("hsla(0, 100%, 50%, 0.0)", Some(0xFF000000)),
+    ("hsla(0, 100%, 50%, 0.4)", Some(0xFF000066)),
+    ("hsla(0, 100%, 50%, 1)", Some(0xFF0000FF)),
+    ("hsla(120, 100%, 50%, 0.0)", Some(0x00FF0000)),
+    ("hsla(120, 100%, 50%, 0.4)", Some(0x00FF0066)),
+    ("hsla(120, 100%, 50%, 1)", Some(0x00FF00FF)),
+];
+
+#[test]
+fn can_extract_from_label() {
+    for (color_str, color_val) in COLOR_TABLE.iter() {
+        let color = extract_color(&CompletionItem {
+            kind: Some(CompletionItemKind::COLOR),
+            label: color_str.to_string(),
+            detail: None,
+            documentation: None,
+            ..Default::default()
+        });
+
+        assert_eq!(color, color_val.map(|v| Hsla::from(rgba(v))));
+    }
+}
+
+#[test]
+fn only_whole_label_matches_are_allowed() {
+    for (color_str, _) in COLOR_TABLE.iter() {
+        let color = extract_color(&CompletionItem {
+            kind: Some(CompletionItemKind::COLOR),
+            label: format!("{} foo", color_str),
+            detail: None,
+            documentation: None,
+            ..Default::default()
+        });
+
+        assert_eq!(color, None);
+    }
+}
+
+#[test]
+fn can_extract_from_detail() {
+    for (color_str, color_val) in COLOR_TABLE.iter() {
+        let color = extract_color(&CompletionItem {
+            kind: Some(CompletionItemKind::COLOR),
+            label: "".to_string(),
+            detail: Some(color_str.to_string()),
+            documentation: None,
+            ..Default::default()
+        });
+
+        assert_eq!(color, color_val.map(|v| Hsla::from(rgba(v))));
+    }
+}
+
+#[test]
+fn only_whole_detail_matches_are_allowed() {
+    for (color_str, _) in COLOR_TABLE.iter() {
+        let color = extract_color(&CompletionItem {
+            kind: Some(CompletionItemKind::COLOR),
+            label: "".to_string(),
+            detail: Some(format!("{} foo", color_str)),
+            documentation: None,
+            ..Default::default()
+        });
+
+        assert_eq!(color, None);
+    }
+}
+
+#[test]
+fn can_extract_from_documentation_start() {
+    for (color_str, color_val) in COLOR_TABLE.iter() {
+        let color = extract_color(&CompletionItem {
+            kind: Some(CompletionItemKind::COLOR),
+            label: "".to_string(),
+            detail: None,
+            documentation: Some(Documentation::String(
+                format!("{} foo", color_str),
+            )),
+            ..Default::default()
+        });
+
+        assert_eq!(color, color_val.map(|v| Hsla::from(rgba(v))));
+    }
+}
+
+#[test]
+fn can_extract_from_documentation_end() {
+    for (color_str, color_val) in COLOR_TABLE.iter() {
+        let color = extract_color(&CompletionItem {
+            kind: Some(CompletionItemKind::COLOR),
+            label: "".to_string(),
+            detail: None,
+            documentation: Some(Documentation::String(
+                format!("foo {}", color_str),
+            )),
+            ..Default::default()
+        });
+
+        assert_eq!(color, color_val.map(|v| Hsla::from(rgba(v))));
+    }
+}
+
+#[test]
+fn cannot_extract_from_documentation_middle() {
+    for (color_str, _) in COLOR_TABLE.iter() {
+        let color = extract_color(&CompletionItem {
+            kind: Some(CompletionItemKind::COLOR),
+            label: "".to_string(),
+            detail: None,
+            documentation: Some(Documentation::String(
+                format!("foo {} foo", color_str),
+            )),
+            ..Default::default()
+        });
+
+        assert_eq!(color, None);
+    }
+}

crates/project/tests/integration/context_server_store.rs 🔗

@@ -0,0 +1,882 @@
+use anyhow::Result;
+use context_server::test::create_fake_transport;
+use context_server::{ContextServer, ContextServerId};
+use gpui::{AppContext, AsyncApp, Entity, Subscription, Task, TestAppContext, UpdateGlobal as _};
+use http_client::{FakeHttpClient, Response};
+use project::context_server_store::registry::ContextServerDescriptorRegistry;
+use project::context_server_store::*;
+use project::project_settings::ContextServerSettings;
+use project::worktree_store::WorktreeStore;
+use project::{
+    FakeFs, Project, context_server_store::registry::ContextServerDescriptor,
+    project_settings::ProjectSettings,
+};
+use serde_json::json;
+use settings::{ContextServerCommand, Settings, SettingsStore};
+use std::sync::Arc;
+use std::{cell::RefCell, path::PathBuf, rc::Rc};
+use util::path;
+
+#[gpui::test]
+async fn test_context_server_status(cx: &mut TestAppContext) {
+    const SERVER_1_ID: &str = "mcp-1";
+    const SERVER_2_ID: &str = "mcp-2";
+
+    let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
+
+    let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
+    let store = cx.new(|cx| {
+        ContextServerStore::test(
+            registry.clone(),
+            project.read(cx).worktree_store(),
+            Some(project.downgrade()),
+            cx,
+        )
+    });
+
+    let server_1_id = ContextServerId(SERVER_1_ID.into());
+    let server_2_id = ContextServerId(SERVER_2_ID.into());
+
+    let server_1 = Arc::new(ContextServer::new(
+        server_1_id.clone(),
+        Arc::new(create_fake_transport(SERVER_1_ID, cx.executor())),
+    ));
+    let server_2 = Arc::new(ContextServer::new(
+        server_2_id.clone(),
+        Arc::new(create_fake_transport(SERVER_2_ID, cx.executor())),
+    ));
+
+    store.update(cx, |store, cx| store.test_start_server(server_1, cx));
+
+    cx.run_until_parked();
+
+    cx.update(|cx| {
+        assert_eq!(
+            store.read(cx).status_for_server(&server_1_id),
+            Some(ContextServerStatus::Running)
+        );
+        assert_eq!(store.read(cx).status_for_server(&server_2_id), None);
+    });
+
+    store.update(cx, |store, cx| {
+        store.test_start_server(server_2.clone(), cx)
+    });
+
+    cx.run_until_parked();
+
+    cx.update(|cx| {
+        assert_eq!(
+            store.read(cx).status_for_server(&server_1_id),
+            Some(ContextServerStatus::Running)
+        );
+        assert_eq!(
+            store.read(cx).status_for_server(&server_2_id),
+            Some(ContextServerStatus::Running)
+        );
+    });
+
+    store
+        .update(cx, |store, cx| store.stop_server(&server_2_id, cx))
+        .unwrap();
+
+    cx.update(|cx| {
+        assert_eq!(
+            store.read(cx).status_for_server(&server_1_id),
+            Some(ContextServerStatus::Running)
+        );
+        assert_eq!(
+            store.read(cx).status_for_server(&server_2_id),
+            Some(ContextServerStatus::Stopped)
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_context_server_status_events(cx: &mut TestAppContext) {
+    const SERVER_1_ID: &str = "mcp-1";
+    const SERVER_2_ID: &str = "mcp-2";
+
+    let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
+
+    let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
+    let store = cx.new(|cx| {
+        ContextServerStore::test(
+            registry.clone(),
+            project.read(cx).worktree_store(),
+            Some(project.downgrade()),
+            cx,
+        )
+    });
+
+    let server_1_id = ContextServerId(SERVER_1_ID.into());
+    let server_2_id = ContextServerId(SERVER_2_ID.into());
+
+    let server_1 = Arc::new(ContextServer::new(
+        server_1_id.clone(),
+        Arc::new(create_fake_transport(SERVER_1_ID, cx.executor())),
+    ));
+    let server_2 = Arc::new(ContextServer::new(
+        server_2_id.clone(),
+        Arc::new(create_fake_transport(SERVER_2_ID, cx.executor())),
+    ));
+
+    let _server_events = assert_server_events(
+        &store,
+        vec![
+            (server_1_id.clone(), ContextServerStatus::Starting),
+            (server_1_id, ContextServerStatus::Running),
+            (server_2_id.clone(), ContextServerStatus::Starting),
+            (server_2_id.clone(), ContextServerStatus::Running),
+            (server_2_id.clone(), ContextServerStatus::Stopped),
+        ],
+        cx,
+    );
+
+    store.update(cx, |store, cx| store.test_start_server(server_1, cx));
+
+    cx.run_until_parked();
+
+    store.update(cx, |store, cx| {
+        store.test_start_server(server_2.clone(), cx)
+    });
+
+    cx.run_until_parked();
+
+    store
+        .update(cx, |store, cx| store.stop_server(&server_2_id, cx))
+        .unwrap();
+}
+
+#[gpui::test(iterations = 25)]
+async fn test_context_server_concurrent_starts(cx: &mut TestAppContext) {
+    const SERVER_1_ID: &str = "mcp-1";
+
+    let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
+
+    let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
+    let store = cx.new(|cx| {
+        ContextServerStore::test(
+            registry.clone(),
+            project.read(cx).worktree_store(),
+            Some(project.downgrade()),
+            cx,
+        )
+    });
+
+    let server_id = ContextServerId(SERVER_1_ID.into());
+
+    let server_with_same_id_1 = Arc::new(ContextServer::new(
+        server_id.clone(),
+        Arc::new(create_fake_transport(SERVER_1_ID, cx.executor())),
+    ));
+    let server_with_same_id_2 = Arc::new(ContextServer::new(
+        server_id.clone(),
+        Arc::new(create_fake_transport(SERVER_1_ID, cx.executor())),
+    ));
+
+    // If we start another server with the same id, we should report that we stopped the previous one
+    let _server_events = assert_server_events(
+        &store,
+        vec![
+            (server_id.clone(), ContextServerStatus::Starting),
+            (server_id.clone(), ContextServerStatus::Stopped),
+            (server_id.clone(), ContextServerStatus::Starting),
+            (server_id.clone(), ContextServerStatus::Running),
+        ],
+        cx,
+    );
+
+    store.update(cx, |store, cx| {
+        store.test_start_server(server_with_same_id_1.clone(), cx)
+    });
+    store.update(cx, |store, cx| {
+        store.test_start_server(server_with_same_id_2.clone(), cx)
+    });
+
+    cx.run_until_parked();
+
+    cx.update(|cx| {
+        assert_eq!(
+            store.read(cx).status_for_server(&server_id),
+            Some(ContextServerStatus::Running)
+        );
+    });
+}
+
+#[gpui::test]
+async fn test_context_server_maintain_servers_loop(cx: &mut TestAppContext) {
+    const SERVER_1_ID: &str = "mcp-1";
+    const SERVER_2_ID: &str = "mcp-2";
+
+    let server_1_id = ContextServerId(SERVER_1_ID.into());
+    let server_2_id = ContextServerId(SERVER_2_ID.into());
+
+    let fake_descriptor_1 = Arc::new(FakeContextServerDescriptor::new(SERVER_1_ID));
+
+    let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
+
+    let executor = cx.executor();
+    let store = project.read_with(cx, |project, _| project.context_server_store());
+    store.update(cx, |store, cx| {
+        store.set_context_server_factory(Box::new(move |id, _| {
+            Arc::new(ContextServer::new(
+                id.clone(),
+                Arc::new(create_fake_transport(id.0.to_string(), executor.clone())),
+            ))
+        }));
+        store.registry().update(cx, |registry, cx| {
+            registry.register_context_server_descriptor(SERVER_1_ID.into(), fake_descriptor_1, cx);
+        });
+    });
+
+    set_context_server_configuration(
+        vec![(
+            server_1_id.0.clone(),
+            settings::ContextServerSettingsContent::Extension {
+                enabled: true,
+                remote: false,
+                settings: json!({
+                    "somevalue": true
+                }),
+            },
+        )],
+        cx,
+    );
+
+    // Ensure that mcp-1 starts up
+    {
+        let _server_events = assert_server_events(
+            &store,
+            vec![
+                (server_1_id.clone(), ContextServerStatus::Starting),
+                (server_1_id.clone(), ContextServerStatus::Running),
+            ],
+            cx,
+        );
+        cx.run_until_parked();
+    }
+
+    // Ensure that mcp-1 is restarted when its configuration changes
+    {
+        let _server_events = assert_server_events(
+            &store,
+            vec![
+                (server_1_id.clone(), ContextServerStatus::Stopped),
+                (server_1_id.clone(), ContextServerStatus::Starting),
+                (server_1_id.clone(), ContextServerStatus::Running),
+            ],
+            cx,
+        );
+        set_context_server_configuration(
+            vec![(
+                server_1_id.0.clone(),
+                settings::ContextServerSettingsContent::Extension {
+                    enabled: true,
+                    remote: false,
+                    settings: json!({
+                        "somevalue": false
+                    }),
+                },
+            )],
+            cx,
+        );
+
+        cx.run_until_parked();
+    }
+
+    // Ensure that mcp-1 is not restarted when its configuration has not changed
+    {
+        let _server_events = assert_server_events(&store, vec![], cx);
+        set_context_server_configuration(
+            vec![(
+                server_1_id.0.clone(),
+                settings::ContextServerSettingsContent::Extension {
+                    enabled: true,
+                    remote: false,
+                    settings: json!({
+                        "somevalue": false
+                    }),
+                },
+            )],
+            cx,
+        );
+
+        cx.run_until_parked();
+    }
+
+    // Ensure that mcp-2 is started once it is added to the settings
+    {
+        let _server_events = assert_server_events(
+            &store,
+            vec![
+                (server_2_id.clone(), ContextServerStatus::Starting),
+                (server_2_id.clone(), ContextServerStatus::Running),
+            ],
+            cx,
+        );
+        set_context_server_configuration(
+            vec![
+                (
+                    server_1_id.0.clone(),
+                    settings::ContextServerSettingsContent::Extension {
+                        enabled: true,
+                        remote: false,
+                        settings: json!({
+                            "somevalue": false
+                        }),
+                    },
+                ),
+                (
+                    server_2_id.0.clone(),
+                    settings::ContextServerSettingsContent::Stdio {
+                        enabled: true,
+                        remote: false,
+                        command: ContextServerCommand {
+                            path: "somebinary".into(),
+                            args: vec!["arg".to_string()],
+                            env: None,
+                            timeout: None,
+                        },
+                    },
+                ),
+            ],
+            cx,
+        );
+
+        cx.run_until_parked();
+    }
+
+    // Ensure that mcp-2 is restarted once the args have changed
+    {
+        let _server_events = assert_server_events(
+            &store,
+            vec![
+                (server_2_id.clone(), ContextServerStatus::Stopped),
+                (server_2_id.clone(), ContextServerStatus::Starting),
+                (server_2_id.clone(), ContextServerStatus::Running),
+            ],
+            cx,
+        );
+        set_context_server_configuration(
+            vec![
+                (
+                    server_1_id.0.clone(),
+                    settings::ContextServerSettingsContent::Extension {
+                        enabled: true,
+                        remote: false,
+                        settings: json!({
+                            "somevalue": false
+                        }),
+                    },
+                ),
+                (
+                    server_2_id.0.clone(),
+                    settings::ContextServerSettingsContent::Stdio {
+                        enabled: true,
+                        remote: false,
+                        command: ContextServerCommand {
+                            path: "somebinary".into(),
+                            args: vec!["anotherArg".to_string()],
+                            env: None,
+                            timeout: None,
+                        },
+                    },
+                ),
+            ],
+            cx,
+        );
+
+        cx.run_until_parked();
+    }
+
+    // Ensure that mcp-2 is stopped and removed once it no longer appears in the settings
+    {
+        let _server_events = assert_server_events(
+            &store,
+            vec![(server_2_id.clone(), ContextServerStatus::Stopped)],
+            cx,
+        );
+        set_context_server_configuration(
+            vec![(
+                server_1_id.0.clone(),
+                settings::ContextServerSettingsContent::Extension {
+                    enabled: true,
+                    remote: false,
+                    settings: json!({
+                        "somevalue": false
+                    }),
+                },
+            )],
+            cx,
+        );
+
+        cx.run_until_parked();
+
+        cx.update(|cx| {
+            assert_eq!(store.read(cx).status_for_server(&server_2_id), None);
+        });
+    }
+
+    // Ensure that nothing happens if the settings do not change
+    {
+        let _server_events = assert_server_events(&store, vec![], cx);
+        set_context_server_configuration(
+            vec![(
+                server_1_id.0.clone(),
+                settings::ContextServerSettingsContent::Extension {
+                    enabled: true,
+                    remote: false,
+                    settings: json!({
+                        "somevalue": false
+                    }),
+                },
+            )],
+            cx,
+        );
+
+        cx.run_until_parked();
+
+        cx.update(|cx| {
+            assert_eq!(
+                store.read(cx).status_for_server(&server_1_id),
+                Some(ContextServerStatus::Running)
+            );
+            assert_eq!(store.read(cx).status_for_server(&server_2_id), None);
+        });
+    }
+}
+
+#[gpui::test]
+async fn test_context_server_enabled_disabled(cx: &mut TestAppContext) {
+    const SERVER_1_ID: &str = "mcp-1";
+
+    let server_1_id = ContextServerId(SERVER_1_ID.into());
+
+    let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
+
+    let executor = cx.executor();
+    let store = project.read_with(cx, |project, _| project.context_server_store());
+    store.update(cx, |store, _| {
+        store.set_context_server_factory(Box::new(move |id, _| {
+            Arc::new(ContextServer::new(
+                id.clone(),
+                Arc::new(create_fake_transport(id.0.to_string(), executor.clone())),
+            ))
+        }));
+    });
+
+    set_context_server_configuration(
+        vec![(
+            server_1_id.0.clone(),
+            settings::ContextServerSettingsContent::Stdio {
+                enabled: true,
+                remote: false,
+                command: ContextServerCommand {
+                    path: "somebinary".into(),
+                    args: vec!["arg".to_string()],
+                    env: None,
+                    timeout: None,
+                },
+            },
+        )],
+        cx,
+    );
+
+    // Ensure that mcp-1 starts up
+    {
+        let _server_events = assert_server_events(
+            &store,
+            vec![
+                (server_1_id.clone(), ContextServerStatus::Starting),
+                (server_1_id.clone(), ContextServerStatus::Running),
+            ],
+            cx,
+        );
+        cx.run_until_parked();
+    }
+
+    // Ensure that mcp-1 is stopped once it is disabled.
+    {
+        let _server_events = assert_server_events(
+            &store,
+            vec![(server_1_id.clone(), ContextServerStatus::Stopped)],
+            cx,
+        );
+        set_context_server_configuration(
+            vec![(
+                server_1_id.0.clone(),
+                settings::ContextServerSettingsContent::Stdio {
+                    enabled: false,
+                    remote: false,
+                    command: ContextServerCommand {
+                        path: "somebinary".into(),
+                        args: vec!["arg".to_string()],
+                        env: None,
+                        timeout: None,
+                    },
+                },
+            )],
+            cx,
+        );
+
+        cx.run_until_parked();
+    }
+
+    // Ensure that mcp-1 is started once it is enabled again.
+    {
+        let _server_events = assert_server_events(
+            &store,
+            vec![
+                (server_1_id.clone(), ContextServerStatus::Starting),
+                (server_1_id.clone(), ContextServerStatus::Running),
+            ],
+            cx,
+        );
+        set_context_server_configuration(
+            vec![(
+                server_1_id.0.clone(),
+                settings::ContextServerSettingsContent::Stdio {
+                    enabled: true,
+                    remote: false,
+                    command: ContextServerCommand {
+                        path: "somebinary".into(),
+                        args: vec!["arg".to_string()],
+                        timeout: None,
+                        env: None,
+                    },
+                },
+            )],
+            cx,
+        );
+
+        cx.run_until_parked();
+    }
+}
+
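+/// Overwrites the `context_servers` map in the global user settings with the
+/// given entries, causing the context server store to start, restart, or stop
+/// servers accordingly.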
+fn set_context_server_configuration(
+    context_servers: Vec<(Arc<str>, settings::ContextServerSettingsContent)>,
+    cx: &mut TestAppContext,
+) {
+    cx.update(|cx| {
+        SettingsStore::update_global(cx, |store, cx| {
+            store.update_user_settings(cx, |content| {
+                content.project.context_servers.clear();
+                for (id, config) in context_servers {
+                    content.project.context_servers.insert(id, config);
+                }
+            });
+        })
+    });
+}
+
+#[gpui::test]
+async fn test_remote_context_server(cx: &mut TestAppContext) {
+    const SERVER_ID: &str = "remote-server";
+    let server_id = ContextServerId(SERVER_ID.into());
+    let server_url = "http://example.com/api";
+
+    let client = FakeHttpClient::create(|_| async move {
+        use http_client::AsyncBody;
+
+        let response = Response::builder()
+            .status(200)
+            .header("Content-Type", "application/json")
+            .body(AsyncBody::from(
+                serde_json::to_string(&json!({
+                    "jsonrpc": "2.0",
+                    "id": 0,
+                    "result": {
+                        "protocolVersion": "2024-11-05",
+                        "capabilities": {},
+                        "serverInfo": {
+                            "name": "test-server",
+                            "version": "1.0.0"
+                        }
+                    }
+                }))
+                .unwrap(),
+            ))
+            .unwrap();
+        Ok(response)
+    });
+    cx.update(|cx| cx.set_http_client(client));
+
+    let (_fs, project) = setup_context_server_test(cx, json!({ "code.rs": "" }), vec![]).await;
+
+    let store = project.read_with(cx, |project, _| project.context_server_store());
+
+    set_context_server_configuration(
+        vec![(
+            server_id.0.clone(),
+            settings::ContextServerSettingsContent::Http {
+                enabled: true,
+                url: server_url.to_string(),
+                headers: Default::default(),
+                timeout: None,
+            },
+        )],
+        cx,
+    );
+
+    let _server_events = assert_server_events(
+        &store,
+        vec![
+            (server_id.clone(), ContextServerStatus::Starting),
+            (server_id.clone(), ContextServerStatus::Running),
+        ],
+        cx,
+    );
+    cx.run_until_parked();
+}
+
+struct ServerEvents {
+    received_event_count: Rc<RefCell<usize>>,
+    expected_event_count: usize,
+    _subscription: Subscription,
+}
+
+impl Drop for ServerEvents {
+    fn drop(&mut self) {
+        let actual_event_count = *self.received_event_count.borrow();
+        assert_eq!(
+            actual_event_count, self.expected_event_count,
+            "Expected to receive {} context server store events, but received {} events",
+            self.expected_event_count, actual_event_count
+        );
+    }
+}
+
+#[gpui::test]
+async fn test_context_server_global_timeout(cx: &mut TestAppContext) {
+    cx.update(|cx| {
+        let settings_store = SettingsStore::test(cx);
+        cx.set_global(settings_store);
+        SettingsStore::update_global(cx, |store, cx| {
+            store
+                .set_user_settings(r#"{"context_server_timeout": 90}"#, cx)
+                .expect("Failed to set test user settings");
+        });
+    });
+
+    let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
+
+    let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
+    let store = cx.new(|cx| {
+        ContextServerStore::test(
+            registry.clone(),
+            project.read(cx).worktree_store(),
+            Some(project.downgrade()),
+            cx,
+        )
+    });
+
+    let mut async_cx = cx.to_async();
+    let result = ContextServerStore::create_context_server(
+        store.downgrade(),
+        ContextServerId("test-server".into()),
+        Arc::new(ContextServerConfiguration::Http {
+            url: url::Url::parse("http://localhost:8080").expect("Failed to parse test URL"),
+            headers: Default::default(),
+            timeout: None,
+        }),
+        &mut async_cx,
+    )
+    .await;
+
+    assert!(
+        result.is_ok(),
+        "Server should be created successfully with global timeout"
+    );
+}
+
+#[gpui::test]
+async fn test_context_server_per_server_timeout_override(cx: &mut TestAppContext) {
+    const SERVER_ID: &str = "test-server";
+
+    cx.update(|cx| {
+        let settings_store = SettingsStore::test(cx);
+        cx.set_global(settings_store);
+        SettingsStore::update_global(cx, |store, cx| {
+            store
+                .set_user_settings(r#"{"context_server_timeout": 60}"#, cx)
+                .expect("Failed to set test user settings");
+        });
+    });
+
+    let (_fs, project) = setup_context_server_test(
+        cx,
+        json!({"code.rs": ""}),
+        vec![(
+            SERVER_ID.into(),
+            ContextServerSettings::Http {
+                enabled: true,
+                url: "http://localhost:8080".to_string(),
+                headers: Default::default(),
+                timeout: Some(120),
+            },
+        )],
+    )
+    .await;
+
+    let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
+    let store = cx.new(|cx| {
+        ContextServerStore::test(
+            registry.clone(),
+            project.read(cx).worktree_store(),
+            Some(project.downgrade()),
+            cx,
+        )
+    });
+
+    let mut async_cx = cx.to_async();
+    let result = ContextServerStore::create_context_server(
+        store.downgrade(),
+        ContextServerId("test-server".into()),
+        Arc::new(ContextServerConfiguration::Http {
+            url: url::Url::parse("http://localhost:8080").expect("Failed to parse test URL"),
+            headers: Default::default(),
+            timeout: Some(120),
+        }),
+        &mut async_cx,
+    )
+    .await;
+
+    assert!(
+        result.is_ok(),
+        "Server should be created successfully with per-server timeout override"
+    );
+}
+
+#[gpui::test]
+async fn test_context_server_stdio_timeout(cx: &mut TestAppContext) {
+    let (_fs, project) = setup_context_server_test(cx, json!({"code.rs": ""}), vec![]).await;
+
+    let registry = cx.new(|_| ContextServerDescriptorRegistry::new());
+    let store = cx.new(|cx| {
+        ContextServerStore::test(
+            registry.clone(),
+            project.read(cx).worktree_store(),
+            Some(project.downgrade()),
+            cx,
+        )
+    });
+
+    let mut async_cx = cx.to_async();
+    let result = ContextServerStore::create_context_server(
+        store.downgrade(),
+        ContextServerId("stdio-server".into()),
+        Arc::new(ContextServerConfiguration::Custom {
+            command: ContextServerCommand {
+                path: "/usr/bin/node".into(),
+                args: vec!["server.js".into()],
+                env: None,
+                timeout: Some(180000),
+            },
+            remote: false,
+        }),
+        &mut async_cx,
+    )
+    .await;
+
+    assert!(
+        result.is_ok(),
+        "Stdio server should be created successfully with timeout"
+    );
+}
+
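+/// Subscribes to the store and checks that `ServerStatusChanged` events arrive
+/// in exactly the order given by `expected_events`; the returned `ServerEvents`
+/// guard asserts on drop that the expected number of events was received.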
+fn assert_server_events(
+    store: &Entity<ContextServerStore>,
+    expected_events: Vec<(ContextServerId, ContextServerStatus)>,
+    cx: &mut TestAppContext,
+) -> ServerEvents {
+    cx.update(|cx| {
+        let mut ix = 0;
+        let received_event_count = Rc::new(RefCell::new(0));
+        let expected_event_count = expected_events.len();
+        let subscription = cx.subscribe(store, {
+            let received_event_count = received_event_count.clone();
+            move |_, event, _| match event {
+                Event::ServerStatusChanged {
+                    server_id: actual_server_id,
+                    status: actual_status,
+                } => {
+                    let (expected_server_id, expected_status) = &expected_events[ix];
+
+                    assert_eq!(
+                        actual_server_id, expected_server_id,
+                        "Expected different server id at index {}",
+                        ix
+                    );
+                    assert_eq!(
+                        actual_status, expected_status,
+                        "Expected different status at index {}",
+                        ix
+                    );
+                    ix += 1;
+                    *received_event_count.borrow_mut() += 1;
+                }
+            }
+        });
+        ServerEvents {
+            expected_event_count,
+            received_event_count,
+            _subscription: subscription,
+        }
+    })
+}
+
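+/// Creates a test `Project` backed by a `FakeFs` populated with `files`,
+/// applying the given per-server context server settings before the project is
+/// created.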
+async fn setup_context_server_test(
+    cx: &mut TestAppContext,
+    files: serde_json::Value,
+    context_server_configurations: Vec<(Arc<str>, ContextServerSettings)>,
+) -> (Arc<FakeFs>, Entity<Project>) {
+    cx.update(|cx| {
+        let settings_store = SettingsStore::test(cx);
+        cx.set_global(settings_store);
+        let mut settings = ProjectSettings::get_global(cx).clone();
+        for (id, config) in context_server_configurations {
+            settings.context_servers.insert(id, config);
+        }
+        ProjectSettings::override_global(settings, cx);
+    });
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(path!("/test"), files).await;
+    let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
+
+    (fs, project)
+}
+
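+/// Minimal `ContextServerDescriptor` implementation that resolves to a fixed
+/// command path and reports no extension-provided configuration.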
+struct FakeContextServerDescriptor {
+    path: PathBuf,
+}
+
+impl FakeContextServerDescriptor {
+    fn new(path: impl Into<PathBuf>) -> Self {
+        Self { path: path.into() }
+    }
+}
+
+impl ContextServerDescriptor for FakeContextServerDescriptor {
+    fn command(
+        &self,
+        _worktree_store: Entity<WorktreeStore>,
+        _cx: &AsyncApp,
+    ) -> Task<Result<ContextServerCommand>> {
+        Task::ready(Ok(ContextServerCommand {
+            path: self.path.clone(),
+            args: vec!["arg1".to_string(), "arg2".to_string()],
+            env: None,
+            timeout: None,
+        }))
+    }
+
+    fn configuration(
+        &self,
+        _worktree_store: Entity<WorktreeStore>,
+        _cx: &AsyncApp,
+    ) -> Task<Result<Option<::extension::ContextServerConfiguration>>> {
+        Task::ready(Ok(None))
+    }
+}

crates/project/tests/integration/debugger.rs 🔗

@@ -0,0 +1,293 @@
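+// Tests for the Go (Delve) debug locator: converting `go run` / `go test` task
+// templates into Delve launch requests and skipping commands that cannot be debugged.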
+mod go_locator {
+    use collections::HashMap;
+    use dap::{DapLocator, adapters::DebugAdapterName};
+    use gpui::TestAppContext;
+    use project::debugger::locators::go::{DelveLaunchRequest, GoLocator};
+    use task::{HideStrategy, RevealStrategy, RevealTarget, Shell, TaskTemplate};
+    #[gpui::test]
+    async fn test_create_scenario_for_go_build(_: &mut TestAppContext) {
+        let locator = GoLocator;
+        let task = TaskTemplate {
+            label: "go build".into(),
+            command: "go".into(),
+            args: vec!["build".into(), ".".into()],
+            env: Default::default(),
+            cwd: Some("${ZED_WORKTREE_ROOT}".into()),
+            use_new_terminal: false,
+            allow_concurrent_runs: false,
+            reveal: RevealStrategy::Always,
+            reveal_target: RevealTarget::Dock,
+            hide: HideStrategy::Never,
+            shell: Shell::System,
+            tags: vec![],
+            show_summary: true,
+            show_command: true,
+        };
+
+        let scenario = locator
+            .create_scenario(&task, "test label", &DebugAdapterName("Delve".into()))
+            .await;
+
+        assert!(scenario.is_none());
+    }
+
+    #[gpui::test]
+    async fn test_skip_non_go_commands_with_non_delve_adapter(_: &mut TestAppContext) {
+        let locator = GoLocator;
+        let task = TaskTemplate {
+            label: "cargo build".into(),
+            command: "cargo".into(),
+            args: vec!["build".into()],
+            env: Default::default(),
+            cwd: Some("${ZED_WORKTREE_ROOT}".into()),
+            use_new_terminal: false,
+            allow_concurrent_runs: false,
+            reveal: RevealStrategy::Always,
+            reveal_target: RevealTarget::Dock,
+            hide: HideStrategy::Never,
+            shell: Shell::System,
+            tags: vec![],
+            show_summary: true,
+            show_command: true,
+        };
+
+        let scenario = locator
+            .create_scenario(
+                &task,
+                "test label",
+                &DebugAdapterName("SomeOtherAdapter".into()),
+            )
+            .await;
+        assert!(scenario.is_none());
+
+        let scenario = locator
+            .create_scenario(&task, "test label", &DebugAdapterName("Delve".into()))
+            .await;
+        assert!(scenario.is_none());
+    }
+    #[gpui::test]
+    async fn test_go_locator_run(_: &mut TestAppContext) {
+        let locator = GoLocator;
+        let delve = DebugAdapterName("Delve".into());
+
+        let task = TaskTemplate {
+            label: "go run with flags".into(),
+            command: "go".into(),
+            args: vec![
+                "run".to_string(),
+                "-race".to_string(),
+                "-ldflags".to_string(),
+                "-X main.version=1.0".to_string(),
+                "./cmd/myapp".to_string(),
+                "--config".to_string(),
+                "production.yaml".to_string(),
+                "--verbose".to_string(),
+            ],
+            env: {
+                let mut env = HashMap::default();
+                env.insert("GO_ENV".to_string(), "production".to_string());
+                env
+            },
+            cwd: Some("/project/root".into()),
+            ..Default::default()
+        };
+
+        let scenario = locator
+            .create_scenario(&task, "test run label", &delve)
+            .await
+            .unwrap();
+
+        let config: DelveLaunchRequest = serde_json::from_value(scenario.config).unwrap();
+
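+        // Flags before the package path ("./cmd/myapp") end up in build_flags; arguments after it become the debuggee's args.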
+        assert_eq!(
+            config,
+            DelveLaunchRequest {
+                request: "launch".to_string(),
+                mode: "debug".to_string(),
+                program: "./cmd/myapp".to_string(),
+                build_flags: vec![
+                    "-race".to_string(),
+                    "-ldflags".to_string(),
+                    "-X main.version=1.0".to_string()
+                ],
+                args: vec![
+                    "--config".to_string(),
+                    "production.yaml".to_string(),
+                    "--verbose".to_string(),
+                ],
+                env: {
+                    let mut env = HashMap::default();
+                    env.insert("GO_ENV".to_string(), "production".to_string());
+                    env
+                },
+                cwd: Some("/project/root".to_string()),
+            }
+        );
+    }
+
+    #[gpui::test]
+    async fn test_go_locator_test(_: &mut TestAppContext) {
+        let locator = GoLocator;
+        let delve = DebugAdapterName("Delve".into());
+
+        // Test with tags and run flag
+        let task_with_tags = TaskTemplate {
+            label: "test".into(),
+            command: "go".into(),
+            args: vec![
+                "test".to_string(),
+                "-tags".to_string(),
+                "integration,unit".to_string(),
+                "-run".to_string(),
+                "Foo".to_string(),
+                ".".to_string(),
+            ],
+            ..Default::default()
+        };
+        let result = locator
+            .create_scenario(&task_with_tags, "", &delve)
+            .await
+            .unwrap();
+
+        let config: DelveLaunchRequest = serde_json::from_value(result.config).unwrap();
+
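+        // "-run Foo" is rewritten to "-test.run Foo" with "-test.v" appended, while "-tags ..." stays in build_flags.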
+        assert_eq!(
+            config,
+            DelveLaunchRequest {
+                request: "launch".to_string(),
+                mode: "test".to_string(),
+                program: ".".to_string(),
+                build_flags: vec!["-tags".to_string(), "integration,unit".to_string(),],
+                args: vec![
+                    "-test.run".to_string(),
+                    "Foo".to_string(),
+                    "-test.v".to_string()
+                ],
+                env: Default::default(),
+                cwd: None,
+            }
+        );
+    }
+
+    #[gpui::test]
+    async fn test_skip_unsupported_go_commands(_: &mut TestAppContext) {
+        let locator = GoLocator;
+        let task = TaskTemplate {
+            label: "go clean".into(),
+            command: "go".into(),
+            args: vec!["clean".into()],
+            env: Default::default(),
+            cwd: Some("${ZED_WORKTREE_ROOT}".into()),
+            use_new_terminal: false,
+            allow_concurrent_runs: false,
+            reveal: RevealStrategy::Always,
+            reveal_target: RevealTarget::Dock,
+            hide: HideStrategy::Never,
+            shell: Shell::System,
+            tags: vec![],
+            show_summary: true,
+            show_command: true,
+        };
+
+        let scenario = locator
+            .create_scenario(&task, "test label", &DebugAdapterName("Delve".into()))
+            .await;
+        assert!(scenario.is_none());
+    }
+}
+
+mod python_locator {
+    use dap::{DapLocator, adapters::DebugAdapterName};
+    use serde_json::json;
+
+    use project::debugger::locators::python::*;
+    use task::{DebugScenario, TaskTemplate};
+
+    #[gpui::test]
+    async fn test_python_locator() {
+        let adapter = DebugAdapterName("Debugpy".into());
+        let build_task = TaskTemplate {
+            label: "run module '$ZED_FILE'".into(),
+            command: "$ZED_CUSTOM_PYTHON_ACTIVE_ZED_TOOLCHAIN".into(),
+            args: vec!["-m".into(), "$ZED_CUSTOM_PYTHON_MODULE_NAME".into()],
+            env: Default::default(),
+            cwd: Some("$ZED_WORKTREE_ROOT".into()),
+            use_new_terminal: false,
+            allow_concurrent_runs: false,
+            reveal: task::RevealStrategy::Always,
+            reveal_target: task::RevealTarget::Dock,
+            hide: task::HideStrategy::Never,
+            tags: vec!["python-module-main-method".into()],
+            shell: task::Shell::System,
+            show_summary: false,
+            show_command: false,
+        };
+
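+        // The Zed variable placeholders are preserved as-is; the "-m <module>" invocation becomes the "module" field of the launch config.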
+        let expected_scenario = DebugScenario {
+            adapter: "Debugpy".into(),
+            label: "run module 'main.py'".into(),
+            build: None,
+            config: json!({
+                "request": "launch",
+                "python": "$ZED_CUSTOM_PYTHON_ACTIVE_ZED_TOOLCHAIN",
+                "args": [],
+                "cwd": "$ZED_WORKTREE_ROOT",
+                "module": "$ZED_CUSTOM_PYTHON_MODULE_NAME",
+            }),
+            tcp_connection: None,
+        };
+
+        assert_eq!(
+            PythonLocator
+                .create_scenario(&build_task, "run module 'main.py'", &adapter)
+                .await
+                .expect("Failed to create a scenario"),
+            expected_scenario
+        );
+    }
+}
+
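+// MemoryIterator yields one MemoryCell per address in the requested range: None for unmapped addresses, Some(byte) for mapped ones.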
+mod memory {
+    use project::debugger::{
+        MemoryCell,
+        memory::{MemoryIterator, PageAddress, PageContents},
+    };
+
+    #[test]
+    fn iterate_over_unmapped_memory() {
+        let empty_iterator = MemoryIterator::new(0..=127, Default::default());
+        let actual = empty_iterator.collect::<Vec<_>>();
+        let expected = vec![MemoryCell(None); 128];
+        assert_eq!(actual.len(), expected.len());
+        assert_eq!(actual, expected);
+    }
+
+    #[test]
+    fn iterate_over_partially_mapped_memory() {
+        let it = MemoryIterator::new(
+            0..=127,
+            vec![(PageAddress(5), PageContents::mapped(vec![1]))].into_iter(),
+        );
+        let actual = it.collect::<Vec<_>>();
+        let expected = std::iter::repeat_n(MemoryCell(None), 5)
+            .chain(std::iter::once(MemoryCell(Some(1))))
+            .chain(std::iter::repeat_n(MemoryCell(None), 122))
+            .collect::<Vec<_>>();
+        assert_eq!(actual.len(), expected.len());
+        assert_eq!(actual, expected);
+    }
+
+    #[test]
+    fn reads_from_the_middle_of_a_page() {
+        let partial_iter = MemoryIterator::new(
+            20..=30,
+            vec![(PageAddress(0), PageContents::mapped((0..255).collect()))].into_iter(),
+        );
+        let actual = partial_iter.collect::<Vec<_>>();
+        let expected = (20..=30)
+            .map(|val| MemoryCell(Some(val)))
+            .collect::<Vec<_>>();
+        assert_eq!(actual.len(), expected.len());
+        assert_eq!(actual, expected);
+    }
+}

crates/project/tests/integration/ext_agent_tests.rs 🔗

@@ -0,0 +1,226 @@
+use anyhow::Result;
+use collections::HashMap;
+use gpui::{AsyncApp, SharedString, Task};
+use project::agent_server_store::*;
+use std::{any::Any, collections::HashSet, fmt::Write as _, path::PathBuf};
+// A simple fake that implements ExternalAgentServer without needing async plumbing.
+struct NoopExternalAgent;
+
+impl ExternalAgentServer for NoopExternalAgent {
+    fn get_command(
+        &mut self,
+        _root_dir: Option<&str>,
+        _extra_env: HashMap<String, String>,
+        _status_tx: Option<watch::Sender<SharedString>>,
+        _new_version_available_tx: Option<watch::Sender<Option<String>>>,
+        _cx: &mut AsyncApp,
+    ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
+        Task::ready(Ok((
+            AgentServerCommand {
+                path: PathBuf::from("noop"),
+                args: Vec::new(),
+                env: None,
+            },
+            "".to_string(),
+            None,
+        )))
+    }
+
+    fn as_any_mut(&mut self) -> &mut dyn Any {
+        self
+    }
+}
+
+#[test]
+fn external_agent_server_name_display() {
+    let name = ExternalAgentServerName(SharedString::from("Ext: Tool"));
+    let mut s = String::new();
+    write!(&mut s, "{name}").unwrap();
+    assert_eq!(s, "Ext: Tool");
+}
+
+#[test]
+fn sync_extension_agents_removes_previous_extension_entries() {
+    let mut store = AgentServerStore::collab();
+
+    // Seed with two agents that extensions will replace, plus a custom one that should remain.
+    store.external_agents.insert(
+        ExternalAgentServerName(SharedString::from("foo-agent")),
+        ExternalAgentEntry::new(
+            Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+            ExternalAgentSource::Custom,
+            None,
+            None,
+        ),
+    );
+    store.external_agents.insert(
+        ExternalAgentServerName(SharedString::from("bar-agent")),
+        ExternalAgentEntry::new(
+            Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+            ExternalAgentSource::Custom,
+            None,
+            None,
+        ),
+    );
+    store.external_agents.insert(
+        ExternalAgentServerName(SharedString::from("custom")),
+        ExternalAgentEntry::new(
+            Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+            ExternalAgentSource::Custom,
+            None,
+            None,
+        ),
+    );
+
+    // Simulate the removal phase: if we're syncing extensions that provide
+    // "foo-agent" and "bar-agent", those should be removed first
+    let extension_agent_names: HashSet<String> = ["foo-agent".to_string(), "bar-agent".to_string()]
+        .into_iter()
+        .collect();
+
+    let keys_to_remove: Vec<_> = store
+        .external_agents
+        .keys()
+        .filter(|name| extension_agent_names.contains(name.0.as_ref()))
+        .cloned()
+        .collect();
+
+    for key in keys_to_remove {
+        store.external_agents.remove(&key);
+    }
+
+    // Only the custom entry should remain.
+    let remaining: Vec<_> = store
+        .external_agents
+        .keys()
+        .map(|k| k.0.to_string())
+        .collect();
+    assert_eq!(remaining, vec!["custom".to_string()]);
+}
+
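+// The tests below check that resolve_extension_icon_path only yields paths inside the extension's own directory: valid and nested paths are allowed, while traversal outside it returns None.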
+#[test]
+fn resolve_extension_icon_path_allows_valid_paths() {
+    // Create a temporary directory structure for testing
+    let temp_dir = tempfile::tempdir().unwrap();
+    let extensions_dir = temp_dir.path();
+    let ext_dir = extensions_dir.join("my-extension");
+    std::fs::create_dir_all(&ext_dir).unwrap();
+
+    // Create a valid icon file
+    let icon_path = ext_dir.join("icon.svg");
+    std::fs::write(&icon_path, "<svg></svg>").unwrap();
+
+    // Test that a valid relative path works
+    let result = project::agent_server_store::resolve_extension_icon_path(
+        extensions_dir,
+        "my-extension",
+        "icon.svg",
+    );
+    assert!(result.is_some());
+    assert!(result.unwrap().ends_with("icon.svg"));
+}
+
+#[test]
+fn resolve_extension_icon_path_allows_nested_paths() {
+    let temp_dir = tempfile::tempdir().unwrap();
+    let extensions_dir = temp_dir.path();
+    let ext_dir = extensions_dir.join("my-extension");
+    let icons_dir = ext_dir.join("assets").join("icons");
+    std::fs::create_dir_all(&icons_dir).unwrap();
+
+    let icon_path = icons_dir.join("logo.svg");
+    std::fs::write(&icon_path, "<svg></svg>").unwrap();
+
+    let result = project::agent_server_store::resolve_extension_icon_path(
+        extensions_dir,
+        "my-extension",
+        "assets/icons/logo.svg",
+    );
+    assert!(result.is_some());
+    assert!(result.unwrap().ends_with("logo.svg"));
+}
+
+#[test]
+fn resolve_extension_icon_path_blocks_path_traversal() {
+    let temp_dir = tempfile::tempdir().unwrap();
+    let extensions_dir = temp_dir.path();
+
+    // Create two extension directories
+    let ext1_dir = extensions_dir.join("extension1");
+    let ext2_dir = extensions_dir.join("extension2");
+    std::fs::create_dir_all(&ext1_dir).unwrap();
+    std::fs::create_dir_all(&ext2_dir).unwrap();
+
+    // Create a file in extension2
+    let secret_file = ext2_dir.join("secret.svg");
+    std::fs::write(&secret_file, "<svg>secret</svg>").unwrap();
+
+    // Try to access extension2's file from extension1 using path traversal
+    let result = project::agent_server_store::resolve_extension_icon_path(
+        extensions_dir,
+        "extension1",
+        "../extension2/secret.svg",
+    );
+    assert!(
+        result.is_none(),
+        "Path traversal to sibling extension should be blocked"
+    );
+}
+
+#[test]
+fn resolve_extension_icon_path_blocks_absolute_escape() {
+    let temp_dir = tempfile::tempdir().unwrap();
+    let extensions_dir = temp_dir.path();
+    let ext_dir = extensions_dir.join("my-extension");
+    std::fs::create_dir_all(&ext_dir).unwrap();
+
+    // Create a file outside the extensions directory
+    let outside_file = temp_dir.path().join("outside.svg");
+    std::fs::write(&outside_file, "<svg>outside</svg>").unwrap();
+
+    // Try to escape to parent directory
+    let result = project::agent_server_store::resolve_extension_icon_path(
+        extensions_dir,
+        "my-extension",
+        "../outside.svg",
+    );
+    assert!(
+        result.is_none(),
+        "Path traversal to parent directory should be blocked"
+    );
+}
+
+#[test]
+fn resolve_extension_icon_path_blocks_deep_traversal() {
+    let temp_dir = tempfile::tempdir().unwrap();
+    let extensions_dir = temp_dir.path();
+    let ext_dir = extensions_dir.join("my-extension");
+    std::fs::create_dir_all(&ext_dir).unwrap();
+
+    // Try deep path traversal
+    let result = project::agent_server_store::resolve_extension_icon_path(
+        extensions_dir,
+        "my-extension",
+        "../../../../../../etc/passwd",
+    );
+    assert!(
+        result.is_none(),
+        "Deep path traversal should be blocked (file doesn't exist)"
+    );
+}
+
+#[test]
+fn resolve_extension_icon_path_returns_none_for_nonexistent() {
+    let temp_dir = tempfile::tempdir().unwrap();
+    let extensions_dir = temp_dir.path();
+    let ext_dir = extensions_dir.join("my-extension");
+    std::fs::create_dir_all(&ext_dir).unwrap();
+
+    // Try to access a file that doesn't exist
+    let result = project::agent_server_store::resolve_extension_icon_path(
+        extensions_dir,
+        "my-extension",
+        "nonexistent.svg",
+    );
+    assert!(result.is_none(), "Nonexistent file should return None");
+}

crates/project/tests/integration/extension_agent_tests.rs 🔗

@@ -0,0 +1,345 @@
+use anyhow::Result;
+use collections::HashMap;
+use gpui::{AppContext, AsyncApp, SharedString, Task, TestAppContext};
+use node_runtime::NodeRuntime;
+use project::agent_server_store::*;
+use project::worktree_store::WorktreeStore;
+use std::{any::Any, path::PathBuf, sync::Arc};
+
+#[test]
+fn extension_agent_constructs_proper_display_names() {
+    // Verify the display name format for extension-provided agents
+    let name1 = ExternalAgentServerName(SharedString::from("Extension: Agent"));
+    assert!(name1.0.contains(": "));
+
+    let name2 = ExternalAgentServerName(SharedString::from("MyExt: MyAgent"));
+    assert_eq!(name2.0, "MyExt: MyAgent");
+
+    // Non-extension agents shouldn't have the separator
+    let custom = ExternalAgentServerName(SharedString::from("custom"));
+    assert!(!custom.0.contains(": "));
+}
+
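+// Minimal ExternalAgentServer stub; get_command returns a ready task with a placeholder command.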
+struct NoopExternalAgent;
+
+impl ExternalAgentServer for NoopExternalAgent {
+    fn get_command(
+        &mut self,
+        _root_dir: Option<&str>,
+        _extra_env: HashMap<String, String>,
+        _status_tx: Option<watch::Sender<SharedString>>,
+        _new_version_available_tx: Option<watch::Sender<Option<String>>>,
+        _cx: &mut AsyncApp,
+    ) -> Task<Result<(AgentServerCommand, String, Option<task::SpawnInTerminal>)>> {
+        Task::ready(Ok((
+            AgentServerCommand {
+                path: PathBuf::from("noop"),
+                args: Vec::new(),
+                env: None,
+            },
+            "".to_string(),
+            None,
+        )))
+    }
+
+    fn as_any_mut(&mut self) -> &mut dyn Any {
+        self
+    }
+}
+
+#[test]
+fn sync_removes_only_extension_provided_agents() {
+    let mut store = AgentServerStore::collab();
+
+    // Seed with two extension-provided agents (names contain ": ") and one custom agent (no separator).
+    store.external_agents.insert(
+        ExternalAgentServerName(SharedString::from("Ext1: Agent1")),
+        ExternalAgentEntry::new(
+            Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+            ExternalAgentSource::Extension,
+            None,
+            None,
+        ),
+    );
+    store.external_agents.insert(
+        ExternalAgentServerName(SharedString::from("Ext2: Agent2")),
+        ExternalAgentEntry::new(
+            Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+            ExternalAgentSource::Extension,
+            None,
+            None,
+        ),
+    );
+    store.external_agents.insert(
+        ExternalAgentServerName(SharedString::from("custom-agent")),
+        ExternalAgentEntry::new(
+            Box::new(NoopExternalAgent) as Box<dyn ExternalAgentServer>,
+            ExternalAgentSource::Custom,
+            None,
+            None,
+        ),
+    );
+
+    // Simulate removal phase
+    store
+        .external_agents
+        .retain(|_, entry| entry.source != ExternalAgentSource::Extension);
+
+    // Only custom-agent should remain
+    assert_eq!(store.external_agents.len(), 1);
+    assert!(
+        store
+            .external_agents
+            .contains_key(&ExternalAgentServerName(SharedString::from("custom-agent")))
+    );
+}
+
+#[test]
+fn archive_launcher_constructs_with_all_fields() {
+    use extension::AgentServerManifestEntry;
+
+    let mut env = HashMap::default();
+    env.insert("GITHUB_TOKEN".into(), "secret".into());
+
+    let mut targets = HashMap::default();
+    targets.insert(
+        "darwin-aarch64".to_string(),
+        extension::TargetConfig {
+            archive:
+                "https://github.com/owner/repo/releases/download/v1.0.0/agent-darwin-arm64.zip"
+                    .into(),
+            cmd: "./agent".into(),
+            args: vec![],
+            sha256: None,
+            env: Default::default(),
+        },
+    );
+
+    let _entry = AgentServerManifestEntry {
+        name: "GitHub Agent".into(),
+        targets,
+        env,
+        icon: None,
+    };
+
+    // Verify display name construction
+    let expected_name = ExternalAgentServerName(SharedString::from("GitHub Agent"));
+    assert_eq!(expected_name.0, "GitHub Agent");
+}
+
+#[gpui::test]
+async fn archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAppContext) {
+    let fs = fs::FakeFs::new(cx.background_executor.clone());
+    let http_client = http_client::FakeHttpClient::with_404_response();
+    let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone()));
+    let project_environment = cx.new(|cx| {
+        crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx)
+    });
+
+    let agent = LocalExtensionArchiveAgent {
+        fs,
+        http_client,
+        node_runtime: node_runtime::NodeRuntime::unavailable(),
+        project_environment,
+        extension_id: Arc::from("my-extension"),
+        agent_id: Arc::from("my-agent"),
+        targets: {
+            let mut map = HashMap::default();
+            map.insert(
+                "darwin-aarch64".to_string(),
+                extension::TargetConfig {
+                    archive: "https://example.com/my-agent-darwin-arm64.zip".into(),
+                    cmd: "./my-agent".into(),
+                    args: vec!["--serve".into()],
+                    sha256: None,
+                    env: Default::default(),
+                },
+            );
+            map
+        },
+        env: {
+            let mut map = HashMap::default();
+            map.insert("PORT".into(), "8080".into());
+            map
+        },
+    };
+
+    // Verify the agent is constructed with the extension and agent ids that form its cache key.
+    assert_eq!(agent.extension_id.as_ref(), "my-extension");
+    assert_eq!(agent.agent_id.as_ref(), "my-agent");
+    assert_eq!(agent.env.get("PORT"), Some(&"8080".to_string()));
+    assert!(agent.targets.contains_key("darwin-aarch64"));
+}
+
+#[test]
+fn sync_extension_agents_registers_archive_launcher() {
+    use extension::AgentServerManifestEntry;
+
+    let expected_name = ExternalAgentServerName(SharedString::from("Release Agent"));
+    assert_eq!(expected_name.0, "Release Agent");
+
+    // Verify the manifest entry structure for archive-based installation
+    let mut env = HashMap::default();
+    env.insert("API_KEY".into(), "secret".into());
+
+    let mut targets = HashMap::default();
+    targets.insert(
+            "linux-x86_64".to_string(),
+            extension::TargetConfig {
+                archive: "https://github.com/org/project/releases/download/v2.1.0/release-agent-linux-x64.tar.gz".into(),
+                cmd: "./release-agent".into(),
+                args: vec!["serve".into()],
+                sha256: None,
+                env: Default::default(),
+            },
+        );
+
+    let manifest_entry = AgentServerManifestEntry {
+        name: "Release Agent".into(),
+        targets: targets.clone(),
+        env,
+        icon: None,
+    };
+
+    // Verify target config is present
+    assert!(manifest_entry.targets.contains_key("linux-x86_64"));
+    let target = manifest_entry.targets.get("linux-x86_64").unwrap();
+    assert_eq!(target.cmd, "./release-agent");
+}
+
+#[gpui::test]
+async fn test_node_command_uses_managed_runtime(cx: &mut TestAppContext) {
+    let fs = fs::FakeFs::new(cx.background_executor.clone());
+    let http_client = http_client::FakeHttpClient::with_404_response();
+    let node_runtime = NodeRuntime::unavailable();
+    let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone()));
+    let project_environment = cx.new(|cx| {
+        crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx)
+    });
+
+    let agent = LocalExtensionArchiveAgent {
+        fs: fs.clone(),
+        http_client,
+        node_runtime,
+        project_environment,
+        extension_id: Arc::from("node-extension"),
+        agent_id: Arc::from("node-agent"),
+        targets: {
+            let mut map = HashMap::default();
+            map.insert(
+                "darwin-aarch64".to_string(),
+                extension::TargetConfig {
+                    archive: "https://example.com/node-agent.zip".into(),
+                    cmd: "node".into(),
+                    args: vec!["index.js".into()],
+                    sha256: None,
+                    env: Default::default(),
+                },
+            );
+            map
+        },
+        env: HashMap::default(),
+    };
+
+    // The target's cmd is "node", which the agent is expected to route through the managed node runtime; the assertions below verify that configuration.
+    assert_eq!(agent.extension_id.as_ref(), "node-extension");
+    assert_eq!(agent.agent_id.as_ref(), "node-agent");
+
+    let target = agent.targets.get("darwin-aarch64").unwrap();
+    assert_eq!(target.cmd, "node");
+    assert_eq!(target.args, vec!["index.js"]);
+}
+
+#[gpui::test]
+async fn test_commands_run_in_extraction_directory(cx: &mut TestAppContext) {
+    let fs = fs::FakeFs::new(cx.background_executor.clone());
+    let http_client = http_client::FakeHttpClient::with_404_response();
+    let node_runtime = NodeRuntime::unavailable();
+    let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone()));
+    let project_environment = cx.new(|cx| {
+        crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx)
+    });
+
+    let agent = LocalExtensionArchiveAgent {
+        fs: fs.clone(),
+        http_client,
+        node_runtime,
+        project_environment,
+        extension_id: Arc::from("test-ext"),
+        agent_id: Arc::from("test-agent"),
+        targets: {
+            let mut map = HashMap::default();
+            map.insert(
+                "darwin-aarch64".to_string(),
+                extension::TargetConfig {
+                    archive: "https://example.com/test.zip".into(),
+                    cmd: "node".into(),
+                    args: vec![
+                        "server.js".into(),
+                        "--config".into(),
+                        "./config.json".into(),
+                    ],
+                    sha256: None,
+                    env: Default::default(),
+                },
+            );
+            map
+        },
+        env: Default::default(),
+    };
+
+    // Verify the agent is configured with relative paths in args
+    let target = agent.targets.get("darwin-aarch64").unwrap();
+    assert_eq!(target.args[0], "server.js");
+    assert_eq!(target.args[2], "./config.json");
+    // These relative paths will resolve relative to the extraction directory
+    // when the command is executed
+}
+
+#[test]
+fn test_tilde_expansion_in_settings() {
+    let settings = settings::BuiltinAgentServerSettings {
+        path: Some(PathBuf::from("~/bin/agent")),
+        args: Some(vec!["--flag".into()]),
+        env: None,
+        ignore_system_version: None,
+        default_mode: None,
+        default_model: None,
+        favorite_models: vec![],
+        default_config_options: Default::default(),
+        favorite_config_option_values: Default::default(),
+    };
+
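+    // Converting into the runtime settings type should expand the leading "~".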
+    let BuiltinAgentServerSettings { path, .. } = settings.into();
+
+    let path = path.unwrap();
+    assert!(
+        !path.to_string_lossy().starts_with("~"),
+        "Tilde should be expanded for builtin agent path"
+    );
+
+    let settings = settings::CustomAgentServerSettings::Custom {
+        path: PathBuf::from("~/custom/agent"),
+        args: vec!["serve".into()],
+        env: Default::default(),
+        default_mode: None,
+        default_model: None,
+        favorite_models: vec![],
+        default_config_options: Default::default(),
+        favorite_config_option_values: Default::default(),
+    };
+
+    let converted: CustomAgentServerSettings = settings.into();
+    let CustomAgentServerSettings::Custom {
+        command: AgentServerCommand { path, .. },
+        ..
+    } = converted
+    else {
+        panic!("Expected Custom variant");
+    };
+
+    assert!(
+        !path.to_string_lossy().starts_with("~"),
+        "Tilde should be expanded for custom agent path"
+    );
+}

crates/project/tests/integration/git_store.rs 🔗

@@ -0,0 +1,1014 @@
+mod conflict_set_tests {
+    use std::sync::mpsc;
+
+    use crate::Project;
+
+    use fs::FakeFs;
+    use git::{
+        repository::{RepoPath, repo_path},
+        status::{UnmergedStatus, UnmergedStatusCode},
+    };
+    use gpui::{BackgroundExecutor, TestAppContext};
+    use project::git_store::*;
+    use serde_json::json;
+    use text::{Buffer, BufferId, OffsetRangeExt, Point, ReplicaId, ToOffset as _};
+    use unindent::Unindent as _;
+    use util::{path, rel_path::rel_path};
+
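+    // The parser recognizes git's conflict markers: "<<<<<<< ours", an optional "||||||| base" section, "=======", and ">>>>>>> theirs".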
+    #[test]
+    fn test_parse_conflicts_in_buffer() {
+        // Create a buffer with conflict markers
+        let test_content = r#"
+            This is some text before the conflict.
+            <<<<<<< HEAD
+            This is our version
+            =======
+            This is their version
+            >>>>>>> branch-name
+
+            Another conflict:
+            <<<<<<< HEAD
+            Our second change
+            ||||||| merged common ancestors
+            Original content
+            =======
+            Their second change
+            >>>>>>> branch-name
+        "#
+        .unindent();
+
+        let buffer_id = BufferId::new(1).unwrap();
+        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
+        let snapshot = buffer.snapshot();
+
+        let conflict_snapshot = ConflictSet::parse(&snapshot);
+        assert_eq!(conflict_snapshot.conflicts.len(), 2);
+
+        let first = &conflict_snapshot.conflicts[0];
+        assert!(first.base.is_none());
+        assert_eq!(first.ours_branch_name.as_ref(), "HEAD");
+        assert_eq!(first.theirs_branch_name.as_ref(), "branch-name");
+        let our_text = snapshot
+            .text_for_range(first.ours.clone())
+            .collect::<String>();
+        let their_text = snapshot
+            .text_for_range(first.theirs.clone())
+            .collect::<String>();
+        assert_eq!(our_text, "This is our version\n");
+        assert_eq!(their_text, "This is their version\n");
+
+        let second = &conflict_snapshot.conflicts[1];
+        assert!(second.base.is_some());
+        assert_eq!(second.ours_branch_name.as_ref(), "HEAD");
+        assert_eq!(second.theirs_branch_name.as_ref(), "branch-name");
+        let our_text = snapshot
+            .text_for_range(second.ours.clone())
+            .collect::<String>();
+        let their_text = snapshot
+            .text_for_range(second.theirs.clone())
+            .collect::<String>();
+        let base_text = snapshot
+            .text_for_range(second.base.as_ref().unwrap().clone())
+            .collect::<String>();
+        assert_eq!(our_text, "Our second change\n");
+        assert_eq!(their_text, "Their second change\n");
+        assert_eq!(base_text, "Original content\n");
+
+        // Test conflicts_in_range
+        let range = snapshot.anchor_before(0)..snapshot.anchor_before(snapshot.len());
+        let conflicts_in_range = conflict_snapshot.conflicts_in_range(range, &snapshot);
+        assert_eq!(conflicts_in_range.len(), 2);
+
+        // Test with a range that includes only the first conflict
+        let first_conflict_end = conflict_snapshot.conflicts[0].range.end;
+        let range = snapshot.anchor_before(0)..first_conflict_end;
+        let conflicts_in_range = conflict_snapshot.conflicts_in_range(range, &snapshot);
+        assert_eq!(conflicts_in_range.len(), 1);
+
+        // Test with a range that includes only the second conflict
+        let second_conflict_start = conflict_snapshot.conflicts[1].range.start;
+        let range = second_conflict_start..snapshot.anchor_before(snapshot.len());
+        let conflicts_in_range = conflict_snapshot.conflicts_in_range(range, &snapshot);
+        assert_eq!(conflicts_in_range.len(), 1);
+
+        // Test with a range that doesn't include any conflicts
+        let range = buffer.anchor_after(first_conflict_end.to_next_offset(&buffer))
+            ..buffer.anchor_before(second_conflict_start.to_previous_offset(&buffer));
+        let conflicts_in_range = conflict_snapshot.conflicts_in_range(range, &snapshot);
+        assert_eq!(conflicts_in_range.len(), 0);
+    }
+
+    #[test]
+    fn test_nested_conflict_markers() {
+        // Create a buffer with nested conflict markers
+        let test_content = r#"
+            This is some text before the conflict.
+            <<<<<<< HEAD
+            This is our version
+            <<<<<<< HEAD
+            This is a nested conflict marker
+            =======
+            This is their version in a nested conflict
+            >>>>>>> branch-nested
+            =======
+            This is their version
+            >>>>>>> branch-name
+        "#
+        .unindent();
+
+        let buffer_id = BufferId::new(1).unwrap();
+        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
+        let snapshot = buffer.snapshot();
+
+        let conflict_snapshot = ConflictSet::parse(&snapshot);
+
+        assert_eq!(conflict_snapshot.conflicts.len(), 1);
+
+        // The conflict should have our version, their version, but no base
+        let conflict = &conflict_snapshot.conflicts[0];
+        assert!(conflict.base.is_none());
+        assert_eq!(conflict.ours_branch_name.as_ref(), "HEAD");
+        assert_eq!(conflict.theirs_branch_name.as_ref(), "branch-nested");
+
+        // Check that the nested conflict was detected correctly
+        let our_text = snapshot
+            .text_for_range(conflict.ours.clone())
+            .collect::<String>();
+        assert_eq!(our_text, "This is a nested conflict marker\n");
+        let their_text = snapshot
+            .text_for_range(conflict.theirs.clone())
+            .collect::<String>();
+        assert_eq!(their_text, "This is their version in a nested conflict\n");
+    }
+
+    #[test]
+    fn test_conflict_markers_at_eof() {
+        let test_content = r#"
+            <<<<<<< ours
+            =======
+            This is their version
+            >>>>>>> "#
+            .unindent();
+        let buffer_id = BufferId::new(1).unwrap();
+        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content);
+        let snapshot = buffer.snapshot();
+
+        let conflict_snapshot = ConflictSet::parse(&snapshot);
+        assert_eq!(conflict_snapshot.conflicts.len(), 1);
+        assert_eq!(
+            conflict_snapshot.conflicts[0].ours_branch_name.as_ref(),
+            "ours"
+        );
+        assert_eq!(
+            conflict_snapshot.conflicts[0].theirs_branch_name.as_ref(),
+            "Origin" // default branch name if there is none
+        );
+    }
+
+    #[test]
+    fn test_conflicts_in_range() {
+        // Create a buffer with conflict markers
+        let test_content = r#"
+            one
+            <<<<<<< HEAD1
+            two
+            =======
+            three
+            >>>>>>> branch1
+            four
+            five
+            <<<<<<< HEAD2
+            six
+            =======
+            seven
+            >>>>>>> branch2
+            eight
+            nine
+            <<<<<<< HEAD3
+            ten
+            =======
+            eleven
+            >>>>>>> branch3
+            twelve
+            <<<<<<< HEAD4
+            thirteen
+            =======
+            fourteen
+            >>>>>>> branch4
+            fifteen
+        "#
+        .unindent();
+
+        let buffer_id = BufferId::new(1).unwrap();
+        let buffer = Buffer::new(ReplicaId::LOCAL, buffer_id, test_content.clone());
+        let snapshot = buffer.snapshot();
+
+        let conflict_snapshot = ConflictSet::parse(&snapshot);
+        assert_eq!(conflict_snapshot.conflicts.len(), 4);
+        assert_eq!(
+            conflict_snapshot.conflicts[0].ours_branch_name.as_ref(),
+            "HEAD1"
+        );
+        assert_eq!(
+            conflict_snapshot.conflicts[0].theirs_branch_name.as_ref(),
+            "branch1"
+        );
+        assert_eq!(
+            conflict_snapshot.conflicts[1].ours_branch_name.as_ref(),
+            "HEAD2"
+        );
+        assert_eq!(
+            conflict_snapshot.conflicts[1].theirs_branch_name.as_ref(),
+            "branch2"
+        );
+        assert_eq!(
+            conflict_snapshot.conflicts[2].ours_branch_name.as_ref(),
+            "HEAD3"
+        );
+        assert_eq!(
+            conflict_snapshot.conflicts[2].theirs_branch_name.as_ref(),
+            "branch3"
+        );
+        assert_eq!(
+            conflict_snapshot.conflicts[3].ours_branch_name.as_ref(),
+            "HEAD4"
+        );
+        assert_eq!(
+            conflict_snapshot.conflicts[3].theirs_branch_name.as_ref(),
+            "branch4"
+        );
+
+        let range = test_content.find("seven").unwrap()..test_content.find("eleven").unwrap();
+        let range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end);
+        assert_eq!(
+            conflict_snapshot.conflicts_in_range(range, &snapshot),
+            &conflict_snapshot.conflicts[1..=2]
+        );
+
+        let range = test_content.find("one").unwrap()..test_content.find("<<<<<<< HEAD2").unwrap();
+        let range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end);
+        assert_eq!(
+            conflict_snapshot.conflicts_in_range(range, &snapshot),
+            &conflict_snapshot.conflicts[0..=1]
+        );
+
+        let range =
+            test_content.find("eight").unwrap() - 1..test_content.find(">>>>>>> branch3").unwrap();
+        let range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end);
+        assert_eq!(
+            conflict_snapshot.conflicts_in_range(range, &snapshot),
+            &conflict_snapshot.conflicts[1..=2]
+        );
+
+        let range = test_content.find("thirteen").unwrap() - 1..test_content.len();
+        let range = buffer.anchor_before(range.start)..buffer.anchor_after(range.end);
+        assert_eq!(
+            conflict_snapshot.conflicts_in_range(range, &snapshot),
+            &conflict_snapshot.conflicts[3..=3]
+        );
+    }
+
+    #[gpui::test]
+    async fn test_conflict_updates(executor: BackgroundExecutor, cx: &mut TestAppContext) {
+        zlog::init_test();
+        cx.update(|cx| {
+            settings::init(cx);
+        });
+        let initial_text = "
+            one
+            two
+            three
+            four
+            five
+        "
+        .unindent();
+        let fs = FakeFs::new(executor);
+        fs.insert_tree(
+            path!("/project"),
+            json!({
+                ".git": {},
+                "a.txt": initial_text,
+            }),
+        )
+        .await;
+        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
+        let (git_store, buffer) = project.update(cx, |project, cx| {
+            (
+                project.git_store().clone(),
+                project.open_local_buffer(path!("/project/a.txt"), cx),
+            )
+        });
+        let buffer = buffer.await.unwrap();
+        let conflict_set = git_store.update(cx, |git_store, cx| {
+            git_store.open_conflict_set(buffer.clone(), cx)
+        });
+        let (events_tx, events_rx) = mpsc::channel::<ConflictSetUpdate>();
+        let _conflict_set_subscription = cx.update(|cx| {
+            cx.subscribe(&conflict_set, move |_, event, _| {
+                events_tx.send(event.clone()).ok();
+            })
+        });
+        let conflicts_snapshot =
+            conflict_set.read_with(cx, |conflict_set, _| conflict_set.snapshot());
+        assert!(conflicts_snapshot.conflicts.is_empty());
+
+        buffer.update(cx, |buffer, cx| {
+            buffer.edit(
+                [
+                    (4..4, "<<<<<<< HEAD\n"),
+                    (14..14, "=======\nTWO\n>>>>>>> branch\n"),
+                ],
+                None,
+                cx,
+            );
+        });
+
+        cx.run_until_parked();
+        events_rx.try_recv().expect_err(
+            "no conflicts should be registered as long as the file's status is unchanged",
+        );
+
+        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
+            state.unmerged_paths.insert(
+                repo_path("a.txt"),
+                UnmergedStatus {
+                    first_head: UnmergedStatusCode::Updated,
+                    second_head: UnmergedStatusCode::Updated,
+                },
+            );
+            // Cause the repository to emit MergeHeadsChanged.
+            state.refs.insert("MERGE_HEAD".into(), "123".into())
+        })
+        .unwrap();
+
+        cx.run_until_parked();
+        let update = events_rx
+            .try_recv()
+            .expect("status change should trigger conflict parsing");
+        assert_eq!(update.old_range, 0..0);
+        assert_eq!(update.new_range, 0..1);
+
+        let conflict = conflict_set.read_with(cx, |conflict_set, _| {
+            conflict_set.snapshot().conflicts[0].clone()
+        });
+        cx.update(|cx| {
+            conflict.resolve(buffer.clone(), std::slice::from_ref(&conflict.theirs), cx);
+        });
+
+        cx.run_until_parked();
+        let update = events_rx
+            .try_recv()
+            .expect("conflicts should be removed after resolution");
+        assert_eq!(update.old_range, 0..1);
+        assert_eq!(update.new_range, 0..0);
+    }
+
+    #[gpui::test]
+    async fn test_conflict_updates_without_merge_head(
+        executor: BackgroundExecutor,
+        cx: &mut TestAppContext,
+    ) {
+        zlog::init_test();
+        cx.update(|cx| {
+            settings::init(cx);
+        });
+
+        let initial_text = "
+            zero
+            <<<<<<< HEAD
+            one
+            =======
+            two
+            >>>>>>> Stashed Changes
+            three
+        "
+        .unindent();
+
+        let fs = FakeFs::new(executor);
+        fs.insert_tree(
+            path!("/project"),
+            json!({
+                ".git": {},
+                "a.txt": initial_text,
+            }),
+        )
+        .await;
+
+        let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
+        let (git_store, buffer) = project.update(cx, |project, cx| {
+            (
+                project.git_store().clone(),
+                project.open_local_buffer(path!("/project/a.txt"), cx),
+            )
+        });
+
+        cx.run_until_parked();
+        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
+            state.unmerged_paths.insert(
+                RepoPath::from_rel_path(rel_path("a.txt")),
+                UnmergedStatus {
+                    first_head: UnmergedStatusCode::Updated,
+                    second_head: UnmergedStatusCode::Updated,
+                },
+            )
+        })
+        .unwrap();
+
+        let buffer = buffer.await.unwrap();
+
+        // Open the conflict set for a file that currently has conflicts.
+        let conflict_set = git_store.update(cx, |git_store, cx| {
+            git_store.open_conflict_set(buffer.clone(), cx)
+        });
+
+        cx.run_until_parked();
+        conflict_set.update(cx, |conflict_set, cx| {
+            let conflict_range = conflict_set.snapshot().conflicts[0]
+                .range
+                .to_point(buffer.read(cx));
+            assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0));
+        });
+
+        // Simulate the conflict being removed by e.g. staging the file.
+        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
+            state.unmerged_paths.remove(&repo_path("a.txt"))
+        })
+        .unwrap();
+
+        cx.run_until_parked();
+        conflict_set.update(cx, |conflict_set, _| {
+            assert!(!conflict_set.has_conflict);
+            assert_eq!(conflict_set.snapshot.conflicts.len(), 0);
+        });
+
+        // Simulate the conflict being re-added.
+        fs.with_git_state(path!("/project/.git").as_ref(), true, |state| {
+            state.unmerged_paths.insert(
+                repo_path("a.txt"),
+                UnmergedStatus {
+                    first_head: UnmergedStatusCode::Updated,
+                    second_head: UnmergedStatusCode::Updated,
+                },
+            )
+        })
+        .unwrap();
+
+        cx.run_until_parked();
+        conflict_set.update(cx, |conflict_set, cx| {
+            let conflict_range = conflict_set.snapshot().conflicts[0]
+                .range
+                .to_point(buffer.read(cx));
+            assert_eq!(conflict_range, Point::new(1, 0)..Point::new(6, 0));
+        });
+    }
+}
+
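+// Tests for GitTraversal and git status propagation across nested repositories in a worktree.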
+mod git_traversal {
+    use std::{path::Path, time::Duration};
+
+    use collections::HashMap;
+    use project::{
+        Project,
+        git_store::{RepositoryId, RepositorySnapshot},
+    };
+
+    use fs::FakeFs;
+    use git::status::{
+        FileStatus, GitSummary, StatusCode, TrackedSummary, UnmergedStatus, UnmergedStatusCode,
+    };
+    use gpui::TestAppContext;
+    use project::GitTraversal;
+
+    use serde_json::json;
+    use settings::SettingsStore;
+    use util::{
+        path,
+        rel_path::{RelPath, rel_path},
+    };
+
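+    // Status and summary constants reused across the assertions below.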
+    const CONFLICT: FileStatus = FileStatus::Unmerged(UnmergedStatus {
+        first_head: UnmergedStatusCode::Updated,
+        second_head: UnmergedStatusCode::Updated,
+    });
+    const ADDED: GitSummary = GitSummary {
+        index: TrackedSummary::ADDED,
+        count: 1,
+        ..GitSummary::UNCHANGED
+    };
+    const MODIFIED: GitSummary = GitSummary {
+        index: TrackedSummary::MODIFIED,
+        count: 1,
+        ..GitSummary::UNCHANGED
+    };
+
+    #[gpui::test]
+    async fn test_git_traversal_with_one_repo(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/root"),
+            json!({
+                "x": {
+                    ".git": {},
+                    "x1.txt": "foo",
+                    "x2.txt": "bar",
+                    "y": {
+                        ".git": {},
+                        "y1.txt": "baz",
+                        "y2.txt": "qux"
+                    },
+                    "z.txt": "sneaky..."
+                },
+                "z": {
+                    ".git": {},
+                    "z1.txt": "quux",
+                    "z2.txt": "quuux"
+                }
+            }),
+        )
+        .await;
+
+        fs.set_status_for_repo(
+            Path::new(path!("/root/x/.git")),
+            &[
+                ("x2.txt", StatusCode::Modified.index()),
+                ("z.txt", StatusCode::Added.index()),
+            ],
+        );
+        fs.set_status_for_repo(Path::new(path!("/root/x/y/.git")), &[("y1.txt", CONFLICT)]);
+        fs.set_status_for_repo(
+            Path::new(path!("/root/z/.git")),
+            &[("z2.txt", StatusCode::Added.index())],
+        );
+
+        let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+        cx.executor().run_until_parked();
+
+        let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
+            (
+                project.git_store().read(cx).repo_snapshots(cx),
+                project.worktrees(cx).next().unwrap().read(cx).snapshot(),
+            )
+        });
+
+        let traversal = GitTraversal::new(
+            &repo_snapshots,
+            worktree_snapshot.traverse_from_path(true, false, true, RelPath::unix("x").unwrap()),
+        );
+        let entries = traversal
+            .map(|entry| (entry.path.clone(), entry.git_summary))
+            .collect::<Vec<_>>();
+        pretty_assertions::assert_eq!(
+            entries,
+            [
+                (rel_path("x/x1.txt").into(), GitSummary::UNCHANGED),
+                (rel_path("x/x2.txt").into(), MODIFIED),
+                (rel_path("x/y/y1.txt").into(), GitSummary::CONFLICT),
+                (rel_path("x/y/y2.txt").into(), GitSummary::UNCHANGED),
+                (rel_path("x/z.txt").into(), ADDED),
+                (rel_path("z/z1.txt").into(), GitSummary::UNCHANGED),
+                (rel_path("z/z2.txt").into(), ADDED),
+            ]
+        )
+    }
+
+    #[gpui::test]
+    async fn test_git_traversal_with_nested_repos(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/root"),
+            json!({
+                "x": {
+                    ".git": {},
+                    "x1.txt": "foo",
+                    "x2.txt": "bar",
+                    "y": {
+                        ".git": {},
+                        "y1.txt": "baz",
+                        "y2.txt": "qux"
+                    },
+                    "z.txt": "sneaky..."
+                },
+                "z": {
+                    ".git": {},
+                    "z1.txt": "quux",
+                    "z2.txt": "quuux"
+                }
+            }),
+        )
+        .await;
+
+        fs.set_status_for_repo(
+            Path::new(path!("/root/x/.git")),
+            &[
+                ("x2.txt", StatusCode::Modified.index()),
+                ("z.txt", StatusCode::Added.index()),
+            ],
+        );
+        fs.set_status_for_repo(Path::new(path!("/root/x/y/.git")), &[("y1.txt", CONFLICT)]);
+
+        fs.set_status_for_repo(
+            Path::new(path!("/root/z/.git")),
+            &[("z2.txt", StatusCode::Added.index())],
+        );
+
+        let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+        cx.executor().run_until_parked();
+
+        let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
+            (
+                project.git_store().read(cx).repo_snapshots(cx),
+                project.worktrees(cx).next().unwrap().read(cx).snapshot(),
+            )
+        });
+
+        // Sanity check the propagation for x/y and z
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("x/y", GitSummary::CONFLICT),
+                ("x/y/y1.txt", GitSummary::CONFLICT),
+                ("x/y/y2.txt", GitSummary::UNCHANGED),
+            ],
+        );
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("z", ADDED),
+                ("z/z1.txt", GitSummary::UNCHANGED),
+                ("z/z2.txt", ADDED),
+            ],
+        );
+
+        // Test one of the fundamental cases of propagation blocking, the transition from one git repository to another
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("x", MODIFIED + ADDED),
+                ("x/y", GitSummary::CONFLICT),
+                ("x/y/y1.txt", GitSummary::CONFLICT),
+            ],
+        );
+
+        // Sanity check everything around it
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("x", MODIFIED + ADDED),
+                ("x/x1.txt", GitSummary::UNCHANGED),
+                ("x/x2.txt", MODIFIED),
+                ("x/y", GitSummary::CONFLICT),
+                ("x/y/y1.txt", GitSummary::CONFLICT),
+                ("x/y/y2.txt", GitSummary::UNCHANGED),
+                ("x/z.txt", ADDED),
+            ],
+        );
+
+        // Test the other fundamental case, transitioning from git repository to non-git repository
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("", GitSummary::UNCHANGED),
+                ("x", MODIFIED + ADDED),
+                ("x/x1.txt", GitSummary::UNCHANGED),
+            ],
+        );
+
+        // And all together now
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("", GitSummary::UNCHANGED),
+                ("x", MODIFIED + ADDED),
+                ("x/x1.txt", GitSummary::UNCHANGED),
+                ("x/x2.txt", MODIFIED),
+                ("x/y", GitSummary::CONFLICT),
+                ("x/y/y1.txt", GitSummary::CONFLICT),
+                ("x/y/y2.txt", GitSummary::UNCHANGED),
+                ("x/z.txt", ADDED),
+                ("z", ADDED),
+                ("z/z1.txt", GitSummary::UNCHANGED),
+                ("z/z2.txt", ADDED),
+            ],
+        );
+    }
+
+    #[gpui::test]
+    async fn test_git_traversal_simple(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/root"),
+            json!({
+                ".git": {},
+                "a": {
+                    "b": {
+                        "c1.txt": "",
+                        "c2.txt": "",
+                    },
+                    "d": {
+                        "e1.txt": "",
+                        "e2.txt": "",
+                        "e3.txt": "",
+                    }
+                },
+                "f": {
+                    "no-status.txt": ""
+                },
+                "g": {
+                    "h1.txt": "",
+                    "h2.txt": ""
+                },
+            }),
+        )
+        .await;
+
+        fs.set_status_for_repo(
+            Path::new(path!("/root/.git")),
+            &[
+                ("a/b/c1.txt", StatusCode::Added.index()),
+                ("a/d/e2.txt", StatusCode::Modified.index()),
+                ("g/h2.txt", CONFLICT),
+            ],
+        );
+
+        let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+        cx.executor().run_until_parked();
+
+        let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
+            (
+                project.git_store().read(cx).repo_snapshots(cx),
+                project.worktrees(cx).next().unwrap().read(cx).snapshot(),
+            )
+        });
+
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("", GitSummary::CONFLICT + MODIFIED + ADDED),
+                ("g", GitSummary::CONFLICT),
+                ("g/h2.txt", GitSummary::CONFLICT),
+            ],
+        );
+
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("", GitSummary::CONFLICT + ADDED + MODIFIED),
+                ("a", ADDED + MODIFIED),
+                ("a/b", ADDED),
+                ("a/b/c1.txt", ADDED),
+                ("a/b/c2.txt", GitSummary::UNCHANGED),
+                ("a/d", MODIFIED),
+                ("a/d/e2.txt", MODIFIED),
+                ("f", GitSummary::UNCHANGED),
+                ("f/no-status.txt", GitSummary::UNCHANGED),
+                ("g", GitSummary::CONFLICT),
+                ("g/h2.txt", GitSummary::CONFLICT),
+            ],
+        );
+
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("a/b", ADDED),
+                ("a/b/c1.txt", ADDED),
+                ("a/b/c2.txt", GitSummary::UNCHANGED),
+                ("a/d", MODIFIED),
+                ("a/d/e1.txt", GitSummary::UNCHANGED),
+                ("a/d/e2.txt", MODIFIED),
+                ("f", GitSummary::UNCHANGED),
+                ("f/no-status.txt", GitSummary::UNCHANGED),
+                ("g", GitSummary::CONFLICT),
+            ],
+        );
+
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("a/b/c1.txt", ADDED),
+                ("a/b/c2.txt", GitSummary::UNCHANGED),
+                ("a/d/e1.txt", GitSummary::UNCHANGED),
+                ("a/d/e2.txt", MODIFIED),
+                ("f/no-status.txt", GitSummary::UNCHANGED),
+            ],
+        );
+    }
+
+    #[gpui::test]
+    async fn test_git_traversal_with_repos_under_project(cx: &mut TestAppContext) {
+        init_test(cx);
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/root"),
+            json!({
+                "x": {
+                    ".git": {},
+                    "x1.txt": "foo",
+                    "x2.txt": "bar"
+                },
+                "y": {
+                    ".git": {},
+                    "y1.txt": "baz",
+                    "y2.txt": "qux"
+                },
+                "z": {
+                    ".git": {},
+                    "z1.txt": "quux",
+                    "z2.txt": "quuux"
+                }
+            }),
+        )
+        .await;
+
+        fs.set_status_for_repo(
+            Path::new(path!("/root/x/.git")),
+            &[("x1.txt", StatusCode::Added.index())],
+        );
+        fs.set_status_for_repo(
+            Path::new(path!("/root/y/.git")),
+            &[
+                ("y1.txt", CONFLICT),
+                ("y2.txt", StatusCode::Modified.index()),
+            ],
+        );
+        fs.set_status_for_repo(
+            Path::new(path!("/root/z/.git")),
+            &[("z2.txt", StatusCode::Modified.index())],
+        );
+
+        let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+        cx.executor().run_until_parked();
+
+        let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
+            (
+                project.git_store().read(cx).repo_snapshots(cx),
+                project.worktrees(cx).next().unwrap().read(cx).snapshot(),
+            )
+        });
+
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[("x", ADDED), ("x/x1.txt", ADDED)],
+        );
+
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("y", GitSummary::CONFLICT + MODIFIED),
+                ("y/y1.txt", GitSummary::CONFLICT),
+                ("y/y2.txt", MODIFIED),
+            ],
+        );
+
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[("z", MODIFIED), ("z/z2.txt", MODIFIED)],
+        );
+
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[("x", ADDED), ("x/x1.txt", ADDED)],
+        );
+
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("x", ADDED),
+                ("x/x1.txt", ADDED),
+                ("x/x2.txt", GitSummary::UNCHANGED),
+                ("y", GitSummary::CONFLICT + MODIFIED),
+                ("y/y1.txt", GitSummary::CONFLICT),
+                ("y/y2.txt", MODIFIED),
+                ("z", MODIFIED),
+                ("z/z1.txt", GitSummary::UNCHANGED),
+                ("z/z2.txt", MODIFIED),
+            ],
+        );
+    }
+
+    fn init_test(cx: &mut gpui::TestAppContext) {
+        zlog::init_test();
+
+        cx.update(|cx| {
+            let settings_store = SettingsStore::test(cx);
+            cx.set_global(settings_store);
+        });
+    }
+
+    #[gpui::test]
+    async fn test_bump_mtime_of_git_repo_workdir(cx: &mut TestAppContext) {
+        init_test(cx);
+
+        // Create a worktree with a git directory.
+        let fs = FakeFs::new(cx.background_executor.clone());
+        fs.insert_tree(
+            path!("/root"),
+            json!({
+                ".git": {},
+                "a.txt": "",
+                "b": {
+                    "c.txt": "",
+                },
+            }),
+        )
+        .await;
+        fs.set_head_and_index_for_repo(
+            path!("/root/.git").as_ref(),
+            &[("a.txt", "".into()), ("b/c.txt", "".into())],
+        );
+        cx.run_until_parked();
+
+        let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
+        cx.executor().run_until_parked();
+
+        let (old_entry_ids, old_mtimes) = project.read_with(cx, |project, cx| {
+            let tree = project.worktrees(cx).next().unwrap().read(cx);
+            (
+                tree.entries(true, 0).map(|e| e.id).collect::<Vec<_>>(),
+                tree.entries(true, 0).map(|e| e.mtime).collect::<Vec<_>>(),
+            )
+        });
+
+        // Regression test: after the directory is scanned, touch the git repo's
+        // working directory to bump its mtime. The directory should keep its
+        // project entry id after the directories are re-scanned.
+        fs.touch_path(path!("/root")).await;
+        cx.executor().run_until_parked();
+
+        let (new_entry_ids, new_mtimes) = project.read_with(cx, |project, cx| {
+            let tree = project.worktrees(cx).next().unwrap().read(cx);
+            (
+                tree.entries(true, 0).map(|e| e.id).collect::<Vec<_>>(),
+                tree.entries(true, 0).map(|e| e.mtime).collect::<Vec<_>>(),
+            )
+        });
+        assert_eq!(new_entry_ids, old_entry_ids);
+        assert_ne!(new_mtimes, old_mtimes);
+
+        // Regression test: changes to the git repository should still be
+        // detected.
+        fs.set_head_for_repo(
+            path!("/root/.git").as_ref(),
+            &[("a.txt", "".into()), ("b/c.txt", "something-else".into())],
+            "deadbeef",
+        );
+        cx.executor().run_until_parked();
+        cx.executor().advance_clock(Duration::from_secs(1));
+
+        let (repo_snapshots, worktree_snapshot) = project.read_with(cx, |project, cx| {
+            (
+                project.git_store().read(cx).repo_snapshots(cx),
+                project.worktrees(cx).next().unwrap().read(cx).snapshot(),
+            )
+        });
+
+        check_git_statuses(
+            &repo_snapshots,
+            &worktree_snapshot,
+            &[
+                ("", MODIFIED),
+                ("a.txt", GitSummary::UNCHANGED),
+                ("b/c.txt", MODIFIED),
+            ],
+        );
+    }
+
+    #[track_caller]
+    fn check_git_statuses(
+        repo_snapshots: &HashMap<RepositoryId, RepositorySnapshot>,
+        worktree_snapshot: &worktree::Snapshot,
+        expected_statuses: &[(&str, GitSummary)],
+    ) {
+        let mut traversal = GitTraversal::new(
+            repo_snapshots,
+            worktree_snapshot.traverse_from_path(true, true, false, RelPath::empty()),
+        );
+        let found_statuses = expected_statuses
+            .iter()
+            .map(|&(path, _)| {
+                let git_entry = traversal
+                    .find(|git_entry| git_entry.path.as_ref() == rel_path(path))
+                    .unwrap_or_else(|| panic!("Traversal has no entry for {path:?}"));
+                (path, git_entry.git_summary)
+            })
+            .collect::<Vec<_>>();
+        pretty_assertions::assert_eq!(found_statuses, expected_statuses);
+    }
+}

crates/project/tests/integration/image_store.rs 🔗

@@ -0,0 +1,78 @@
+use fs::FakeFs;
+use gpui::TestAppContext;
+use project::Project;
+use project::ProjectPath;
+use project::image_store::*;
+use serde_json::json;
+use settings::SettingsStore;
+use util::rel_path::rel_path;
+
+pub fn init_test(cx: &mut TestAppContext) {
+    zlog::init_test();
+
+    cx.update(|cx| {
+        let settings_store = SettingsStore::test(cx);
+        cx.set_global(settings_store);
+    });
+}
+
+#[gpui::test]
+async fn test_image_not_loaded_twice(cx: &mut TestAppContext) {
+    init_test(cx);
+    let fs = FakeFs::new(cx.executor());
+
+    fs.insert_tree("/root", json!({})).await;
+    // Create a png file that consists of a single white pixel
+    fs.insert_file(
+        "/root/image_1.png",
+        vec![
+            0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48,
+            0x44, 0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x06, 0x00, 0x00,
+            0x00, 0x1F, 0x15, 0xC4, 0x89, 0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, 0x78,
+            0x9C, 0x63, 0x00, 0x01, 0x00, 0x00, 0x05, 0x00, 0x01, 0x0D, 0x0A, 0x2D, 0xB4, 0x00,
+            0x00, 0x00, 0x00, 0x49, 0x45, 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82,
+        ],
+    )
+    .await;
+
+    let project = Project::test(fs, ["/root".as_ref()], cx).await;
+
+    let worktree_id = cx.update(|cx| project.read(cx).worktrees(cx).next().unwrap().read(cx).id());
+
+    let project_path = ProjectPath {
+        worktree_id,
+        path: rel_path("image_1.png").into(),
+    };
+
+    let (task1, task2) = project.update(cx, |project, cx| {
+        (
+            project.open_image(project_path.clone(), cx),
+            project.open_image(project_path.clone(), cx),
+        )
+    });
+
+    let image1 = task1.await.unwrap();
+    let image2 = task2.await.unwrap();
+
+    assert_eq!(image1, image2);
+}
+
+#[gpui::test]
+fn test_compute_metadata_from_bytes() {
+    // Single white pixel PNG
+    let png_bytes = vec![
+        0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A, 0x00, 0x00, 0x00, 0x0D, 0x49, 0x48, 0x44,
+        0x52, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x01, 0x08, 0x06, 0x00, 0x00, 0x00, 0x1F,
+        0x15, 0xC4, 0x89, 0x00, 0x00, 0x00, 0x0A, 0x49, 0x44, 0x41, 0x54, 0x78, 0x9C, 0x63, 0x00,
+        0x01, 0x00, 0x00, 0x05, 0x00, 0x01, 0x0D, 0x0A, 0x2D, 0xB4, 0x00, 0x00, 0x00, 0x00, 0x49,
+        0x45, 0x4E, 0x44, 0xAE, 0x42, 0x60, 0x82,
+    ];
+
+    let metadata = ImageItem::compute_metadata_from_bytes(&png_bytes).unwrap();
+
+    assert_eq!(metadata.width, 1);
+    assert_eq!(metadata.height, 1);
+    assert_eq!(metadata.file_size, png_bytes.len() as u64);
+    assert_eq!(metadata.format, image::ImageFormat::Png);
+    assert!(metadata.colors.is_some());
+}
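
A minimal decode check, sketched here for illustration only (not part of this diff): the `image` crate, which the metadata test above already relies on via `image::ImageFormat`, can confirm the 1x1 PNG fixture directly.

    use image::GenericImageView;

    fn assert_fixture_is_single_pixel(png_bytes: &[u8]) {
        // Decode the same single-white-pixel PNG used by the tests above.
        let decoded = image::load_from_memory(png_bytes).expect("PNG fixture should decode");
        assert_eq!(decoded.dimensions(), (1, 1));
    }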

crates/project/tests/integration/lsp_command.rs 🔗

@@ -0,0 +1,128 @@
+use std::str::FromStr;
+
+use lsp::{DiagnosticSeverity, DiagnosticTag};
+use project::lsp_command::*;
+use rpc::proto::{self};
+use serde_json::json;
+
+#[test]
+fn test_serialize_lsp_diagnostic() {
+    let lsp_diagnostic = lsp::Diagnostic {
+        range: lsp::Range {
+            start: lsp::Position::new(0, 1),
+            end: lsp::Position::new(2, 3),
+        },
+        severity: Some(DiagnosticSeverity::ERROR),
+        code: Some(lsp::NumberOrString::String("E001".to_string())),
+        source: Some("test-source".to_string()),
+        message: "Test error message".to_string(),
+        related_information: None,
+        tags: Some(vec![DiagnosticTag::DEPRECATED]),
+        code_description: None,
+        data: Some(json!({"detail": "test detail"})),
+    };
+
+    let proto_diagnostic = GetDocumentDiagnostics::serialize_lsp_diagnostic(lsp_diagnostic)
+        .expect("Failed to serialize diagnostic");
+
+    let start = proto_diagnostic.start.unwrap();
+    let end = proto_diagnostic.end.unwrap();
+    assert_eq!(start.row, 0);
+    assert_eq!(start.column, 1);
+    assert_eq!(end.row, 2);
+    assert_eq!(end.column, 3);
+    assert_eq!(
+        proto_diagnostic.severity,
+        proto::lsp_diagnostic::Severity::Error as i32
+    );
+    assert_eq!(proto_diagnostic.code, Some("E001".to_string()));
+    assert_eq!(proto_diagnostic.source, Some("test-source".to_string()));
+    assert_eq!(proto_diagnostic.message, "Test error message");
+}
+
+#[test]
+fn test_deserialize_lsp_diagnostic() {
+    let proto_diagnostic = proto::LspDiagnostic {
+        start: Some(proto::PointUtf16 { row: 0, column: 1 }),
+        end: Some(proto::PointUtf16 { row: 2, column: 3 }),
+        severity: proto::lsp_diagnostic::Severity::Warning as i32,
+        code: Some("ERR".to_string()),
+        source: Some("Prism".to_string()),
+        message: "assigned but unused variable - a".to_string(),
+        related_information: vec![],
+        tags: vec![],
+        code_description: None,
+        data: None,
+    };
+
+    let lsp_diagnostic = GetDocumentDiagnostics::deserialize_lsp_diagnostic(proto_diagnostic)
+        .expect("Failed to deserialize diagnostic");
+
+    assert_eq!(lsp_diagnostic.range.start.line, 0);
+    assert_eq!(lsp_diagnostic.range.start.character, 1);
+    assert_eq!(lsp_diagnostic.range.end.line, 2);
+    assert_eq!(lsp_diagnostic.range.end.character, 3);
+    assert_eq!(lsp_diagnostic.severity, Some(DiagnosticSeverity::WARNING));
+    assert_eq!(
+        lsp_diagnostic.code,
+        Some(lsp::NumberOrString::String("ERR".to_string()))
+    );
+    assert_eq!(lsp_diagnostic.source, Some("Prism".to_string()));
+    assert_eq!(lsp_diagnostic.message, "assigned but unused variable - a");
+}
+
+#[test]
+fn test_related_information() {
+    let related_info = lsp::DiagnosticRelatedInformation {
+        location: lsp::Location {
+            uri: lsp::Uri::from_str("file:///test.rs").unwrap(),
+            range: lsp::Range {
+                start: lsp::Position::new(1, 1),
+                end: lsp::Position::new(1, 5),
+            },
+        },
+        message: "Related info message".to_string(),
+    };
+
+    let lsp_diagnostic = lsp::Diagnostic {
+        range: lsp::Range {
+            start: lsp::Position::new(0, 0),
+            end: lsp::Position::new(0, 1),
+        },
+        severity: Some(DiagnosticSeverity::INFORMATION),
+        code: None,
+        source: Some("Prism".to_string()),
+        message: "assigned but unused variable - a".to_string(),
+        related_information: Some(vec![related_info]),
+        tags: None,
+        code_description: None,
+        data: None,
+    };
+
+    let proto_diagnostic = GetDocumentDiagnostics::serialize_lsp_diagnostic(lsp_diagnostic)
+        .expect("Failed to serialize diagnostic");
+
+    assert_eq!(proto_diagnostic.related_information.len(), 1);
+    let related = &proto_diagnostic.related_information[0];
+    assert_eq!(related.location_url, Some("file:///test.rs".to_string()));
+    assert_eq!(related.message, "Related info message");
+}
+
+#[test]
+fn test_invalid_ranges() {
+    let proto_diagnostic = proto::LspDiagnostic {
+        start: None,
+        end: Some(proto::PointUtf16 { row: 2, column: 3 }),
+        severity: proto::lsp_diagnostic::Severity::Error as i32,
+        code: None,
+        source: None,
+        message: "Test message".to_string(),
+        related_information: vec![],
+        tags: vec![],
+        code_description: None,
+        data: None,
+    };
+
+    let result = GetDocumentDiagnostics::deserialize_lsp_diagnostic(proto_diagnostic);
+    assert!(result.is_err());
+}
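
A possible follow-up round-trip check, sketched only from the two helpers exercised above (the test name is hypothetical, and it assumes the serializer accepts a diagnostic built with `lsp::Diagnostic`'s `Default` impl):

    #[test]
    fn test_diagnostic_round_trip_sketch() {
        let original = lsp::Diagnostic {
            range: lsp::Range {
                start: lsp::Position::new(1, 2),
                end: lsp::Position::new(3, 4),
            },
            severity: Some(DiagnosticSeverity::ERROR),
            message: "round trip".to_string(),
            ..Default::default()
        };
        // Serializing and then deserializing should preserve the basic fields.
        let proto = GetDocumentDiagnostics::serialize_lsp_diagnostic(original.clone())
            .expect("serialization should succeed");
        let restored = GetDocumentDiagnostics::deserialize_lsp_diagnostic(proto)
            .expect("deserialization should succeed");
        assert_eq!(restored.range, original.range);
        assert_eq!(restored.message, original.message);
    }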

crates/project/tests/integration/lsp_store.rs 🔗

@@ -0,0 +1,74 @@
+use std::path::Path;
+
+use language::{CodeLabel, HighlightId};
+
+use project::lsp_store::*;
+
+#[test]
+fn test_glob_literal_prefix() {
+    assert_eq!(glob_literal_prefix(Path::new("**/*.js")), Path::new(""));
+    assert_eq!(
+        glob_literal_prefix(Path::new("node_modules/**/*.js")),
+        Path::new("node_modules")
+    );
+    assert_eq!(
+        glob_literal_prefix(Path::new("foo/{bar,baz}.js")),
+        Path::new("foo")
+    );
+    assert_eq!(
+        glob_literal_prefix(Path::new("foo/bar/baz.js")),
+        Path::new("foo/bar/baz.js")
+    );
+
+    #[cfg(target_os = "windows")]
+    {
+        assert_eq!(glob_literal_prefix(Path::new("**\\*.js")), Path::new(""));
+        assert_eq!(
+            glob_literal_prefix(Path::new("node_modules\\**/*.js")),
+            Path::new("node_modules")
+        );
+        assert_eq!(
+            glob_literal_prefix(Path::new("foo/{bar,baz}.js")),
+            Path::new("foo")
+        );
+        assert_eq!(
+            glob_literal_prefix(Path::new("foo\\bar\\baz.js")),
+            Path::new("foo/bar/baz.js")
+        );
+    }
+}
+
+#[test]
+fn test_multi_len_chars_normalization() {
+    let mut label = CodeLabel::new(
+        "myElˇ (parameter) myElˇ: {\n    foo: string;\n}".to_string(),
+        0..6,
+        vec![(0..6, HighlightId(1))],
+    );
+    ensure_uniform_list_compatible_label(&mut label);
+    assert_eq!(
+        label,
+        CodeLabel::new(
+            "myElˇ (parameter) myElˇ: { foo: string; }".to_string(),
+            0..6,
+            vec![(0..6, HighlightId(1))],
+        )
+    );
+}
+
+#[test]
+fn test_trailing_newline_in_completion_documentation() {
+    let doc =
+        lsp::Documentation::String("Inappropriate argument value (of correct type).\n".to_string());
+    let completion_doc: CompletionDocumentation = doc.into();
+    assert!(
+        matches!(completion_doc, CompletionDocumentation::SingleLine(s) if s == "Inappropriate argument value (of correct type).")
+    );
+
+    let doc = lsp::Documentation::String("  some value  \n".to_string());
+    let completion_doc: CompletionDocumentation = doc.into();
+    assert!(matches!(
+        completion_doc,
+        CompletionDocumentation::SingleLine(s) if s == "some value"
+    ));
+}

crates/project/tests/integration/manifest_tree.rs 🔗

@@ -0,0 +1,124 @@
+mod path_trie {
+    use std::{collections::BTreeSet, ops::ControlFlow};
+
+    use util::rel_path::rel_path;
+
+    use project::manifest_tree::path_trie::*;
+
+    #[test]
+    fn test_insert_and_lookup() {
+        let mut trie = RootPathTrie::<()>::new();
+        trie.insert(
+            &TriePath::new(rel_path("a/b/c")),
+            (),
+            LabelPresence::Present,
+        );
+
+        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
+            assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
+            assert_eq!(path.as_unix_str(), "a/b/c");
+            ControlFlow::Continue(())
+        });
+        // Now annotate a parent with a "known absent" node.
+        trie.insert(
+            &TriePath::new(rel_path("a")),
+            (),
+            LabelPresence::KnownAbsent,
+        );
+
+        // Ensure that we walk from the root to the leaf.
+        let mut visited_paths = BTreeSet::new();
+        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
+            if path.as_unix_str() == "a/b/c" {
+                assert_eq!(visited_paths, BTreeSet::from_iter([rel_path("a").into()]));
+                assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
+            } else if path.as_unix_str() == "a" {
+                assert!(visited_paths.is_empty());
+                assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
+            } else {
+                panic!("Unknown path");
+            }
+            // Assert that we only ever visit a path once.
+            assert!(visited_paths.insert(path.clone()));
+            ControlFlow::Continue(())
+        });
+
+        // One can also pass a path whose prefix is in the tree, but not that path itself.
+        let mut visited_paths = BTreeSet::new();
+        trie.walk(
+            &TriePath::new(rel_path("a/b/c/d/e/f/g")),
+            &mut |path, nodes| {
+                if path.as_unix_str() == "a/b/c" {
+                    assert_eq!(visited_paths, BTreeSet::from_iter([rel_path("a").into()]));
+                    assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
+                } else if path.as_unix_str() == "a" {
+                    assert!(visited_paths.is_empty());
+                    assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
+                } else {
+                    panic!("Unknown path");
+                }
+                // Assert that we only ever visit a path once.
+                assert!(visited_paths.insert(path.clone()));
+                ControlFlow::Continue(())
+            },
+        );
+
+        // Test breaking from the tree-walk.
+        let mut visited_paths = BTreeSet::new();
+        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
+            if path.as_unix_str() == "a" {
+                assert!(visited_paths.is_empty());
+                assert_eq!(nodes.get(&()), Some(&LabelPresence::KnownAbsent));
+            } else {
+                panic!("Unknown path");
+            }
+            // Assert that we only ever visit a path once.
+            assert!(visited_paths.insert(path.clone()));
+            ControlFlow::Break(())
+        });
+        assert_eq!(visited_paths.len(), 1);
+
+        // Entry removal.
+        trie.insert(
+            &TriePath::new(rel_path("a/b")),
+            (),
+            LabelPresence::KnownAbsent,
+        );
+        let mut visited_paths = BTreeSet::new();
+        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, _nodes| {
+            // Assert that we only ever visit a path once.
+            assert!(visited_paths.insert(path.clone()));
+            ControlFlow::Continue(())
+        });
+        assert_eq!(visited_paths.len(), 3);
+        trie.remove(&TriePath::new(rel_path("a/b")));
+        let mut visited_paths = BTreeSet::new();
+        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, _nodes| {
+            // Assert that we only ever visit a path once.
+            assert!(visited_paths.insert(path.clone()));
+            ControlFlow::Continue(())
+        });
+        assert_eq!(visited_paths.len(), 1);
+        assert_eq!(
+            visited_paths.into_iter().next().unwrap(),
+            rel_path("a").into()
+        );
+    }
+
+    #[test]
+    fn path_to_a_root_can_contain_multiple_known_nodes() {
+        let mut trie = RootPathTrie::<()>::new();
+        trie.insert(&TriePath::new(rel_path("a/b")), (), LabelPresence::Present);
+        trie.insert(&TriePath::new(rel_path("a")), (), LabelPresence::Present);
+        let mut visited_paths = BTreeSet::new();
+        trie.walk(&TriePath::new(rel_path("a/b/c")), &mut |path, nodes| {
+            assert_eq!(nodes.get(&()), Some(&LabelPresence::Present));
+            if path.as_unix_str() != "a" && path.as_unix_str() != "a/b" {
+                panic!("Unexpected path: {}", path.as_unix_str());
+            }
+            assert!(visited_paths.insert(path.clone()));
+            ControlFlow::Continue(())
+        });
+        assert_eq!(visited_paths.len(), 2);
+    }
+}

crates/project/tests/integration/project_search.rs 🔗

@@ -0,0 +1,114 @@
+use std::{path::Path, sync::Arc};
+
+use crate::init_test;
+use fs::FakeFs;
+use project::{Project, ProjectEntryId, project_search::PathInclusionMatcher, search::SearchQuery};
+use serde_json::json;
+use settings::Settings;
+use util::{
+    path,
+    paths::{PathMatcher, PathStyle},
+    rel_path::RelPath,
+};
+use worktree::{Entry, EntryKind, WorktreeSettings};
+
+#[gpui::test]
+async fn test_path_inclusion_matcher(cx: &mut gpui::TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.background_executor.clone());
+    fs.insert_tree(
+        "/root",
+        json!({
+            ".gitignore": "src/data/\n",
+            "src": {
+                "data": {
+                    "main.csv": "field_1,field_2,field_3",
+                },
+                "lib": {
+                    "main.txt": "Are you familiar with fields?",
+                },
+            },
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
+    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
+    let (worktree_settings, worktree_snapshot) = worktree.update(cx, |worktree, cx| {
+        let settings_location = worktree.settings_location(cx);
+        (
+            WorktreeSettings::get(Some(settings_location), cx).clone(),
+            worktree.snapshot(),
+        )
+    });
+
+    // Manually create a test entry for the gitignored directory since it won't
+    // be loaded by the worktree
+    let entry = Entry {
+        id: ProjectEntryId::from_proto(1),
+        kind: EntryKind::UnloadedDir,
+        path: Arc::from(RelPath::unix(Path::new("src/data")).unwrap()),
+        inode: 0,
+        mtime: None,
+        canonical_path: None,
+        is_ignored: true,
+        is_hidden: false,
+        is_always_included: false,
+        is_external: false,
+        is_private: false,
+        size: 0,
+        char_bag: Default::default(),
+        is_fifo: false,
+    };
+
+    // 1. Test searching for `field`, including ignored files without any
+    // inclusion and exclusion filters.
+    let include_ignored = true;
+    let files_to_include = PathMatcher::default();
+    let files_to_exclude = PathMatcher::default();
+    let match_full_paths = false;
+    let search_query = SearchQuery::text(
+        "field",
+        false,
+        false,
+        include_ignored,
+        files_to_include,
+        files_to_exclude,
+        match_full_paths,
+        None,
+    )
+    .unwrap();
+
+    let path_matcher = PathInclusionMatcher::new(Arc::new(search_query));
+    assert!(path_matcher.should_scan_gitignored_dir(
+        &entry,
+        &worktree_snapshot,
+        &worktree_settings
+    ));
+
+    // 2. Test searching for `field`, including ignored files but updating
+    // `files_to_include` to only include files under `src/lib`.
+    let include_ignored = true;
+    let files_to_include = PathMatcher::new(vec!["src/lib"], PathStyle::Posix).unwrap();
+    let files_to_exclude = PathMatcher::default();
+    let match_full_paths = false;
+    let search_query = SearchQuery::text(
+        "field",
+        false,
+        false,
+        include_ignored,
+        files_to_include,
+        files_to_exclude,
+        match_full_paths,
+        None,
+    )
+    .unwrap();
+
+    let path_matcher = PathInclusionMatcher::new(Arc::new(search_query));
+    assert!(!path_matcher.should_scan_gitignored_dir(
+        &entry,
+        &worktree_snapshot,
+        &worktree_settings
+    ));
+}

crates/project/src/project_tests.rs → crates/project/tests/integration/project_tests.rs 🔗

@@ -1,62 +1,90 @@
 #![allow(clippy::format_collect)]
 
-use crate::{
-    Event,
-    git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
-    task_inventory::TaskContexts,
-    task_store::TaskSettingsLocation,
-    *,
-};
+mod color_extractor;
+mod context_server_store;
+mod debugger;
+mod ext_agent_tests;
+mod extension_agent_tests;
+mod git_store;
+mod image_store;
+mod lsp_command;
+mod lsp_store;
+mod manifest_tree;
+mod project_search;
+mod search;
+mod search_history;
+mod signature_help;
+mod task_inventory;
+mod trusted_worktrees;
+mod yarn;
+
+use anyhow::Result;
 use async_trait::async_trait;
 use buffer_diff::{
     BufferDiffEvent, DiffChanged, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
     assert_hunks,
 };
+use collections::{BTreeSet, HashMap, HashSet};
 use fs::FakeFs;
 use futures::{StreamExt, future};
 use git::{
     GitHostingProviderRegistry,
     repository::{RepoPath, repo_path},
-    status::{StatusCode, TrackedStatus},
+    status::{FileStatus, StatusCode, TrackedStatus},
 };
 use git2::RepositoryInitOptions;
-use gpui::{App, BackgroundExecutor, FutureExt, UpdateGlobal};
+use gpui::{
+    App, AppContext, BackgroundExecutor, BorrowAppContext, Entity, FutureExt, SharedString, Task,
+    UpdateGlobal,
+};
 use itertools::Itertools;
 use language::{
-    Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
-    DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
-    ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
-    ToolchainLister,
+    Buffer, BufferEvent, Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet,
+    DiagnosticSourceKind, DiskState, FakeLspAdapter, Language, LanguageConfig, LanguageMatcher,
+    LanguageName, LineEnding, ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point,
+    ToPoint, Toolchain, ToolchainList, ToolchainLister, ToolchainMetadata,
     language_settings::{LanguageSettingsContent, language_settings},
     markdown_lang, rust_lang, tree_sitter_typescript,
 };
 use lsp::{
-    DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
-    Uri, WillRenameFiles, notification::DidRenameFiles,
+    CodeActionKind, DiagnosticSeverity, DocumentChanges, FileOperationFilter, LanguageServerId,
+    LanguageServerName, NumberOrString, TextDocumentEdit, Uri, WillRenameFiles,
+    notification::DidRenameFiles,
 };
 use parking_lot::Mutex;
 use paths::{config_dir, global_gitignore_path, tasks_file};
 use postage::stream::Stream as _;
 use pretty_assertions::{assert_eq, assert_matches};
+use project::{
+    Event, TaskContexts,
+    git_store::{GitStoreEvent, Repository, RepositoryEvent, StatusEntry, pending_op},
+    search::{SearchQuery, SearchResult},
+    task_store::{TaskSettingsLocation, TaskStore},
+    *,
+};
 use rand::{Rng as _, rngs::StdRng};
 use serde_json::json;
+use settings::SettingsStore;
 #[cfg(not(windows))]
 use std::os;
 use std::{
     env, mem,
     num::NonZeroU32,
     ops::Range,
+    path::{Path, PathBuf},
     str::FromStr,
     sync::{Arc, OnceLock},
     task::Poll,
+    time::Duration,
 };
 use sum_tree::SumTree;
 use task::{ResolvedTask, ShellKind, TaskContext};
+use text::{Anchor, PointUtf16, ReplicaId, ToOffset, Unclipped};
 use unindent::Unindent as _;
 use util::{
     TryFutureExt as _, assert_set_eq, maybe, path,
-    paths::PathMatcher,
-    rel_path::rel_path,
+    paths::{PathMatcher, PathStyle},
+    rel_path::{RelPath, rel_path},
     test::{TempTree, marked_text_offsets},
     uri,
 };
@@ -1000,7 +1028,7 @@ async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext)
         .expect("should have one global task");
     project.update(cx, |project, cx| {
         let task_inventory = project
-            .task_store
+            .task_store()
             .read(cx)
             .task_inventory()
             .cloned()
@@ -1262,7 +1290,7 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
         .unwrap();
     cx.executor().run_until_parked();
     let servers = project.update(cx, |project, cx| {
-        project.lsp_store.update(cx, |this, cx| {
+        project.lsp_store().update(cx, |this, cx| {
             first_buffer.update(cx, |buffer, cx| {
                 this.running_language_servers_for_local_buffer(buffer, cx)
                     .map(|(adapter, server)| (adapter.clone(), server.clone()))
@@ -1291,7 +1319,7 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
         .unwrap();
     cx.executor().run_until_parked();
     let servers = project.update(cx, |project, cx| {
-        project.lsp_store.update(cx, |this, cx| {
+        project.lsp_store().update(cx, |this, cx| {
             second_project_buffer.update(cx, |buffer, cx| {
                 this.running_language_servers_for_local_buffer(buffer, cx)
                     .map(|(adapter, server)| (adapter.clone(), server.clone()))
@@ -1362,7 +1390,7 @@ async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
         .unwrap();
     cx.run_until_parked();
     let servers = project.update(cx, |project, cx| {
-        project.lsp_store.update(cx, |this, cx| {
+        project.lsp_store().update(cx, |this, cx| {
             second_project_buffer.update(cx, |buffer, cx| {
                 this.running_language_servers_for_local_buffer(buffer, cx)
                     .map(|(adapter, server)| (adapter.clone(), server.clone()))
@@ -3342,7 +3370,7 @@ async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
         .unwrap();
 
     project.update(cx, |project, cx| {
-        project.lsp_store.update(cx, |lsp_store, cx| {
+        project.lsp_store().update(cx, |lsp_store, cx| {
             lsp_store
                 .update_diagnostic_entries(
                     LanguageServerId(0),
@@ -3407,7 +3435,7 @@ async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppC
         .await;
 
     let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
-    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
+    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
 
     lsp_store.update(cx, |lsp_store, cx| {
         lsp_store
@@ -10512,7 +10540,7 @@ async fn test_repos_in_invisible_worktrees(
 
     let (_invisible_worktree, _) = project
         .update(cx, |project, cx| {
-            project.worktree_store.update(cx, |worktree_store, cx| {
+            project.worktree_store().update(cx, |worktree_store, cx| {
                 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
             })
         })
@@ -11172,7 +11200,7 @@ fn get_all_tasks(
     cx: &mut App,
 ) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
     let new_tasks = project.update(cx, |project, cx| {
-        project.task_store.update(cx, |task_store, cx| {
+        project.task_store().update(cx, |task_store, cx| {
             task_store.task_inventory().unwrap().update(cx, |this, cx| {
                 this.used_and_current_resolved_tasks(task_contexts, cx)
             })
@@ -11881,3 +11909,54 @@ async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
         assert!(!buffer.read_only(), "package.json should not be read-only");
     });
 }
+
+mod disable_ai_settings_tests {
+    use gpui::TestAppContext;
+    use project::*;
+    use settings::{Settings, SettingsStore};
+
+    #[gpui::test]
+    async fn test_disable_ai_settings_security(cx: &mut TestAppContext) {
+        cx.update(|cx| {
+            settings::init(cx);
+
+            // Test 1: Default is false (AI enabled)
+            assert!(
+                !DisableAiSettings::get_global(cx).disable_ai,
+                "Default should allow AI"
+            );
+        });
+
+        let disable_true = serde_json::json!({
+            "disable_ai": true
+        })
+        .to_string();
+        let disable_false = serde_json::json!({
+            "disable_ai": false
+        })
+        .to_string();
+
+        cx.update_global::<SettingsStore, _>(|store, cx| {
+            store.set_user_settings(&disable_false, cx).unwrap();
+            store.set_global_settings(&disable_true, cx).unwrap();
+        });
+        cx.update(|cx| {
+            assert!(
+                DisableAiSettings::get_global(cx).disable_ai,
+                "Local false cannot override global true"
+            );
+        });
+
+        cx.update_global::<SettingsStore, _>(|store, cx| {
+            store.set_global_settings(&disable_false, cx).unwrap();
+            store.set_user_settings(&disable_true, cx).unwrap();
+        });
+
+        cx.update(|cx| {
+            assert!(
+                DisableAiSettings::get_global(cx).disable_ai,
+                "Local false cannot override global true"
+            );
+        });
+    }
+}

crates/project/tests/integration/search.rs 🔗

@@ -0,0 +1,156 @@
+use project::search::SearchQuery;
+use text::Rope;
+use util::{
+    paths::{PathMatcher, PathStyle},
+    rel_path::RelPath,
+};
+
+#[test]
+fn path_matcher_creation_for_valid_paths() {
+    for valid_path in [
+        "file",
+        "Cargo.toml",
+        ".DS_Store",
+        "~/dir/another_dir/",
+        "./dir/file",
+        "dir/[a-z].txt",
+    ] {
+        let path_matcher = PathMatcher::new(&[valid_path.to_owned()], PathStyle::local())
+            .unwrap_or_else(|e| panic!("Valid path {valid_path} should be accepted, but got: {e}"));
+        assert!(
+            path_matcher.is_match(&RelPath::new(valid_path.as_ref(), PathStyle::local()).unwrap()),
+            "Path matcher for valid path {valid_path} should match itself"
+        )
+    }
+}
+
+#[test]
+fn path_matcher_creation_for_globs() {
+    for invalid_glob in ["dir/[].txt", "dir/[a-z.txt", "dir/{file"] {
+        match PathMatcher::new(&[invalid_glob.to_owned()], PathStyle::local()) {
+            Ok(_) => panic!("Invalid glob {invalid_glob} should not be accepted"),
+            Err(_expected) => {}
+        }
+    }
+
+    for valid_glob in [
+        "dir/?ile",
+        "dir/*.txt",
+        "dir/**/file",
+        "dir/[a-z].txt",
+        "{dir,file}",
+    ] {
+        match PathMatcher::new(&[valid_glob.to_owned()], PathStyle::local()) {
+            Ok(_expected) => {}
+            Err(e) => panic!("Valid glob should be accepted, but got: {e}"),
+        }
+    }
+}
+
+#[test]
+fn test_case_sensitive_pattern_items() {
+    let case_sensitive = false;
+    let search_query = SearchQuery::regex(
+        "test\\C",
+        false,
+        case_sensitive,
+        false,
+        false,
+        Default::default(),
+        Default::default(),
+        false,
+        None,
+    )
+    .expect("Should be able to create a regex SearchQuery");
+
+    assert_eq!(
+        search_query.case_sensitive(),
+        true,
+        "Case sensitivity should be enabled when \\C pattern item is present in the query."
+    );
+
+    let case_sensitive = true;
+    let search_query = SearchQuery::regex(
+        "test\\c",
+        true,
+        case_sensitive,
+        false,
+        false,
+        Default::default(),
+        Default::default(),
+        false,
+        None,
+    )
+    .expect("Should be able to create a regex SearchQuery");
+
+    assert_eq!(
+        search_query.case_sensitive(),
+        false,
+        "Case sensitivity should be disabled when \\c pattern item is present, even if initially set to true."
+    );
+
+    let case_sensitive = false;
+    let search_query = SearchQuery::regex(
+        "test\\c\\C",
+        false,
+        case_sensitive,
+        false,
+        false,
+        Default::default(),
+        Default::default(),
+        false,
+        None,
+    )
+    .expect("Should be able to create a regex SearchQuery");
+
+    assert_eq!(
+        search_query.case_sensitive(),
+        true,
+        "Case sensitivity should be enabled when \\C is the last pattern item, even after a \\c."
+    );
+
+    let case_sensitive = false;
+    let search_query = SearchQuery::regex(
+        "tests\\\\C",
+        false,
+        case_sensitive,
+        false,
+        false,
+        Default::default(),
+        Default::default(),
+        false,
+        None,
+    )
+    .expect("Should be able to create a regex SearchQuery");
+
+    assert_eq!(
+        search_query.case_sensitive(),
+        false,
+        "Case sensitivity should not be enabled when \\C pattern item is preceded by a backslash."
+    );
+}
+
+#[gpui::test]
+async fn test_multiline_regex(cx: &mut gpui::TestAppContext) {
+    let search_query = SearchQuery::regex(
+        "^hello$\n",
+        false,
+        false,
+        false,
+        false,
+        Default::default(),
+        Default::default(),
+        false,
+        None,
+    )
+    .expect("Should be able to create a regex SearchQuery");
+
+    use language::Buffer;
+    let text = Rope::from("hello\nworld\nhello\nworld");
+    let snapshot = cx
+        .update(|app| Buffer::build_snapshot(text, None, None, app))
+        .await;
+
+    let results = search_query.search(&snapshot, None).await;
+    assert_eq!(results, vec![0..6, 12..18]);
+}

crates/project/tests/integration/search_history.rs 🔗

@@ -0,0 +1,148 @@
+use project::search_history::{QueryInsertionBehavior, SearchHistory, SearchHistoryCursor};
+
+#[test]
+fn test_add() {
+    const MAX_HISTORY_LEN: usize = 20;
+    let mut search_history = SearchHistory::new(
+        Some(MAX_HISTORY_LEN),
+        QueryInsertionBehavior::ReplacePreviousIfContains,
+    );
+    let mut cursor = SearchHistoryCursor::default();
+
+    assert_eq!(
+        search_history.current(&cursor),
+        None,
+        "No current selection should be set for the default search history"
+    );
+
+    search_history.add(&mut cursor, "rust".to_string());
+    assert_eq!(
+        search_history.current(&cursor),
+        Some("rust"),
+        "Newly added item should be selected"
+    );
+
+    // check if duplicates are not added
+    search_history.add(&mut cursor, "rust".to_string());
+    assert_eq!(search_history.len(), 1, "Should not add a duplicate");
+    assert_eq!(search_history.current(&cursor), Some("rust"));
+
+    // check if new string containing the previous string replaces it
+    search_history.add(&mut cursor, "rustlang".to_string());
+    assert_eq!(
+        search_history.len(),
+        1,
+        "Should replace previous item if it's a substring"
+    );
+    assert_eq!(search_history.current(&cursor), Some("rustlang"));
+
+    // an item equal to the current one is still added when it's not the last item in history
+    search_history.add(&mut cursor, "php".to_string());
+    search_history.previous(&mut cursor);
+    assert_eq!(search_history.current(&cursor), Some("rustlang"));
+    search_history.add(&mut cursor, "rustlang".to_string());
+    assert_eq!(search_history.len(), 3, "Should add item");
+    assert_eq!(search_history.current(&cursor), Some("rustlang"));
+
+    // push enough items to exceed MAX_HISTORY_LEN and verify the cap
+    for i in 0..MAX_HISTORY_LEN * 2 {
+        search_history.add(&mut cursor, format!("item{i}"));
+    }
+    assert!(search_history.len() <= MAX_HISTORY_LEN);
+}
+
+#[test]
+fn test_next_and_previous() {
+    let mut search_history = SearchHistory::new(None, QueryInsertionBehavior::AlwaysInsert);
+    let mut cursor = SearchHistoryCursor::default();
+
+    assert_eq!(
+        search_history.next(&mut cursor),
+        None,
+        "Default search history should not have a next item"
+    );
+
+    search_history.add(&mut cursor, "Rust".to_string());
+    assert_eq!(search_history.next(&mut cursor), None);
+    search_history.add(&mut cursor, "JavaScript".to_string());
+    assert_eq!(search_history.next(&mut cursor), None);
+    search_history.add(&mut cursor, "TypeScript".to_string());
+    assert_eq!(search_history.next(&mut cursor), None);
+
+    assert_eq!(search_history.current(&cursor), Some("TypeScript"));
+
+    assert_eq!(search_history.previous(&mut cursor), Some("JavaScript"));
+    assert_eq!(search_history.current(&cursor), Some("JavaScript"));
+
+    assert_eq!(search_history.previous(&mut cursor), Some("Rust"));
+    assert_eq!(search_history.current(&cursor), Some("Rust"));
+
+    assert_eq!(search_history.previous(&mut cursor), None);
+    assert_eq!(search_history.current(&cursor), Some("Rust"));
+
+    assert_eq!(search_history.next(&mut cursor), Some("JavaScript"));
+    assert_eq!(search_history.current(&cursor), Some("JavaScript"));
+
+    assert_eq!(search_history.next(&mut cursor), Some("TypeScript"));
+    assert_eq!(search_history.current(&cursor), Some("TypeScript"));
+
+    assert_eq!(search_history.next(&mut cursor), None);
+    assert_eq!(search_history.current(&cursor), Some("TypeScript"));
+}
+
+#[test]
+fn test_reset_selection() {
+    let mut search_history = SearchHistory::new(None, QueryInsertionBehavior::AlwaysInsert);
+    let mut cursor = SearchHistoryCursor::default();
+
+    search_history.add(&mut cursor, "Rust".to_string());
+    search_history.add(&mut cursor, "JavaScript".to_string());
+    search_history.add(&mut cursor, "TypeScript".to_string());
+
+    assert_eq!(search_history.current(&cursor), Some("TypeScript"));
+    cursor.reset();
+    assert_eq!(search_history.current(&cursor), None);
+    assert_eq!(
+        search_history.previous(&mut cursor),
+        Some("TypeScript"),
+        "Should start from the end after reset on previous item query"
+    );
+
+    search_history.previous(&mut cursor);
+    assert_eq!(search_history.current(&cursor), Some("JavaScript"));
+    search_history.previous(&mut cursor);
+    assert_eq!(search_history.current(&cursor), Some("Rust"));
+
+    cursor.reset();
+    assert_eq!(search_history.current(&cursor), None);
+}
+
+#[test]
+fn test_multiple_cursors() {
+    let mut search_history = SearchHistory::new(None, QueryInsertionBehavior::AlwaysInsert);
+    let mut cursor1 = SearchHistoryCursor::default();
+    let mut cursor2 = SearchHistoryCursor::default();
+
+    search_history.add(&mut cursor1, "Rust".to_string());
+    search_history.add(&mut cursor1, "JavaScript".to_string());
+    search_history.add(&mut cursor1, "TypeScript".to_string());
+
+    search_history.add(&mut cursor2, "Python".to_string());
+    search_history.add(&mut cursor2, "Java".to_string());
+    search_history.add(&mut cursor2, "C++".to_string());
+
+    assert_eq!(search_history.current(&cursor1), Some("TypeScript"));
+    assert_eq!(search_history.current(&cursor2), Some("C++"));
+
+    assert_eq!(search_history.previous(&mut cursor1), Some("JavaScript"));
+    assert_eq!(search_history.previous(&mut cursor2), Some("Java"));
+
+    assert_eq!(search_history.next(&mut cursor1), Some("TypeScript"));
+    assert_eq!(search_history.next(&mut cursor1), Some("Python"));
+
+    cursor1.reset();
+    cursor2.reset();
+
+    assert_eq!(search_history.current(&cursor1), None);
+    assert_eq!(search_history.current(&cursor2), None);
+}
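
As a side note, a small sketch (not part of the diff) contrasting the two insertion behaviors used above; the `ReplacePreviousIfContains` expectation follows from `test_add`, while the `AlwaysInsert` expectation is an assumption based on its name and `test_next_and_previous`.

    let mut replacing =
        SearchHistory::new(None, QueryInsertionBehavior::ReplacePreviousIfContains);
    let mut cursor = SearchHistoryCursor::default();
    replacing.add(&mut cursor, "rust".to_string());
    replacing.add(&mut cursor, "rustlang".to_string());
    assert_eq!(replacing.len(), 1); // "rustlang" contains "rust", so it replaces it

    let mut appending = SearchHistory::new(None, QueryInsertionBehavior::AlwaysInsert);
    let mut cursor = SearchHistoryCursor::default();
    appending.add(&mut cursor, "rust".to_string());
    appending.add(&mut cursor, "rustlang".to_string());
    assert_eq!(appending.len(), 2); // both queries are kept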

crates/project/tests/integration/signature_help.rs 🔗

@@ -0,0 +1,517 @@
+use gpui::{FontWeight, HighlightStyle, SharedString, TestAppContext};
+use lsp::{Documentation, MarkupContent, MarkupKind};
+
+use project::lsp_command::signature_help::SignatureHelp;
+
+fn current_parameter() -> HighlightStyle {
+    HighlightStyle {
+        font_weight: Some(FontWeight::EXTRA_BOLD),
+        ..Default::default()
+    }
+}
+
+#[gpui::test]
+fn test_create_signature_help_markdown_string_1(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![lsp::SignatureInformation {
+            label: "fn test(foo: u8, bar: &str)".to_string(),
+            documentation: Some(Documentation::String(
+                "This is a test documentation".to_string(),
+            )),
+            parameters: Some(vec![
+                lsp::ParameterInformation {
+                    label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
+                    documentation: None,
+                },
+                lsp::ParameterInformation {
+                    label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
+                    documentation: None,
+                },
+            ]),
+            active_parameter: None,
+        }],
+        active_signature: Some(0),
+        active_parameter: Some(0),
+    };
+    let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(maybe_markdown.is_some());
+
+    let markdown = maybe_markdown.unwrap();
+    let signature = markdown.signatures[markdown.active_signature].clone();
+    let markdown = (signature.label, signature.highlights);
+    assert_eq!(
+        markdown,
+        (
+            SharedString::new("fn test(foo: u8, bar: &str)"),
+            vec![(8..15, current_parameter())]
+        )
+    );
+    assert_eq!(
+        signature
+            .documentation
+            .unwrap()
+            .update(cx, |documentation, _| documentation.source().to_owned()),
+        "This is a test documentation",
+    )
+}
+
+#[gpui::test]
+fn test_create_signature_help_markdown_string_2(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![lsp::SignatureInformation {
+            label: "fn test(foo: u8, bar: &str)".to_string(),
+            documentation: Some(Documentation::MarkupContent(MarkupContent {
+                kind: MarkupKind::Markdown,
+                value: "This is a test documentation".to_string(),
+            })),
+            parameters: Some(vec![
+                lsp::ParameterInformation {
+                    label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
+                    documentation: None,
+                },
+                lsp::ParameterInformation {
+                    label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
+                    documentation: None,
+                },
+            ]),
+            active_parameter: None,
+        }],
+        active_signature: Some(0),
+        active_parameter: Some(1),
+    };
+    let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(maybe_markdown.is_some());
+
+    let markdown = maybe_markdown.unwrap();
+    let signature = markdown.signatures[markdown.active_signature].clone();
+    let markdown = (signature.label, signature.highlights);
+    assert_eq!(
+        markdown,
+        (
+            SharedString::new("fn test(foo: u8, bar: &str)"),
+            vec![(17..26, current_parameter())]
+        )
+    );
+    assert_eq!(
+        signature
+            .documentation
+            .unwrap()
+            .update(cx, |documentation, _| documentation.source().to_owned()),
+        "This is a test documentation",
+    )
+}
+
+#[gpui::test]
+fn test_create_signature_help_markdown_string_3(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![
+            lsp::SignatureInformation {
+                label: "fn test1(foo: u8, bar: &str)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+            lsp::SignatureInformation {
+                label: "fn test2(hoge: String, fuga: bool)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("hoge: String".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("fuga: bool".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+        ],
+        active_signature: Some(0),
+        active_parameter: Some(0),
+    };
+    let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(maybe_markdown.is_some());
+
+    let markdown = maybe_markdown.unwrap();
+    let signature = markdown.signatures[markdown.active_signature].clone();
+    let markdown = (signature.label, signature.highlights);
+    assert_eq!(
+        markdown,
+        (
+            SharedString::new("fn test1(foo: u8, bar: &str)"),
+            vec![(9..16, current_parameter())]
+        )
+    );
+}
+
+#[gpui::test]
+fn test_create_signature_help_markdown_string_4(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![
+            lsp::SignatureInformation {
+                label: "fn test1(foo: u8, bar: &str)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+            lsp::SignatureInformation {
+                label: "fn test2(hoge: String, fuga: bool)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("hoge: String".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("fuga: bool".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+        ],
+        active_signature: Some(1),
+        active_parameter: Some(0),
+    };
+    let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(maybe_markdown.is_some());
+
+    let markdown = maybe_markdown.unwrap();
+    let signature = markdown.signatures[markdown.active_signature].clone();
+    let markdown = (signature.label, signature.highlights);
+    assert_eq!(
+        markdown,
+        (
+            SharedString::new("fn test2(hoge: String, fuga: bool)"),
+            vec![(9..21, current_parameter())]
+        )
+    );
+}
+
+#[gpui::test]
+fn test_create_signature_help_markdown_string_5(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![
+            lsp::SignatureInformation {
+                label: "fn test1(foo: u8, bar: &str)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+            lsp::SignatureInformation {
+                label: "fn test2(hoge: String, fuga: bool)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("hoge: String".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("fuga: bool".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+        ],
+        active_signature: Some(1),
+        active_parameter: Some(1),
+    };
+    let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(maybe_markdown.is_some());
+
+    let markdown = maybe_markdown.unwrap();
+    let signature = markdown.signatures[markdown.active_signature].clone();
+    let markdown = (signature.label, signature.highlights);
+    assert_eq!(
+        markdown,
+        (
+            SharedString::new("fn test2(hoge: String, fuga: bool)"),
+            vec![(23..33, current_parameter())]
+        )
+    );
+}
+
+#[gpui::test]
+fn test_create_signature_help_markdown_string_6(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![
+            lsp::SignatureInformation {
+                label: "fn test1(foo: u8, bar: &str)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+            lsp::SignatureInformation {
+                label: "fn test2(hoge: String, fuga: bool)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("hoge: String".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("fuga: bool".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+        ],
+        active_signature: Some(1),
+        active_parameter: None,
+    };
+    let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(maybe_markdown.is_some());
+
+    let markdown = maybe_markdown.unwrap();
+    let signature = markdown.signatures[markdown.active_signature].clone();
+    let markdown = (signature.label, signature.highlights);
+    assert_eq!(
+        markdown,
+        (
+            SharedString::new("fn test2(hoge: String, fuga: bool)"),
+            vec![(9..21, current_parameter())]
+        )
+    );
+}
+
+#[gpui::test]
+fn test_create_signature_help_markdown_string_7(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![
+            lsp::SignatureInformation {
+                label: "fn test1(foo: u8, bar: &str)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+            lsp::SignatureInformation {
+                label: "fn test2(hoge: String, fuga: bool)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("hoge: String".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("fuga: bool".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+            lsp::SignatureInformation {
+                label: "fn test3(one: usize, two: u32)".to_string(),
+                documentation: None,
+                parameters: Some(vec![
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("one: usize".to_string()),
+                        documentation: None,
+                    },
+                    lsp::ParameterInformation {
+                        label: lsp::ParameterLabel::Simple("two: u32".to_string()),
+                        documentation: None,
+                    },
+                ]),
+                active_parameter: None,
+            },
+        ],
+        active_signature: Some(2),
+        active_parameter: Some(1),
+    };
+    let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(maybe_markdown.is_some());
+
+    let markdown = maybe_markdown.unwrap();
+    let signature = markdown.signatures[markdown.active_signature].clone();
+    let markdown = (signature.label, signature.highlights);
+    assert_eq!(
+        markdown,
+        (
+            SharedString::new("fn test3(one: usize, two: u32)"),
+            vec![(21..29, current_parameter())]
+        )
+    );
+}
+
+#[gpui::test]
+fn test_create_signature_help_markdown_string_8(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![],
+        active_signature: None,
+        active_parameter: None,
+    };
+    let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(maybe_markdown.is_none());
+}
+
+#[gpui::test]
+fn test_create_signature_help_markdown_string_9(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![lsp::SignatureInformation {
+            label: "fn test(foo: u8, bar: &str)".to_string(),
+            documentation: None,
+            parameters: Some(vec![
+                lsp::ParameterInformation {
+                    label: lsp::ParameterLabel::LabelOffsets([8, 15]),
+                    documentation: None,
+                },
+                lsp::ParameterInformation {
+                    label: lsp::ParameterLabel::LabelOffsets([17, 26]),
+                    documentation: None,
+                },
+            ]),
+            active_parameter: None,
+        }],
+        active_signature: Some(0),
+        active_parameter: Some(0),
+    };
+    let maybe_markdown = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(maybe_markdown.is_some());
+
+    let markdown = maybe_markdown.unwrap();
+    let signature = markdown.signatures[markdown.active_signature].clone();
+    let markdown = (signature.label, signature.highlights);
+    assert_eq!(
+        markdown,
+        (
+            SharedString::new("fn test(foo: u8, bar: &str)"),
+            vec![(8..15, current_parameter())]
+        )
+    );
+}
+
+#[gpui::test]
+fn test_parameter_documentation(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![lsp::SignatureInformation {
+            label: "fn test(foo: u8, bar: &str)".to_string(),
+            documentation: Some(Documentation::String(
+                "This is a test documentation".to_string(),
+            )),
+            parameters: Some(vec![
+                lsp::ParameterInformation {
+                    label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
+                    documentation: Some(Documentation::String("The foo parameter".to_string())),
+                },
+                lsp::ParameterInformation {
+                    label: lsp::ParameterLabel::Simple("bar: &str".to_string()),
+                    documentation: Some(Documentation::String("The bar parameter".to_string())),
+                },
+            ]),
+            active_parameter: None,
+        }],
+        active_signature: Some(0),
+        active_parameter: Some(0),
+    };
+    let maybe_signature_help = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(maybe_signature_help.is_some());
+
+    let signature_help = maybe_signature_help.unwrap();
+    let signature = &signature_help.signatures[signature_help.active_signature];
+
+    // Check that parameter documentation is extracted
+    assert_eq!(signature.parameters.len(), 2);
+    assert_eq!(
+        signature.parameters[0]
+            .documentation
+            .as_ref()
+            .unwrap()
+            .update(cx, |documentation, _| documentation.source().to_owned()),
+        "The foo parameter",
+    );
+    assert_eq!(
+        signature.parameters[1]
+            .documentation
+            .as_ref()
+            .unwrap()
+            .update(cx, |documentation, _| documentation.source().to_owned()),
+        "The bar parameter",
+    );
+
+    // Check that the active parameter is correct
+    assert_eq!(signature.active_parameter, Some(0));
+}
+
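+// LSP parameter label offsets are given in UTF-16 code units, while the computed highlight
+// range indexes the label's UTF-8 bytes: each two-code-unit 🦀 covers four bytes here.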
+#[gpui::test]
+fn test_create_signature_help_implements_utf16_spec(cx: &mut TestAppContext) {
+    let signature_help = lsp::SignatureHelp {
+        signatures: vec![lsp::SignatureInformation {
+            label: "fn test(🦀: u8, 🦀: &str)".to_string(),
+            documentation: None,
+            parameters: Some(vec![
+                lsp::ParameterInformation {
+                    label: lsp::ParameterLabel::LabelOffsets([8, 10]),
+                    documentation: None,
+                },
+                lsp::ParameterInformation {
+                    label: lsp::ParameterLabel::LabelOffsets([16, 18]),
+                    documentation: None,
+                },
+            ]),
+            active_parameter: None,
+        }],
+        active_signature: Some(0),
+        active_parameter: Some(0),
+    };
+    let signature_help = cx.update(|cx| SignatureHelp::new(signature_help, None, None, cx));
+    assert!(signature_help.is_some());
+
+    let markdown = signature_help.unwrap();
+    let signature = markdown.signatures[markdown.active_signature].clone();
+    let markdown = (signature.label, signature.highlights);
+    assert_eq!(
+        markdown,
+        (
+            SharedString::new("fn test(🦀: u8, 🦀: &str)"),
+            vec![(8..12, current_parameter())]
+        )
+    );
+}

crates/project/tests/integration/task_inventory.rs 🔗

@@ -0,0 +1,626 @@
+use gpui::{AppContext, Entity, Task, TestAppContext};
+use itertools::Itertools;
+use paths::tasks_file;
+use pretty_assertions::assert_eq;
+use serde_json::json;
+use settings::SettingsLocation;
+use std::path::Path;
+use std::sync::Arc;
+use util::rel_path::rel_path;
+
+use project::task_store::{TaskSettingsLocation, TaskStore};
+
+use project::{WorktreeId, task_inventory::*};
+use test_inventory::*;
+
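+/// Helpers shared by the task inventory tests below.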
+mod test_inventory {
+    use gpui::{AppContext as _, Entity, Task, TestAppContext};
+    use itertools::Itertools;
+    use task::TaskContext;
+    use worktree::WorktreeId;
+
+    use super::{Inventory, TaskSourceKind};
+
+    pub(super) fn task_template_names(
+        inventory: &Entity<Inventory>,
+        worktree: Option<WorktreeId>,
+        cx: &mut TestAppContext,
+    ) -> Task<Vec<String>> {
+        let new_tasks = inventory.update(cx, |inventory, cx| {
+            inventory.list_tasks(None, None, worktree, cx)
+        });
+        cx.background_spawn(async move {
+            new_tasks
+                .await
+                .into_iter()
+                .map(|(_, task)| task.label)
+                .sorted()
+                .collect()
+        })
+    }
+
+    pub(super) fn register_task_used(
+        inventory: &Entity<Inventory>,
+        task_name: &str,
+        cx: &mut TestAppContext,
+    ) -> Task<()> {
+        let tasks = inventory.update(cx, |inventory, cx| {
+            inventory.list_tasks(None, None, None, cx)
+        });
+
+        let task_name = task_name.to_owned();
+        let inventory = inventory.clone();
+        cx.spawn(|mut cx| async move {
+            let (task_source_kind, task) = tasks
+                .await
+                .into_iter()
+                .find(|(_, task)| task.label == task_name)
+                .unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
+
+            let id_base = task_source_kind.to_id_base();
+            inventory.update(&mut cx, |inventory, _| {
+                inventory.task_scheduled(
+                    task_source_kind.clone(),
+                    task.resolve_task(&id_base, &TaskContext::default())
+                        .unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")),
+                )
+            });
+        })
+    }
+
+    pub(super) fn register_worktree_task_used(
+        inventory: &Entity<Inventory>,
+        worktree_id: WorktreeId,
+        task_name: &str,
+        cx: &mut TestAppContext,
+    ) -> Task<()> {
+        let tasks = inventory.update(cx, |inventory, cx| {
+            inventory.list_tasks(None, None, Some(worktree_id), cx)
+        });
+
+        let inventory = inventory.clone();
+        let task_name = task_name.to_owned();
+        cx.spawn(|mut cx| async move {
+            let (task_source_kind, task) = tasks
+                .await
+                .into_iter()
+                .find(|(_, task)| task.label == task_name)
+                .unwrap_or_else(|| panic!("Failed to find task with name {task_name}"));
+            let id_base = task_source_kind.to_id_base();
+            inventory.update(&mut cx, |inventory, _| {
+                inventory.task_scheduled(
+                    task_source_kind.clone(),
+                    task.resolve_task(&id_base, &TaskContext::default())
+                        .unwrap_or_else(|| panic!("Failed to resolve task with name {task_name}")),
+                );
+            });
+        })
+    }
+
+    pub(super) async fn list_tasks(
+        inventory: &Entity<Inventory>,
+        worktree: Option<WorktreeId>,
+        cx: &mut TestAppContext,
+    ) -> Vec<(TaskSourceKind, String)> {
+        let task_context = &TaskContext::default();
+        inventory
+            .update(cx, |inventory, cx| {
+                inventory.list_tasks(None, None, worktree, cx)
+            })
+            .await
+            .into_iter()
+            .filter_map(|(source_kind, task)| {
+                let id_base = source_kind.to_id_base();
+                Some((source_kind, task.resolve_task(&id_base, task_context)?))
+            })
+            .map(|(source_kind, resolved_task)| (source_kind, resolved_task.resolved_label))
+            .collect()
+    }
+}
+
+#[gpui::test]
+async fn test_task_list_sorting(cx: &mut TestAppContext) {
+    init_test(cx);
+    let inventory = cx.update(|cx| Inventory::new(cx));
+    let initial_tasks = resolved_task_names(&inventory, None, cx).await;
+    assert!(
+        initial_tasks.is_empty(),
+        "No tasks expected for empty inventory, but got {initial_tasks:?}"
+    );
+    let initial_tasks = task_template_names(&inventory, None, cx).await;
+    assert!(
+        initial_tasks.is_empty(),
+        "No tasks expected for empty inventory, but got {initial_tasks:?}"
+    );
+    cx.run_until_parked();
+    let expected_initial_state = [
+        "1_a_task".to_string(),
+        "1_task".to_string(),
+        "2_task".to_string(),
+        "3_task".to_string(),
+    ];
+
+    inventory.update(cx, |inventory, _| {
+        inventory
+            .update_file_based_tasks(
+                TaskSettingsLocation::Global(tasks_file()),
+                Some(&mock_tasks_from_names(
+                    expected_initial_state.iter().map(|name| name.as_str()),
+                )),
+            )
+            .unwrap();
+    });
+    assert_eq!(
+        task_template_names(&inventory, None, cx).await,
+        &expected_initial_state,
+    );
+    assert_eq!(
+        resolved_task_names(&inventory, None, cx).await,
+        &expected_initial_state,
+        "Tasks with equal amount of usages should be sorted alphanumerically"
+    );
+
+    register_task_used(&inventory, "2_task", cx).await;
+    assert_eq!(
+        task_template_names(&inventory, None, cx).await,
+        &expected_initial_state,
+    );
+    assert_eq!(
+        resolved_task_names(&inventory, None, cx).await,
+        vec![
+            "2_task".to_string(),
+            "1_a_task".to_string(),
+            "1_task".to_string(),
+            "3_task".to_string()
+        ],
+    );
+
+    register_task_used(&inventory, "1_task", cx).await;
+    register_task_used(&inventory, "1_task", cx).await;
+    register_task_used(&inventory, "1_task", cx).await;
+    register_task_used(&inventory, "3_task", cx).await;
+    assert_eq!(
+        task_template_names(&inventory, None, cx).await,
+        &expected_initial_state,
+    );
+    assert_eq!(
+        resolved_task_names(&inventory, None, cx).await,
+        vec![
+            "3_task".to_string(),
+            "1_task".to_string(),
+            "2_task".to_string(),
+            "1_a_task".to_string(),
+        ],
+        "Most recently used task should be at the top"
+    );
+
+    let worktree_id = WorktreeId::from_usize(0);
+    let local_worktree_location = SettingsLocation {
+        worktree_id,
+        path: rel_path("foo"),
+    };
+    inventory.update(cx, |inventory, _| {
+        inventory
+            .update_file_based_tasks(
+                TaskSettingsLocation::Worktree(local_worktree_location),
+                Some(&mock_tasks_from_names(["worktree_task_1"])),
+            )
+            .unwrap();
+    });
+    assert_eq!(
+        resolved_task_names(&inventory, None, cx).await,
+        vec![
+            "3_task".to_string(),
+            "1_task".to_string(),
+            "2_task".to_string(),
+            "1_a_task".to_string(),
+        ],
+        "Most recently used task should be at the top"
+    );
+    assert_eq!(
+        resolved_task_names(&inventory, Some(worktree_id), cx).await,
+        vec![
+            "3_task".to_string(),
+            "1_task".to_string(),
+            "2_task".to_string(),
+            "worktree_task_1".to_string(),
+            "1_a_task".to_string(),
+        ],
+    );
+    register_worktree_task_used(&inventory, worktree_id, "worktree_task_1", cx).await;
+    assert_eq!(
+        resolved_task_names(&inventory, Some(worktree_id), cx).await,
+        vec![
+            "worktree_task_1".to_string(),
+            "3_task".to_string(),
+            "1_task".to_string(),
+            "2_task".to_string(),
+            "1_a_task".to_string(),
+        ],
+        "Most recently used worktree task should be at the top"
+    );
+
+    inventory.update(cx, |inventory, _| {
+        inventory
+            .update_file_based_tasks(
+                TaskSettingsLocation::Global(tasks_file()),
+                Some(&mock_tasks_from_names(
+                    ["10_hello", "11_hello"]
+                        .into_iter()
+                        .chain(expected_initial_state.iter().map(|name| name.as_str())),
+                )),
+            )
+            .unwrap();
+    });
+    cx.run_until_parked();
+    let expected_updated_state = [
+        "10_hello".to_string(),
+        "11_hello".to_string(),
+        "1_a_task".to_string(),
+        "1_task".to_string(),
+        "2_task".to_string(),
+        "3_task".to_string(),
+    ];
+    assert_eq!(
+        task_template_names(&inventory, None, cx).await,
+        &expected_updated_state,
+    );
+    assert_eq!(
+        resolved_task_names(&inventory, None, cx).await,
+        vec![
+            "worktree_task_1".to_string(),
+            "1_a_task".to_string(),
+            "1_task".to_string(),
+            "2_task".to_string(),
+            "3_task".to_string(),
+            "10_hello".to_string(),
+            "11_hello".to_string(),
+        ],
+        "After global tasks update, worktree task usage is not erased and it's the first still; global task is back to regular order as its file was updated"
+    );
+
+    register_task_used(&inventory, "11_hello", cx).await;
+    assert_eq!(
+        task_template_names(&inventory, None, cx).await,
+        &expected_updated_state,
+    );
+    assert_eq!(
+        resolved_task_names(&inventory, None, cx).await,
+        vec![
+            "11_hello".to_string(),
+            "worktree_task_1".to_string(),
+            "1_a_task".to_string(),
+            "1_task".to_string(),
+            "2_task".to_string(),
+            "3_task".to_string(),
+            "10_hello".to_string(),
+        ],
+    );
+}
+
+#[gpui::test]
+async fn test_reloading_debug_scenarios(cx: &mut TestAppContext) {
+    init_test(cx);
+    let inventory = cx.update(|cx| Inventory::new(cx));
+    inventory.update(cx, |inventory, _| {
+        inventory
+            .update_file_based_scenarios(
+                TaskSettingsLocation::Global(Path::new("")),
+                Some(
+                    r#"
+                        [{
+                            "label": "test scenario",
+                            "adapter": "CodeLLDB",
+                            "request": "launch",
+                            "program": "wowzer",
+                        }]
+                        "#,
+                ),
+            )
+            .unwrap();
+    });
+
+    let (_, scenario) = inventory
+        .update(cx, |this, cx| {
+            this.list_debug_scenarios(&TaskContexts::default(), vec![], vec![], false, cx)
+        })
+        .await
+        .1
+        .first()
+        .unwrap()
+        .clone();
+
+    inventory.update(cx, |this, _| {
+        this.scenario_scheduled(scenario.clone(), Default::default(), None, None);
+    });
+
+    assert_eq!(
+        inventory
+            .update(cx, |this, cx| {
+                this.list_debug_scenarios(&Default::default(), vec![], vec![], false, cx)
+            })
+            .await
+            .0
+            .first()
+            .unwrap()
+            .clone()
+            .0,
+        scenario
+    );
+
+    inventory.update(cx, |this, _| {
+        this.update_file_based_scenarios(
+            TaskSettingsLocation::Global(Path::new("")),
+            Some(
+                r#"
+                        [{
+                            "label": "test scenario",
+                            "adapter": "Delve",
+                            "request": "launch",
+                            "program": "wowzer",
+                        }]
+                        "#,
+            ),
+        )
+        .unwrap();
+    });
+
+    assert_eq!(
+        inventory
+            .update(cx, |this, cx| {
+                this.list_debug_scenarios(&Default::default(), vec![], vec![], false, cx)
+            })
+            .await
+            .0
+            .first()
+            .unwrap()
+            .0
+            .adapter,
+        "Delve",
+    );
+
+    inventory.update(cx, |this, _| {
+        this.update_file_based_scenarios(
+            TaskSettingsLocation::Global(Path::new("")),
+            Some(
+                r#"
+                        [{
+                            "label": "testing scenario",
+                            "adapter": "Delve",
+                            "request": "launch",
+                            "program": "wowzer",
+                        }]
+                        "#,
+            ),
+        )
+        .unwrap();
+    });
+
+    assert!(
+        inventory
+            .update(cx, |this, cx| {
+                this.list_debug_scenarios(&TaskContexts::default(), vec![], vec![], false, cx)
+            })
+            .await
+            .0
+            .is_empty(),
+    );
+}
+
+#[gpui::test]
+async fn test_inventory_static_task_filters(cx: &mut TestAppContext) {
+    init_test(cx);
+    let inventory = cx.update(|cx| Inventory::new(cx));
+    let common_name = "common_task_name";
+    let worktree_1 = WorktreeId::from_usize(1);
+    let worktree_2 = WorktreeId::from_usize(2);
+
+    cx.run_until_parked();
+    let worktree_independent_tasks = vec![
+        (
+            TaskSourceKind::AbsPath {
+                id_base: "global tasks.json".into(),
+                abs_path: paths::tasks_file().clone(),
+            },
+            common_name.to_string(),
+        ),
+        (
+            TaskSourceKind::AbsPath {
+                id_base: "global tasks.json".into(),
+                abs_path: paths::tasks_file().clone(),
+            },
+            "static_source_1".to_string(),
+        ),
+        (
+            TaskSourceKind::AbsPath {
+                id_base: "global tasks.json".into(),
+                abs_path: paths::tasks_file().clone(),
+            },
+            "static_source_2".to_string(),
+        ),
+    ];
+    let worktree_1_tasks = [
+        (
+            TaskSourceKind::Worktree {
+                id: worktree_1,
+                directory_in_worktree: rel_path(".zed").into(),
+                id_base: "local worktree tasks from directory \".zed\"".into(),
+            },
+            common_name.to_string(),
+        ),
+        (
+            TaskSourceKind::Worktree {
+                id: worktree_1,
+                directory_in_worktree: rel_path(".zed").into(),
+                id_base: "local worktree tasks from directory \".zed\"".into(),
+            },
+            "worktree_1".to_string(),
+        ),
+    ];
+    let worktree_2_tasks = [
+        (
+            TaskSourceKind::Worktree {
+                id: worktree_2,
+                directory_in_worktree: rel_path(".zed").into(),
+                id_base: "local worktree tasks from directory \".zed\"".into(),
+            },
+            common_name.to_string(),
+        ),
+        (
+            TaskSourceKind::Worktree {
+                id: worktree_2,
+                directory_in_worktree: rel_path(".zed").into(),
+                id_base: "local worktree tasks from directory \".zed\"".into(),
+            },
+            "worktree_2".to_string(),
+        ),
+    ];
+
+    inventory.update(cx, |inventory, _| {
+        inventory
+            .update_file_based_tasks(
+                TaskSettingsLocation::Global(tasks_file()),
+                Some(&mock_tasks_from_names(
+                    worktree_independent_tasks
+                        .iter()
+                        .map(|(_, name)| name.as_str()),
+                )),
+            )
+            .unwrap();
+        inventory
+            .update_file_based_tasks(
+                TaskSettingsLocation::Worktree(SettingsLocation {
+                    worktree_id: worktree_1,
+                    path: rel_path(".zed"),
+                }),
+                Some(&mock_tasks_from_names(
+                    worktree_1_tasks.iter().map(|(_, name)| name.as_str()),
+                )),
+            )
+            .unwrap();
+        inventory
+            .update_file_based_tasks(
+                TaskSettingsLocation::Worktree(SettingsLocation {
+                    worktree_id: worktree_2,
+                    path: rel_path(".zed"),
+                }),
+                Some(&mock_tasks_from_names(
+                    worktree_2_tasks.iter().map(|(_, name)| name.as_str()),
+                )),
+            )
+            .unwrap();
+    });
+
+    assert_eq!(
+        list_tasks_sorted_by_last_used(&inventory, None, cx).await,
+        worktree_independent_tasks,
+        "Without a worktree, only worktree-independent tasks should be listed"
+    );
+    assert_eq!(
+        list_tasks_sorted_by_last_used(&inventory, Some(worktree_1), cx).await,
+        worktree_1_tasks
+            .iter()
+            .chain(worktree_independent_tasks.iter())
+            .cloned()
+            .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
+            .collect::<Vec<_>>(),
+    );
+    assert_eq!(
+        list_tasks_sorted_by_last_used(&inventory, Some(worktree_2), cx).await,
+        worktree_2_tasks
+            .iter()
+            .chain(worktree_independent_tasks.iter())
+            .cloned()
+            .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
+            .collect::<Vec<_>>(),
+    );
+
+    assert_eq!(
+        list_tasks(&inventory, None, cx).await,
+        worktree_independent_tasks,
+        "Without a worktree, only worktree-independent tasks should be listed"
+    );
+    assert_eq!(
+        list_tasks(&inventory, Some(worktree_1), cx).await,
+        worktree_1_tasks
+            .iter()
+            .chain(worktree_independent_tasks.iter())
+            .cloned()
+            .collect::<Vec<_>>(),
+    );
+    assert_eq!(
+        list_tasks(&inventory, Some(worktree_2), cx).await,
+        worktree_2_tasks
+            .iter()
+            .chain(worktree_independent_tasks.iter())
+            .cloned()
+            .collect::<Vec<_>>(),
+    );
+}
+
+fn init_test(_cx: &mut TestAppContext) {
+    zlog::init_test();
+    TaskStore::init(None);
+}
+
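+/// Resolves the used and current tasks for the given worktree and returns their labels,
+/// with used tasks listed first.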
+fn resolved_task_names(
+    inventory: &Entity<Inventory>,
+    worktree: Option<WorktreeId>,
+    cx: &mut TestAppContext,
+) -> Task<Vec<String>> {
+    let tasks = inventory.update(cx, |inventory, cx| {
+        let mut task_contexts = TaskContexts::default();
+        task_contexts.active_worktree_context =
+            worktree.map(|worktree| (worktree, Default::default()));
+
+        inventory.used_and_current_resolved_tasks(Arc::new(task_contexts), cx)
+    });
+
+    cx.background_spawn(async move {
+        let (used, current) = tasks.await;
+        used.into_iter()
+            .chain(current)
+            .map(|(_, task)| task.original_task().label.clone())
+            .collect()
+    })
+}
+
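+/// Serializes the given names into a tasks.json-style array where each task runs `echo <name>`.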
+fn mock_tasks_from_names<'a>(task_names: impl IntoIterator<Item = &'a str> + 'a) -> String {
+    serde_json::to_string(&serde_json::Value::Array(
+        task_names
+            .into_iter()
+            .map(|task_name| {
+                json!({
+                    "label": task_name,
+                    "command": "echo",
+                    "args": vec![task_name],
+                })
+            })
+            .collect::<Vec<_>>(),
+    ))
+    .unwrap()
+}
+
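+/// Resolves used and current tasks, then sorts them by source kind preference and label.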
+async fn list_tasks_sorted_by_last_used(
+    inventory: &Entity<Inventory>,
+    worktree: Option<WorktreeId>,
+    cx: &mut TestAppContext,
+) -> Vec<(TaskSourceKind, String)> {
+    let (used, current) = inventory
+        .update(cx, |inventory, cx| {
+            let mut task_contexts = TaskContexts::default();
+            task_contexts.active_worktree_context =
+                worktree.map(|worktree| (worktree, Default::default()));
+
+            inventory.used_and_current_resolved_tasks(Arc::new(task_contexts), cx)
+        })
+        .await;
+    let mut all = used;
+    all.extend(current);
+    all.into_iter()
+        .map(|(source_kind, task)| (source_kind, task.resolved_label))
+        .sorted_by_key(|(kind, label)| (task_source_kind_preference(kind), label.clone()))
+        .collect()
+}

crates/project/tests/integration/trusted_worktrees.rs 🔗

@@ -0,0 +1,957 @@
+use std::{cell::RefCell, path::PathBuf, rc::Rc};
+
+use collections::HashSet;
+use gpui::{Entity, TestAppContext};
+use serde_json::json;
+use settings::SettingsStore;
+use util::path;
+
+use project::{FakeFs, Project};
+
+use project::{trusted_worktrees::*, worktree_store::WorktreeStore};
+
+fn init_test(cx: &mut TestAppContext) {
+    cx.update(|cx| {
+        if cx.try_global::<SettingsStore>().is_none() {
+            let settings_store = SettingsStore::test(cx);
+            cx.set_global(settings_store);
+        }
+        if cx.try_global::<TrustedWorktrees>().is_some() {
+            cx.remove_global::<TrustedWorktrees>();
+        }
+    });
+}
+
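+/// Installs the `TrustedWorktrees` global with default database state and starts tracking
+/// the given worktree store.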
+fn init_trust_global(
+    worktree_store: Entity<WorktreeStore>,
+    cx: &mut TestAppContext,
+) -> Entity<TrustedWorktreesStore> {
+    cx.update(|cx| {
+        init(DbTrustedPaths::default(), cx);
+        track_worktree_trust(worktree_store, None, None, None, cx);
+        TrustedWorktrees::try_get_global(cx).expect("global should be set")
+    })
+}
+
+#[gpui::test]
+async fn test_single_worktree_trust(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(path!("/root"), json!({ "main.rs": "fn main() {}" }))
+        .await;
+
+    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let worktree_id = worktree_store.read_with(cx, |store, cx| {
+        store.worktrees().next().unwrap().read(cx).id()
+    });
+
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    let events: Rc<RefCell<Vec<TrustedWorktreesEvent>>> = Rc::default();
+    cx.update({
+        let events = events.clone();
+        |cx| {
+            cx.subscribe(&trusted_worktrees, move |_, event, _| {
+                events.borrow_mut().push(match event {
+                    TrustedWorktreesEvent::Trusted(host, paths) => {
+                        TrustedWorktreesEvent::Trusted(host.clone(), paths.clone())
+                    }
+                    TrustedWorktreesEvent::Restricted(host, paths) => {
+                        TrustedWorktreesEvent::Restricted(host.clone(), paths.clone())
+                    }
+                });
+            })
+        }
+    })
+    .detach();
+
+    let can_trust = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_id, cx)
+    });
+    assert!(!can_trust, "worktree should be restricted by default");
+
+    {
+        let events = events.borrow();
+        assert_eq!(events.len(), 1);
+        match &events[0] {
+            TrustedWorktreesEvent::Restricted(event_worktree_store, paths) => {
+                assert_eq!(event_worktree_store, &worktree_store.downgrade());
+                assert!(paths.contains(&PathTrust::Worktree(worktree_id)));
+            }
+            _ => panic!("expected Restricted event"),
+        }
+    }
+
+    let has_restricted = trusted_worktrees.read_with(cx, |store, cx| {
+        store.has_restricted_worktrees(&worktree_store, cx)
+    });
+    assert!(has_restricted, "should have restricted worktrees");
+
+    let restricted = trusted_worktrees.read_with(cx, |trusted_worktrees, cx| {
+        trusted_worktrees.restricted_worktrees(&worktree_store, cx)
+    });
+    assert!(restricted.iter().any(|(id, _)| *id == worktree_id));
+
+    events.borrow_mut().clear();
+
+    let can_trust_again = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_id, cx)
+    });
+    assert!(!can_trust_again, "worktree should still be restricted");
+    assert!(
+        events.borrow().is_empty(),
+        "no duplicate Restricted event on repeated can_trust"
+    );
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
+            cx,
+        );
+    });
+
+    {
+        let events = events.borrow();
+        assert_eq!(events.len(), 1);
+        match &events[0] {
+            TrustedWorktreesEvent::Trusted(event_worktree_store, paths) => {
+                assert_eq!(event_worktree_store, &worktree_store.downgrade());
+                assert!(paths.contains(&PathTrust::Worktree(worktree_id)));
+            }
+            _ => panic!("expected Trusted event"),
+        }
+    }
+
+    let can_trust_after = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_id, cx)
+    });
+    assert!(can_trust_after, "worktree should be trusted after trust()");
+
+    let has_restricted_after = trusted_worktrees.read_with(cx, |store, cx| {
+        store.has_restricted_worktrees(&worktree_store, cx)
+    });
+    assert!(
+        !has_restricted_after,
+        "should have no restricted worktrees after trust"
+    );
+
+    let restricted_after = trusted_worktrees.read_with(cx, |trusted_worktrees, cx| {
+        trusted_worktrees.restricted_worktrees(&worktree_store, cx)
+    });
+    assert!(
+        restricted_after.is_empty(),
+        "restricted set should be empty"
+    );
+}
+
+#[gpui::test]
+async fn test_single_file_worktree_trust(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(path!("/root"), json!({ "foo.rs": "fn foo() {}" }))
+        .await;
+
+    let project = Project::test(fs, [path!("/root/foo.rs").as_ref()], cx).await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let worktree_id = worktree_store.read_with(cx, |store, cx| {
+        let worktree = store.worktrees().next().unwrap();
+        let worktree = worktree.read(cx);
+        assert!(worktree.is_single_file(), "expected single-file worktree");
+        worktree.id()
+    });
+
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    let events: Rc<RefCell<Vec<TrustedWorktreesEvent>>> = Rc::default();
+    cx.update({
+        let events = events.clone();
+        |cx| {
+            cx.subscribe(&trusted_worktrees, move |_, event, _| {
+                events.borrow_mut().push(match event {
+                    TrustedWorktreesEvent::Trusted(host, paths) => {
+                        TrustedWorktreesEvent::Trusted(host.clone(), paths.clone())
+                    }
+                    TrustedWorktreesEvent::Restricted(host, paths) => {
+                        TrustedWorktreesEvent::Restricted(host.clone(), paths.clone())
+                    }
+                });
+            })
+        }
+    })
+    .detach();
+
+    let can_trust = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_id, cx)
+    });
+    assert!(
+        !can_trust,
+        "single-file worktree should be restricted by default"
+    );
+
+    {
+        let events = events.borrow();
+        assert_eq!(events.len(), 1);
+        match &events[0] {
+            TrustedWorktreesEvent::Restricted(event_worktree_store, paths) => {
+                assert_eq!(event_worktree_store, &worktree_store.downgrade());
+                assert!(paths.contains(&PathTrust::Worktree(worktree_id)));
+            }
+            _ => panic!("expected Restricted event"),
+        }
+    }
+
+    events.borrow_mut().clear();
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
+            cx,
+        );
+    });
+
+    {
+        let events = events.borrow();
+        assert_eq!(events.len(), 1);
+        match &events[0] {
+            TrustedWorktreesEvent::Trusted(event_worktree_store, paths) => {
+                assert_eq!(event_worktree_store, &worktree_store.downgrade());
+                assert!(paths.contains(&PathTrust::Worktree(worktree_id)));
+            }
+            _ => panic!("expected Trusted event"),
+        }
+    }
+
+    let can_trust_after = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_id, cx)
+    });
+    assert!(
+        can_trust_after,
+        "single-file worktree should be trusted after trust()"
+    );
+}
+
+#[gpui::test]
+async fn test_multiple_single_file_worktrees_trust_one(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/root"),
+        json!({
+            "a.rs": "fn a() {}",
+            "b.rs": "fn b() {}",
+            "c.rs": "fn c() {}"
+        }),
+    )
+    .await;
+
+    let project = Project::test(
+        fs,
+        [
+            path!("/root/a.rs").as_ref(),
+            path!("/root/b.rs").as_ref(),
+            path!("/root/c.rs").as_ref(),
+        ],
+        cx,
+    )
+    .await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let worktree_ids: Vec<_> = worktree_store.read_with(cx, |store, cx| {
+        store
+            .worktrees()
+            .map(|worktree| {
+                let worktree = worktree.read(cx);
+                assert!(worktree.is_single_file());
+                worktree.id()
+            })
+            .collect()
+    });
+    assert_eq!(worktree_ids.len(), 3);
+
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    for &worktree_id in &worktree_ids {
+        let can_trust = trusted_worktrees.update(cx, |store, cx| {
+            store.can_trust(&worktree_store, worktree_id, cx)
+        });
+        assert!(
+            !can_trust,
+            "worktree {worktree_id:?} should be restricted initially"
+        );
+    }
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::Worktree(worktree_ids[1])]),
+            cx,
+        );
+    });
+
+    let can_trust_0 = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_ids[0], cx)
+    });
+    let can_trust_1 = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_ids[1], cx)
+    });
+    let can_trust_2 = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_ids[2], cx)
+    });
+
+    assert!(!can_trust_0, "worktree 0 should still be restricted");
+    assert!(can_trust_1, "worktree 1 should be trusted");
+    assert!(!can_trust_2, "worktree 2 should still be restricted");
+}
+
+#[gpui::test]
+async fn test_two_directory_worktrees_separate_trust(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/projects"),
+        json!({
+            "project_a": { "main.rs": "fn main() {}" },
+            "project_b": { "lib.rs": "pub fn lib() {}" }
+        }),
+    )
+    .await;
+
+    let project = Project::test(
+        fs,
+        [
+            path!("/projects/project_a").as_ref(),
+            path!("/projects/project_b").as_ref(),
+        ],
+        cx,
+    )
+    .await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let worktree_ids: Vec<_> = worktree_store.read_with(cx, |store, cx| {
+        store
+            .worktrees()
+            .map(|worktree| {
+                let worktree = worktree.read(cx);
+                assert!(!worktree.is_single_file());
+                worktree.id()
+            })
+            .collect()
+    });
+    assert_eq!(worktree_ids.len(), 2);
+
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    let can_trust_a = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_ids[0], cx)
+    });
+    let can_trust_b = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_ids[1], cx)
+    });
+    assert!(!can_trust_a, "project_a should be restricted initially");
+    assert!(!can_trust_b, "project_b should be restricted initially");
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::Worktree(worktree_ids[0])]),
+            cx,
+        );
+    });
+
+    let can_trust_a = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_ids[0], cx)
+    });
+    let can_trust_b = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_ids[1], cx)
+    });
+    assert!(can_trust_a, "project_a should be trusted after trust()");
+    assert!(!can_trust_b, "project_b should still be restricted");
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::Worktree(worktree_ids[1])]),
+            cx,
+        );
+    });
+
+    let can_trust_a = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_ids[0], cx)
+    });
+    let can_trust_b = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_ids[1], cx)
+    });
+    assert!(can_trust_a, "project_a should remain trusted");
+    assert!(can_trust_b, "project_b should now be trusted");
+}
+
+#[gpui::test]
+async fn test_directory_worktree_trust_enables_single_file(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/"),
+        json!({
+            "project": { "main.rs": "fn main() {}" },
+            "standalone.rs": "fn standalone() {}"
+        }),
+    )
+    .await;
+
+    let project = Project::test(
+        fs,
+        [path!("/project").as_ref(), path!("/standalone.rs").as_ref()],
+        cx,
+    )
+    .await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let (dir_worktree_id, file_worktree_id) = worktree_store.read_with(cx, |store, cx| {
+        let worktrees: Vec<_> = store.worktrees().collect();
+        assert_eq!(worktrees.len(), 2);
+        let (dir_worktree, file_worktree) = if worktrees[0].read(cx).is_single_file() {
+            (&worktrees[1], &worktrees[0])
+        } else {
+            (&worktrees[0], &worktrees[1])
+        };
+        assert!(!dir_worktree.read(cx).is_single_file());
+        assert!(file_worktree.read(cx).is_single_file());
+        (dir_worktree.read(cx).id(), file_worktree.read(cx).id())
+    });
+
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    let can_trust_file = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, file_worktree_id, cx)
+    });
+    assert!(
+        !can_trust_file,
+        "single-file worktree should be restricted initially"
+    );
+
+    let can_trust_directory = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, dir_worktree_id, cx)
+    });
+    assert!(
+        !can_trust_directory,
+        "directory worktree should be restricted initially"
+    );
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::Worktree(dir_worktree_id)]),
+            cx,
+        );
+    });
+
+    let can_trust_dir = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, dir_worktree_id, cx)
+    });
+    let can_trust_file_after = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, file_worktree_id, cx)
+    });
+    assert!(can_trust_dir, "directory worktree should be trusted");
+    assert!(
+        can_trust_file_after,
+        "single-file worktree should be trusted after directory worktree trust"
+    );
+}
+
+#[gpui::test]
+async fn test_parent_path_trust_enables_single_file(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/"),
+        json!({
+            "project": { "main.rs": "fn main() {}" },
+            "standalone.rs": "fn standalone() {}"
+        }),
+    )
+    .await;
+
+    let project = Project::test(
+        fs,
+        [path!("/project").as_ref(), path!("/standalone.rs").as_ref()],
+        cx,
+    )
+    .await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let (dir_worktree_id, file_worktree_id) = worktree_store.read_with(cx, |store, cx| {
+        let worktrees: Vec<_> = store.worktrees().collect();
+        assert_eq!(worktrees.len(), 2);
+        let (dir_worktree, file_worktree) = if worktrees[0].read(cx).is_single_file() {
+            (&worktrees[1], &worktrees[0])
+        } else {
+            (&worktrees[0], &worktrees[1])
+        };
+        assert!(!dir_worktree.read(cx).is_single_file());
+        assert!(file_worktree.read(cx).is_single_file());
+        (dir_worktree.read(cx).id(), file_worktree.read(cx).id())
+    });
+
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    let can_trust_file = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, file_worktree_id, cx)
+    });
+    assert!(
+        !can_trust_file,
+        "single-file worktree should be restricted initially"
+    );
+
+    let can_trust_directory = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, dir_worktree_id, cx)
+    });
+    assert!(
+        !can_trust_directory,
+        "directory worktree should be restricted initially"
+    );
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::AbsPath(PathBuf::from(path!("/project")))]),
+            cx,
+        );
+    });
+
+    let can_trust_dir = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, dir_worktree_id, cx)
+    });
+    let can_trust_file_after = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, file_worktree_id, cx)
+    });
+    assert!(
+        can_trust_dir,
+        "directory worktree should be trusted after its parent is trusted"
+    );
+    assert!(
+        can_trust_file_after,
+        "single-file worktree should be trusted after directory worktree trust via its parent directory trust"
+    );
+}
+
+#[gpui::test]
+async fn test_abs_path_trust_covers_multiple_worktrees(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/root"),
+        json!({
+            "project_a": { "main.rs": "fn main() {}" },
+            "project_b": { "lib.rs": "pub fn lib() {}" }
+        }),
+    )
+    .await;
+
+    let project = Project::test(
+        fs,
+        [
+            path!("/root/project_a").as_ref(),
+            path!("/root/project_b").as_ref(),
+        ],
+        cx,
+    )
+    .await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let worktree_ids: Vec<_> = worktree_store.read_with(cx, |store, cx| {
+        store
+            .worktrees()
+            .map(|worktree| worktree.read(cx).id())
+            .collect()
+    });
+    assert_eq!(worktree_ids.len(), 2);
+
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    for &worktree_id in &worktree_ids {
+        let can_trust = trusted_worktrees.update(cx, |store, cx| {
+            store.can_trust(&worktree_store, worktree_id, cx)
+        });
+        assert!(!can_trust, "worktree should be restricted initially");
+    }
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::AbsPath(PathBuf::from(path!("/root")))]),
+            cx,
+        );
+    });
+
+    for &worktree_id in &worktree_ids {
+        let can_trust = trusted_worktrees.update(cx, |store, cx| {
+            store.can_trust(&worktree_store, worktree_id, cx)
+        });
+        assert!(
+            can_trust,
+            "worktree should be trusted after parent path trust"
+        );
+    }
+}
+
+#[gpui::test]
+async fn test_auto_trust_all(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/"),
+        json!({
+            "project_a": { "main.rs": "fn main() {}" },
+            "project_b": { "lib.rs": "pub fn lib() {}" },
+            "single.rs": "fn single() {}"
+        }),
+    )
+    .await;
+
+    let project = Project::test(
+        fs,
+        [
+            path!("/project_a").as_ref(),
+            path!("/project_b").as_ref(),
+            path!("/single.rs").as_ref(),
+        ],
+        cx,
+    )
+    .await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let worktree_ids: Vec<_> = worktree_store.read_with(cx, |store, cx| {
+        store
+            .worktrees()
+            .map(|worktree| worktree.read(cx).id())
+            .collect()
+    });
+    assert_eq!(worktree_ids.len(), 3);
+
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    let events: Rc<RefCell<Vec<TrustedWorktreesEvent>>> = Rc::default();
+    cx.update({
+        let events = events.clone();
+        |cx| {
+            cx.subscribe(&trusted_worktrees, move |_, event, _| {
+                events.borrow_mut().push(match event {
+                    TrustedWorktreesEvent::Trusted(host, paths) => {
+                        TrustedWorktreesEvent::Trusted(host.clone(), paths.clone())
+                    }
+                    TrustedWorktreesEvent::Restricted(host, paths) => {
+                        TrustedWorktreesEvent::Restricted(host.clone(), paths.clone())
+                    }
+                });
+            })
+        }
+    })
+    .detach();
+
+    for &worktree_id in &worktree_ids {
+        let can_trust = trusted_worktrees.update(cx, |store, cx| {
+            store.can_trust(&worktree_store, worktree_id, cx)
+        });
+        assert!(!can_trust, "worktree should be restricted initially");
+    }
+
+    let has_restricted = trusted_worktrees.read_with(cx, |store, cx| {
+        store.has_restricted_worktrees(&worktree_store, cx)
+    });
+    assert!(has_restricted, "should have restricted worktrees");
+
+    events.borrow_mut().clear();
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.auto_trust_all(cx);
+    });
+
+    for &worktree_id in &worktree_ids {
+        let can_trust = trusted_worktrees.update(cx, |store, cx| {
+            store.can_trust(&worktree_store, worktree_id, cx)
+        });
+        assert!(
+            can_trust,
+            "worktree {worktree_id:?} should be trusted after auto_trust_all"
+        );
+    }
+
+    let has_restricted_after = trusted_worktrees.read_with(cx, |store, cx| {
+        store.has_restricted_worktrees(&worktree_store, cx)
+    });
+    assert!(
+        !has_restricted_after,
+        "should have no restricted worktrees after auto_trust_all"
+    );
+
+    let trusted_event_count = events
+        .borrow()
+        .iter()
+        .filter(|e| matches!(e, TrustedWorktreesEvent::Trusted(..)))
+        .count();
+    assert!(
+        trusted_event_count > 0,
+        "should have emitted Trusted events"
+    );
+}
+
+#[gpui::test]
+async fn test_trust_restrict_trust_cycle(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(path!("/root"), json!({ "main.rs": "fn main() {}" }))
+        .await;
+
+    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let worktree_id = worktree_store.read_with(cx, |store, cx| {
+        store.worktrees().next().unwrap().read(cx).id()
+    });
+
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    let events: Rc<RefCell<Vec<TrustedWorktreesEvent>>> = Rc::default();
+    cx.update({
+        let events = events.clone();
+        |cx| {
+            cx.subscribe(&trusted_worktrees, move |_, event, _| {
+                events.borrow_mut().push(match event {
+                    TrustedWorktreesEvent::Trusted(host, paths) => {
+                        TrustedWorktreesEvent::Trusted(host.clone(), paths.clone())
+                    }
+                    TrustedWorktreesEvent::Restricted(host, paths) => {
+                        TrustedWorktreesEvent::Restricted(host.clone(), paths.clone())
+                    }
+                });
+            })
+        }
+    })
+    .detach();
+
+    let can_trust = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_id, cx)
+    });
+    assert!(!can_trust, "should be restricted initially");
+    assert_eq!(events.borrow().len(), 1);
+    events.borrow_mut().clear();
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
+            cx,
+        );
+    });
+    let can_trust = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_id, cx)
+    });
+    assert!(can_trust, "should be trusted after trust()");
+    assert_eq!(events.borrow().len(), 1);
+    assert!(matches!(
+        &events.borrow()[0],
+        TrustedWorktreesEvent::Trusted(..)
+    ));
+    events.borrow_mut().clear();
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.restrict(
+            worktree_store.downgrade(),
+            HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
+            cx,
+        );
+    });
+    let can_trust = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_id, cx)
+    });
+    assert!(!can_trust, "should be restricted after restrict()");
+    assert_eq!(events.borrow().len(), 1);
+    assert!(matches!(
+        &events.borrow()[0],
+        TrustedWorktreesEvent::Restricted(..)
+    ));
+
+    let has_restricted = trusted_worktrees.read_with(cx, |store, cx| {
+        store.has_restricted_worktrees(&worktree_store, cx)
+    });
+    assert!(has_restricted);
+    events.borrow_mut().clear();
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
+            cx,
+        );
+    });
+    let can_trust = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, worktree_id, cx)
+    });
+    assert!(can_trust, "should be trusted again after second trust()");
+    assert_eq!(events.borrow().len(), 1);
+    assert!(matches!(
+        &events.borrow()[0],
+        TrustedWorktreesEvent::Trusted(..)
+    ));
+
+    let has_restricted = trusted_worktrees.read_with(cx, |store, cx| {
+        store.has_restricted_worktrees(&worktree_store, cx)
+    });
+    assert!(!has_restricted);
+}
+
+#[gpui::test]
+async fn test_multi_host_trust_isolation(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/"),
+        json!({
+            "local_project": { "main.rs": "fn main() {}" },
+            "remote_project": { "lib.rs": "pub fn lib() {}" }
+        }),
+    )
+    .await;
+
+    let project = Project::test(
+        fs,
+        [
+            path!("/local_project").as_ref(),
+            path!("/remote_project").as_ref(),
+        ],
+        cx,
+    )
+    .await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let worktree_ids: Vec<_> = worktree_store.read_with(cx, |store, cx| {
+        store
+            .worktrees()
+            .map(|worktree| worktree.read(cx).id())
+            .collect()
+    });
+    assert_eq!(worktree_ids.len(), 2);
+    let local_worktree = worktree_ids[0];
+    let _remote_worktree = worktree_ids[1];
+
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    let can_trust_local = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, local_worktree, cx)
+    });
+    assert!(!can_trust_local, "local worktree restricted on host_a");
+
+    trusted_worktrees.update(cx, |store, cx| {
+        store.trust(
+            &worktree_store,
+            HashSet::from_iter([PathTrust::Worktree(local_worktree)]),
+            cx,
+        );
+    });
+
+    let can_trust_local_after = trusted_worktrees.update(cx, |store, cx| {
+        store.can_trust(&worktree_store, local_worktree, cx)
+    });
+    assert!(
+        can_trust_local_after,
+        "local worktree should be trusted on local host"
+    );
+}
+
+#[gpui::test]
+async fn test_invisible_worktree_stores_do_not_affect_trust(cx: &mut TestAppContext) {
+    init_test(cx);
+
+    let fs = FakeFs::new(cx.executor());
+    fs.insert_tree(
+        path!("/"),
+        json!({
+            "visible": { "main.rs": "fn main() {}" },
+            "other": { "a.rs": "fn other() {}" },
+            "invisible": { "b.rs": "fn invisible() {}" }
+        }),
+    )
+    .await;
+
+    let project = Project::test(fs, [path!("/visible").as_ref()], cx).await;
+    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
+    let visible_worktree_id = worktree_store.read_with(cx, |store, cx| {
+        store
+            .worktrees()
+            .find(|worktree| worktree.read(cx).root_dir().unwrap().ends_with("visible"))
+            .expect("visible worktree")
+            .read(cx)
+            .id()
+    });
+    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);
+
+    let events: Rc<RefCell<Vec<TrustedWorktreesEvent>>> = Rc::default();
+    cx.update({
+        let events = events.clone();
+        |cx| {
+            cx.subscribe(&trusted_worktrees, move |_, event, _| {
+                events.borrow_mut().push(match event {
+                    TrustedWorktreesEvent::Trusted(host, paths) => {
+                        TrustedWorktreesEvent::Trusted(host.clone(), paths.clone())
+                    }
+                    TrustedWorktreesEvent::Restricted(host, paths) => {
+                        TrustedWorktreesEvent::Restricted(host.clone(), paths.clone())
+                    }
+                });
+            })
+        }
+    })
+    .detach();
+
+    assert!(
+        !trusted_worktrees.update(cx, |store, cx| {
+            store.can_trust(&worktree_store, visible_worktree_id, cx)
+        }),
+        "visible worktree should be restricted initially"
+    );
+    assert_eq!(
+        HashSet::from_iter([visible_worktree_id]),
+        trusted_worktrees.read_with(cx, |store, _| {
+            store.restricted_worktrees_for_store(&worktree_store)
+        }),
+        "only visible worktree should be restricted",
+    );
+
+    let (new_visible_worktree, new_invisible_worktree) =
+        worktree_store.update(cx, |worktree_store, cx| {
+            let new_visible_worktree = worktree_store.create_worktree("/other", true, cx);
+            let new_invisible_worktree = worktree_store.create_worktree("/invisible", false, cx);
+            (new_visible_worktree, new_invisible_worktree)
+        });
+    let (new_visible_worktree, new_invisible_worktree) = (
+        new_visible_worktree.await.unwrap(),
+        new_invisible_worktree.await.unwrap(),
+    );
+
+    let new_visible_worktree_id =
+        new_visible_worktree.read_with(cx, |new_visible_worktree, _| new_visible_worktree.id());
+    assert!(
+        !trusted_worktrees.update(cx, |store, cx| {
+            store.can_trust(&worktree_store, new_visible_worktree_id, cx)
+        }),
+        "new visible worktree should be restricted initially",
+    );
+    assert!(
+        trusted_worktrees.update(cx, |store, cx| {
+            store.can_trust(&worktree_store, new_invisible_worktree.read(cx).id(), cx)
+        }),
+        "invisible worktree should be skipped",
+    );
+    assert_eq!(
+        HashSet::from_iter([visible_worktree_id, new_visible_worktree_id]),
+        trusted_worktrees.read_with(cx, |store, _| {
+            store.restricted_worktrees_for_store(&worktree_store)
+        }),
+        "only visible worktrees should be restricted"
+    );
+}
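
Taken together, the trusted-worktree tests above pin down a small trust lifecycle: a worktree starts out restricted, `trust()` marks it trusted (emitting `TrustedWorktreesEvent::Trusted`), and `restrict()` moves it back (emitting `TrustedWorktreesEvent::Restricted`). A minimal sketch of that round trip, reusing only the harness and calls already visible in the tests above (`init_test`, `init_trust_global`, `FakeFs`, `Project::test`), not additional API:

```rust
// Sketch only: assumes the same test harness as the tests above
// (gpui::test, TestAppContext, FakeFs, Project::test, init_test,
// init_trust_global); the test name is hypothetical.
#[gpui::test]
async fn trust_round_trip_sketch(cx: &mut TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/root"), json!({ "main.rs": "fn main() {}" }))
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
    let worktree_store = project.read_with(cx, |project, _| project.worktree_store());
    let worktree_id = worktree_store.read_with(cx, |store, cx| {
        store.worktrees().next().unwrap().read(cx).id()
    });
    let trusted_worktrees = init_trust_global(worktree_store.clone(), cx);

    // A freshly added worktree is restricted by default.
    assert!(!trusted_worktrees.update(cx, |store, cx| {
        store.can_trust(&worktree_store, worktree_id, cx)
    }));

    // Trusting it flips can_trust() and clears has_restricted_worktrees().
    trusted_worktrees.update(cx, |store, cx| {
        store.trust(
            &worktree_store,
            HashSet::from_iter([PathTrust::Worktree(worktree_id)]),
            cx,
        );
    });
    assert!(trusted_worktrees.update(cx, |store, cx| {
        store.can_trust(&worktree_store, worktree_id, cx)
    }));
    assert!(!trusted_worktrees.read_with(cx, |store, cx| {
        store.has_restricted_worktrees(&worktree_store, cx)
    }));
}
```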

crates/project/tests/integration/yarn.rs 🔗

@@ -0,0 +1,37 @@
+use project::yarn::*;
+use std::path::Path;
+
+#[test]
+fn test_resolve_virtual() {
+    let test_cases = vec![
+        (
+            "/path/to/some/folder/__virtual__/a0b1c2d3/0/subpath/to/file.dat",
+            Some(Path::new("/path/to/some/folder/subpath/to/file.dat")),
+        ),
+        (
+            "/path/to/some/folder/__virtual__/e4f5a0b1/0/subpath/to/file.dat",
+            Some(Path::new("/path/to/some/folder/subpath/to/file.dat")),
+        ),
+        (
+            "/path/to/some/folder/__virtual__/a0b1c2d3/1/subpath/to/file.dat",
+            Some(Path::new("/path/to/some/subpath/to/file.dat")),
+        ),
+        (
+            "/path/to/some/folder/__virtual__/a0b1c2d3/3/subpath/to/file.dat",
+            Some(Path::new("/path/subpath/to/file.dat")),
+        ),
+        ("/path/to/nonvirtual/", None),
+        ("/path/to/malformed/__virtual__", None),
+        ("/path/to/malformed/__virtual__/a0b1c2d3", None),
+        (
+            "/path/to/malformed/__virtual__/a0b1c2d3/this-should-be-a-number",
+            None,
+        ),
+    ];
+
+    for (input, expected) in test_cases {
+        let input_path = Path::new(input);
+        let resolved_path = resolve_virtual(input_path);
+        assert_eq!(resolved_path.as_deref(), expected);
+    }
+}
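
The cases above also document the shape of Yarn PnP virtual paths: `__virtual__/<hash>/<n>/<rest>` resolves to the directory containing `__virtual__`, walked up `n` further levels and re-joined with `<rest>`, while anything missing the hash or a numeric depth resolves to `None`. A self-contained sketch of that mapping follows; it illustrates the rule the test asserts and is not the crate's actual `resolve_virtual` implementation (the function name here is hypothetical):

```rust
use std::path::{Path, PathBuf};

// Sketch: resolve `.../__virtual__/<hash>/<n>/<rest>` by taking the
// directory containing `__virtual__`, popping `n` more components, and
// re-joining `<rest>`. Malformed inputs resolve to `None`.
fn resolve_virtual_sketch(path: &Path) -> Option<PathBuf> {
    let components: Vec<_> = path.iter().collect();
    let marker = components.iter().position(|c| *c == "__virtual__")?;
    // A hash and a numeric depth must follow the marker.
    let depth: usize = components.get(marker + 2)?.to_str()?.parse().ok()?;
    // Keep everything before `__virtual__`, minus `depth` components.
    let base_len = marker.checked_sub(depth)?;
    let mut resolved: PathBuf = components[..base_len].iter().copied().collect();
    // Re-attach whatever follows the depth component.
    resolved.extend(components.get(marker + 3..).unwrap_or(&[]).iter().copied());
    Some(resolved)
}

fn main() {
    // Depth 1: drop one component above the `__virtual__` directory.
    assert_eq!(
        resolve_virtual_sketch(Path::new(
            "/path/to/some/folder/__virtual__/a0b1c2d3/1/subpath/to/file.dat"
        )),
        Some(PathBuf::from("/path/to/some/subpath/to/file.dat"))
    );
    // No depth component: malformed, so no resolution.
    assert_eq!(
        resolve_virtual_sketch(Path::new("/path/to/malformed/__virtual__")),
        None
    );
}
```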