project_tests.rs

    1#![allow(clippy::format_collect)]
    2
    3use crate::{
    4    Event,
    5    git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
    6    lsp_store::{DocumentDiagnostics, DocumentDiagnosticsUpdate},
    7    task_inventory::TaskContexts,
    8    task_store::TaskSettingsLocation,
    9    *,
   10};
   11use async_trait::async_trait;
   12use buffer_diff::{
   13    BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
   14    DiffHunkStatusKind, assert_hunks,
   15};
   16use fs::FakeFs;
   17use futures::{StreamExt, future};
   18use git::{
   19    GitHostingProviderRegistry,
   20    repository::{RepoPath, repo_path},
   21    status::{StatusCode, TrackedStatus},
   22};
   23use git2::RepositoryInitOptions;
   24use gpui::{App, BackgroundExecutor, FutureExt, UpdateGlobal};
   25use itertools::Itertools;
   26use language::{
   27    Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
   28    DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
   29    ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
   30    ToolchainLister,
   31    language_settings::{LanguageSettingsContent, language_settings},
   32    rust_lang, tree_sitter_typescript,
   33};
   34use lsp::{
   35    DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
   36    Uri, WillRenameFiles, notification::DidRenameFiles,
   37};
   38use parking_lot::Mutex;
   39use paths::{config_dir, global_gitignore_path, tasks_file};
   40use postage::stream::Stream as _;
   41use pretty_assertions::{assert_eq, assert_matches};
   42use rand::{Rng as _, rngs::StdRng};
   43use serde_json::json;
   44#[cfg(not(windows))]
   45use std::os;
   46use std::{
   47    env, mem,
   48    num::NonZeroU32,
   49    ops::Range,
   50    str::FromStr,
   51    sync::{Arc, OnceLock},
   52    task::Poll,
   53};
   54use sum_tree::SumTree;
   55use task::{ResolvedTask, ShellKind, TaskContext};
   56use unindent::Unindent as _;
   57use util::{
   58    TryFutureExt as _, assert_set_eq, maybe, path,
   59    paths::PathMatcher,
   60    rel_path::rel_path,
   61    test::{TempTree, marked_text_offsets},
   62    uri,
   63};
   64use worktree::WorktreeModelHandle as _;
   65
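     // Spawns a real OS thread that does blocking filesystem and sleep work, then confirms the async
     // test can await its message over an unbounded channel (parking is explicitly allowed).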
   66#[gpui::test]
   67async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
   68    cx.executor().allow_parking();
   69
   70    let (tx, mut rx) = futures::channel::mpsc::unbounded();
   71    let _thread = std::thread::spawn(move || {
   72        #[cfg(not(target_os = "windows"))]
   73        std::fs::metadata("/tmp").unwrap();
   74        #[cfg(target_os = "windows")]
   75        std::fs::metadata("C:/Windows").unwrap();
   76        std::thread::sleep(Duration::from_millis(1000));
   77        tx.unbounded_send(1).unwrap();
   78    });
   79    rx.next().await.unwrap();
   80}
   81
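     // Confirms that blocking work offloaded via `smol::unblock` can be awaited from a task spawned
     // on the foreground executor.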
   82#[gpui::test]
   83async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
   84    cx.executor().allow_parking();
   85
   86    let io_task = smol::unblock(move || {
   87        println!("sleeping on thread {:?}", std::thread::current().id());
   88        std::thread::sleep(Duration::from_millis(10));
   89        1
   90    });
   91
   92    let task = cx.foreground_executor().spawn(async move {
   93        io_task.await;
   94    });
   95
   96    task.await;
   97}
   98
   99// NOTE:
  100// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user, and thus
  101// we assume that they are not supported out of the box.
  102#[cfg(not(windows))]
  103#[gpui::test]
  104async fn test_symlinks(cx: &mut gpui::TestAppContext) {
  105    init_test(cx);
  106    cx.executor().allow_parking();
  107
  108    let dir = TempTree::new(json!({
  109        "root": {
  110            "apple": "",
  111            "banana": {
  112                "carrot": {
  113                    "date": "",
  114                    "endive": "",
  115                }
  116            },
  117            "fennel": {
  118                "grape": "",
  119            }
  120        }
  121    }));
  122
  123    let root_link_path = dir.path().join("root_link");
  124    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
  125    os::unix::fs::symlink(
  126        dir.path().join("root/fennel"),
  127        dir.path().join("root/finnochio"),
  128    )
  129    .unwrap();
  130
  131    let project = Project::test(
  132        Arc::new(RealFs::new(None, cx.executor())),
  133        [root_link_path.as_ref()],
  134        cx,
  135    )
  136    .await;
  137
  138    project.update(cx, |project, cx| {
  139        let tree = project.worktrees(cx).next().unwrap().read(cx);
  140        assert_eq!(tree.file_count(), 5);
  141        assert_eq!(
  142            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
  143            tree.entry_for_path(rel_path("finnochio/grape"))
  144                .unwrap()
  145                .inode
  146        );
  147    });
  148}
  149
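     // Checks that .editorconfig settings take precedence over .zed/settings.json and that a nested
     // .editorconfig overrides values from the one at the worktree root.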
  150#[gpui::test]
  151async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
  152    init_test(cx);
  153
  154    let dir = TempTree::new(json!({
  155        ".editorconfig": r#"
  156        root = true
  157        [*.rs]
  158            indent_style = tab
  159            indent_size = 3
  160            end_of_line = lf
  161            insert_final_newline = true
  162            trim_trailing_whitespace = true
  163            max_line_length = 120
  164        [*.js]
  165            tab_width = 10
  166            max_line_length = off
  167        "#,
  168        ".zed": {
  169            "settings.json": r#"{
  170                "tab_size": 8,
  171                "hard_tabs": false,
  172                "ensure_final_newline_on_save": false,
  173                "remove_trailing_whitespace_on_save": false,
  174                "preferred_line_length": 64,
  175                "soft_wrap": "editor_width",
  176            }"#,
  177        },
  178        "a.rs": "fn a() {\n    A\n}",
  179        "b": {
  180            ".editorconfig": r#"
  181            [*.rs]
  182                indent_size = 2
  183                max_line_length = off,
  184            "#,
  185            "b.rs": "fn b() {\n    B\n}",
  186        },
  187        "c.js": "def c\n  C\nend",
  188        "README.json": "tabs are better\n",
  189    }));
  190
  191    let path = dir.path();
  192    let fs = FakeFs::new(cx.executor());
  193    fs.insert_tree_from_real_fs(path, path).await;
  194    let project = Project::test(fs, [path], cx).await;
  195
  196    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
  197    language_registry.add(js_lang());
  198    language_registry.add(json_lang());
  199    language_registry.add(rust_lang());
  200
  201    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
  202
  203    cx.executor().run_until_parked();
  204
  205    cx.update(|cx| {
  206        let tree = worktree.read(cx);
  207        let settings_for = |path: &str| {
  208            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
  209            let file = File::for_entry(file_entry, worktree.clone());
  210            let file_language = project
  211                .read(cx)
  212                .languages()
  213                .load_language_for_file_path(file.path.as_std_path());
  214            let file_language = cx
  215                .background_executor()
  216                .block(file_language)
  217                .expect("Failed to get file language");
  218            let file = file as _;
  219            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
  220        };
  221
  222        let settings_a = settings_for("a.rs");
  223        let settings_b = settings_for("b/b.rs");
  224        let settings_c = settings_for("c.js");
  225        let settings_readme = settings_for("README.json");
  226
  227        // .editorconfig overrides .zed/settings
  228        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
  229        assert_eq!(settings_a.hard_tabs, true);
  230        assert_eq!(settings_a.ensure_final_newline_on_save, true);
  231        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
  232        assert_eq!(settings_a.preferred_line_length, 120);
  233
  234        // .editorconfig in b/ overrides .editorconfig in root
  235        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
  236
  237        // "indent_size" is not set, so "tab_width" is used
  238        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
  239
  240        // When max_line_length is "off", fall back to the value in .zed/settings.json
  241        assert_eq!(settings_b.preferred_line_length, 64);
  242        assert_eq!(settings_c.preferred_line_length, 64);
  243
  244        // README.json should not be affected by the .editorconfig glob "*.rs"
  245        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
  246    });
  247}
  248
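     // Checks that a `git_hosting_providers` entry in project settings registers a custom provider
     // globally, and that removing the setting unregisters it again.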
  249#[gpui::test]
  250async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
  251    init_test(cx);
  252    cx.update(|cx| {
  253        GitHostingProviderRegistry::default_global(cx);
  254        git_hosting_providers::init(cx);
  255    });
  256
  257    let fs = FakeFs::new(cx.executor());
  258    let str_path = path!("/dir");
  259    let path = Path::new(str_path);
  260
  261    fs.insert_tree(
  262        path!("/dir"),
  263        json!({
  264            ".zed": {
  265                "settings.json": r#"{
  266                    "git_hosting_providers": [
  267                        {
  268                            "provider": "gitlab",
  269                            "base_url": "https://google.com",
  270                            "name": "foo"
  271                        }
  272                    ]
  273                }"#
  274            },
  275        }),
  276    )
  277    .await;
  278
  279    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
  280    let (_worktree, _) =
  281        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
  282    cx.executor().run_until_parked();
  283
  284    cx.update(|cx| {
  285        let provider = GitHostingProviderRegistry::global(cx);
  286        assert!(
  287            provider
  288                .list_hosting_providers()
  289                .into_iter()
  290                .any(|provider| provider.name() == "foo")
  291        );
  292    });
  293
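         // Clearing the project settings should unregister the custom provider.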
  294    fs.atomic_write(
  295        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
  296        "{}".into(),
  297    )
  298    .await
  299    .unwrap();
  300
  301    cx.run_until_parked();
  302
  303    cx.update(|cx| {
  304        let provider = GitHostingProviderRegistry::global(cx);
  305        assert!(
  306            !provider
  307                .list_hosting_providers()
  308                .into_iter()
  309                .any(|provider| provider.name() == "foo")
  310        );
  311    });
  312}
  313
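     // Checks that nested .zed/settings.json and .zed/tasks.json files are applied per directory, and
     // that worktree-local, recently scheduled, and global tasks are listed with the expected contents.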
  314#[gpui::test]
  315async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
  316    init_test(cx);
  317    TaskStore::init(None);
  318
  319    let fs = FakeFs::new(cx.executor());
  320    fs.insert_tree(
  321        path!("/dir"),
  322        json!({
  323            ".zed": {
  324                "settings.json": r#"{ "tab_size": 8 }"#,
  325                "tasks.json": r#"[{
  326                    "label": "cargo check all",
  327                    "command": "cargo",
  328                    "args": ["check", "--all"]
  329                },]"#,
  330            },
  331            "a": {
  332                "a.rs": "fn a() {\n    A\n}"
  333            },
  334            "b": {
  335                ".zed": {
  336                    "settings.json": r#"{ "tab_size": 2 }"#,
  337                    "tasks.json": r#"[{
  338                        "label": "cargo check",
  339                        "command": "cargo",
  340                        "args": ["check"]
  341                    },]"#,
  342                },
  343                "b.rs": "fn b() {\n  B\n}"
  344            }
  345        }),
  346    )
  347    .await;
  348
  349    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
  350    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
  351
  352    cx.executor().run_until_parked();
  353    let worktree_id = cx.update(|cx| {
  354        project.update(cx, |project, cx| {
  355            project.worktrees(cx).next().unwrap().read(cx).id()
  356        })
  357    });
  358
  359    let mut task_contexts = TaskContexts::default();
  360    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
  361    let task_contexts = Arc::new(task_contexts);
  362
  363    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
  364        id: worktree_id,
  365        directory_in_worktree: rel_path(".zed").into(),
  366        id_base: "local worktree tasks from directory \".zed\"".into(),
  367    };
  368
  369    let all_tasks = cx
  370        .update(|cx| {
  371            let tree = worktree.read(cx);
  372
  373            let file_a = File::for_entry(
  374                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
  375                worktree.clone(),
  376            ) as _;
  377            let settings_a = language_settings(None, Some(&file_a), cx);
  378            let file_b = File::for_entry(
  379                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
  380                worktree.clone(),
  381            ) as _;
  382            let settings_b = language_settings(None, Some(&file_b), cx);
  383
  384            assert_eq!(settings_a.tab_size.get(), 8);
  385            assert_eq!(settings_b.tab_size.get(), 2);
  386
  387            get_all_tasks(&project, task_contexts.clone(), cx)
  388        })
  389        .await
  390        .into_iter()
  391        .map(|(source_kind, task)| {
  392            let resolved = task.resolved;
  393            (
  394                source_kind,
  395                task.resolved_label,
  396                resolved.args,
  397                resolved.env,
  398            )
  399        })
  400        .collect::<Vec<_>>();
  401    assert_eq!(
  402        all_tasks,
  403        vec![
  404            (
  405                TaskSourceKind::Worktree {
  406                    id: worktree_id,
  407                    directory_in_worktree: rel_path("b/.zed").into(),
  408                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
  409                },
  410                "cargo check".to_string(),
  411                vec!["check".to_string()],
  412                HashMap::default(),
  413            ),
  414            (
  415                topmost_local_task_source_kind.clone(),
  416                "cargo check all".to_string(),
  417                vec!["check".to_string(), "--all".to_string()],
  418                HashMap::default(),
  419            ),
  420        ]
  421    );
  422
  423    let (_, resolved_task) = cx
  424        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
  425        .await
  426        .into_iter()
  427        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
  428        .expect("should have one global task");
  429    project.update(cx, |project, cx| {
  430        let task_inventory = project
  431            .task_store
  432            .read(cx)
  433            .task_inventory()
  434            .cloned()
  435            .unwrap();
  436        task_inventory.update(cx, |inventory, _| {
  437            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
  438            inventory
  439                .update_file_based_tasks(
  440                    TaskSettingsLocation::Global(tasks_file()),
  441                    Some(
  442                        &json!([{
  443                            "label": "cargo check unstable",
  444                            "command": "cargo",
  445                            "args": [
  446                                "check",
  447                                "--all",
  448                                "--all-targets"
  449                            ],
  450                            "env": {
  451                                "RUSTFLAGS": "-Zunstable-options"
  452                            }
  453                        }])
  454                        .to_string(),
  455                    ),
  456                )
  457                .unwrap();
  458        });
  459    });
  460    cx.run_until_parked();
  461
  462    let all_tasks = cx
  463        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
  464        .await
  465        .into_iter()
  466        .map(|(source_kind, task)| {
  467            let resolved = task.resolved;
  468            (
  469                source_kind,
  470                task.resolved_label,
  471                resolved.args,
  472                resolved.env,
  473            )
  474        })
  475        .collect::<Vec<_>>();
  476    assert_eq!(
  477        all_tasks,
  478        vec![
  479            (
  480                topmost_local_task_source_kind.clone(),
  481                "cargo check all".to_string(),
  482                vec!["check".to_string(), "--all".to_string()],
  483                HashMap::default(),
  484            ),
  485            (
  486                TaskSourceKind::Worktree {
  487                    id: worktree_id,
  488                    directory_in_worktree: rel_path("b/.zed").into(),
  489                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
  490                },
  491                "cargo check".to_string(),
  492                vec!["check".to_string()],
  493                HashMap::default(),
  494            ),
  495            (
  496                TaskSourceKind::AbsPath {
  497                    abs_path: paths::tasks_file().clone(),
  498                    id_base: "global tasks.json".into(),
  499                },
  500                "cargo check unstable".to_string(),
  501                vec![
  502                    "check".to_string(),
  503                    "--all".to_string(),
  504                    "--all-targets".to_string(),
  505                ],
  506                HashMap::from_iter(Some((
  507                    "RUSTFLAGS".to_string(),
  508                    "-Zunstable-options".to_string()
  509                ))),
  510            ),
  511        ]
  512    );
  513}
  514
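     // Checks that a worktree task referencing $ZED_WORKTREE_ROOT only resolves when a worktree
     // context supplying that variable is present.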
  515#[gpui::test]
  516async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
  517    init_test(cx);
  518    TaskStore::init(None);
  519
  520    let fs = FakeFs::new(cx.executor());
  521    fs.insert_tree(
  522        path!("/dir"),
  523        json!({
  524            ".zed": {
  525                "tasks.json": r#"[{
  526                    "label": "test worktree root",
  527                    "command": "echo $ZED_WORKTREE_ROOT"
  528                }]"#,
  529            },
  530            "a": {
  531                "a.rs": "fn a() {\n    A\n}"
  532            },
  533        }),
  534    )
  535    .await;
  536
  537    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
  538    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
  539
  540    cx.executor().run_until_parked();
  541    let worktree_id = cx.update(|cx| {
  542        project.update(cx, |project, cx| {
  543            project.worktrees(cx).next().unwrap().read(cx).id()
  544        })
  545    });
  546
  547    let active_non_worktree_item_tasks = cx
  548        .update(|cx| {
  549            get_all_tasks(
  550                &project,
  551                Arc::new(TaskContexts {
  552                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
  553                    active_worktree_context: None,
  554                    other_worktree_contexts: Vec::new(),
  555                    lsp_task_sources: HashMap::default(),
  556                    latest_selection: None,
  557                }),
  558                cx,
  559            )
  560        })
  561        .await;
  562    assert!(
  563        active_non_worktree_item_tasks.is_empty(),
  564        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
  565    );
  566
  567    let active_worktree_tasks = cx
  568        .update(|cx| {
  569            get_all_tasks(
  570                &project,
  571                Arc::new(TaskContexts {
  572                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
  573                    active_worktree_context: Some((worktree_id, {
  574                        let mut worktree_context = TaskContext::default();
  575                        worktree_context
  576                            .task_variables
  577                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
  578                        worktree_context
  579                    })),
  580                    other_worktree_contexts: Vec::new(),
  581                    lsp_task_sources: HashMap::default(),
  582                    latest_selection: None,
  583                }),
  584                cx,
  585            )
  586        })
  587        .await;
  588    assert_eq!(
  589        active_worktree_tasks
  590            .into_iter()
  591            .map(|(source_kind, task)| {
  592                let resolved = task.resolved;
  593                (source_kind, resolved.command.unwrap())
  594            })
  595            .collect::<Vec<_>>(),
  596        vec![(
  597            TaskSourceKind::Worktree {
  598                id: worktree_id,
  599                directory_in_worktree: rel_path(".zed").into(),
  600                id_base: "local worktree tasks from directory \".zed\"".into(),
  601            },
  602            "echo /dir".to_string(),
  603        )]
  604    );
  605}
  606
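     // Checks that activating a different toolchain for one subproject spawns a separate instance of
     // the same language server within a single worktree.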
  607#[gpui::test]
  608async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
  609    cx: &mut gpui::TestAppContext,
  610) {
  611    pub(crate) struct PyprojectTomlManifestProvider;
  612
  613    impl ManifestProvider for PyprojectTomlManifestProvider {
  614        fn name(&self) -> ManifestName {
  615            SharedString::new_static("pyproject.toml").into()
  616        }
  617
  618        fn search(
  619            &self,
  620            ManifestQuery {
  621                path,
  622                depth,
  623                delegate,
  624            }: ManifestQuery,
  625        ) -> Option<Arc<RelPath>> {
  626            for path in path.ancestors().take(depth) {
  627                let p = path.join(rel_path("pyproject.toml"));
  628                if delegate.exists(&p, Some(false)) {
  629                    return Some(path.into());
  630                }
  631            }
  632
  633            None
  634        }
  635    }
  636
  637    init_test(cx);
  638    let fs = FakeFs::new(cx.executor());
  639
  640    fs.insert_tree(
  641        path!("/the-root"),
  642        json!({
  643            ".zed": {
  644                "settings.json": r#"
  645                {
  646                    "languages": {
  647                        "Python": {
  648                            "language_servers": ["ty"]
  649                        }
  650                    }
  651                }"#
  652            },
  653            "project-a": {
  654                ".venv": {},
  655                "file.py": "",
  656                "pyproject.toml": ""
  657            },
  658            "project-b": {
  659                ".venv": {},
  660                "source_file.py":"",
  661                "another_file.py": "",
  662                "pyproject.toml": ""
  663            }
  664        }),
  665    )
  666    .await;
  667    cx.update(|cx| {
  668        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
  669    });
  670
  671    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
  672    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
  673    let _fake_python_server = language_registry.register_fake_lsp(
  674        "Python",
  675        FakeLspAdapter {
  676            name: "ty",
  677            capabilities: lsp::ServerCapabilities {
  678                ..Default::default()
  679            },
  680            ..Default::default()
  681        },
  682    );
  683
  684    language_registry.add(python_lang(fs.clone()));
  685    let (first_buffer, _handle) = project
  686        .update(cx, |project, cx| {
  687            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
  688        })
  689        .await
  690        .unwrap();
  691    cx.executor().run_until_parked();
  692    let servers = project.update(cx, |project, cx| {
  693        project.lsp_store.update(cx, |this, cx| {
  694            first_buffer.update(cx, |buffer, cx| {
  695                this.running_language_servers_for_local_buffer(buffer, cx)
  696                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
  697                    .collect::<Vec<_>>()
  698            })
  699        })
  700    });
  701    cx.executor().run_until_parked();
  702    assert_eq!(servers.len(), 1);
  703    let (adapter, server) = servers.into_iter().next().unwrap();
  704    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
  705    assert_eq!(server.server_id(), LanguageServerId(0));
  706    // `workspace_folders` is set to the rooting point (here, project-a).
  707    assert_eq!(
  708        server.workspace_folders(),
  709        BTreeSet::from_iter(
  710            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
  711        )
  712    );
  713
  714    let (second_project_buffer, _other_handle) = project
  715        .update(cx, |project, cx| {
  716            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
  717        })
  718        .await
  719        .unwrap();
  720    cx.executor().run_until_parked();
  721    let servers = project.update(cx, |project, cx| {
  722        project.lsp_store.update(cx, |this, cx| {
  723            second_project_buffer.update(cx, |buffer, cx| {
  724                this.running_language_servers_for_local_buffer(buffer, cx)
  725                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
  726                    .collect::<Vec<_>>()
  727            })
  728        })
  729    });
  730    cx.executor().run_until_parked();
  731    assert_eq!(servers.len(), 1);
  732    let (adapter, server) = servers.into_iter().next().unwrap();
  733    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
  734    // We're not using venvs at all here, so both folders should fall under the same root.
  735    assert_eq!(server.server_id(), LanguageServerId(0));
  736    // Now, let's select a different toolchain for one of the subprojects.
  737
  738    let Toolchains {
  739        toolchains: available_toolchains_for_b,
  740        root_path,
  741        ..
  742    } = project
  743        .update(cx, |this, cx| {
  744            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
  745            this.available_toolchains(
  746                ProjectPath {
  747                    worktree_id,
  748                    path: rel_path("project-b/source_file.py").into(),
  749                },
  750                LanguageName::new_static("Python"),
  751                cx,
  752            )
  753        })
  754        .await
  755        .expect("A toolchain to be discovered");
  756    assert_eq!(root_path.as_ref(), rel_path("project-b"));
  757    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
  758    let currently_active_toolchain = project
  759        .update(cx, |this, cx| {
  760            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
  761            this.active_toolchain(
  762                ProjectPath {
  763                    worktree_id,
  764                    path: rel_path("project-b/source_file.py").into(),
  765                },
  766                LanguageName::new_static("Python"),
  767                cx,
  768            )
  769        })
  770        .await;
  771
  772    assert!(currently_active_toolchain.is_none());
  773    let _ = project
  774        .update(cx, |this, cx| {
  775            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
  776            this.activate_toolchain(
  777                ProjectPath {
  778                    worktree_id,
  779                    path: root_path,
  780                },
  781                available_toolchains_for_b
  782                    .toolchains
  783                    .into_iter()
  784                    .next()
  785                    .unwrap(),
  786                cx,
  787            )
  788        })
  789        .await
  790        .unwrap();
  791    cx.run_until_parked();
  792    let servers = project.update(cx, |project, cx| {
  793        project.lsp_store.update(cx, |this, cx| {
  794            second_project_buffer.update(cx, |buffer, cx| {
  795                this.running_language_servers_for_local_buffer(buffer, cx)
  796                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
  797                    .collect::<Vec<_>>()
  798            })
  799        })
  800    });
  801    cx.executor().run_until_parked();
  802    assert_eq!(servers.len(), 1);
  803    let (adapter, server) = servers.into_iter().next().unwrap();
  804    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
  805    // There's a new language server in town.
  806    assert_eq!(server.server_id(), LanguageServerId(1));
  807}
  808
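     // Exercises the language server lifecycle: open/change/save/close notifications, renames that
     // move a buffer between servers, and server restarts.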
  809#[gpui::test]
  810async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
  811    init_test(cx);
  812
  813    let fs = FakeFs::new(cx.executor());
  814    fs.insert_tree(
  815        path!("/dir"),
  816        json!({
  817            "test.rs": "const A: i32 = 1;",
  818            "test2.rs": "",
  819            "Cargo.toml": "a = 1",
  820            "package.json": "{\"a\": 1}",
  821        }),
  822    )
  823    .await;
  824
  825    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
  826    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
  827
  828    let mut fake_rust_servers = language_registry.register_fake_lsp(
  829        "Rust",
  830        FakeLspAdapter {
  831            name: "the-rust-language-server",
  832            capabilities: lsp::ServerCapabilities {
  833                completion_provider: Some(lsp::CompletionOptions {
  834                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
  835                    ..Default::default()
  836                }),
  837                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
  838                    lsp::TextDocumentSyncOptions {
  839                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
  840                        ..Default::default()
  841                    },
  842                )),
  843                ..Default::default()
  844            },
  845            ..Default::default()
  846        },
  847    );
  848    let mut fake_json_servers = language_registry.register_fake_lsp(
  849        "JSON",
  850        FakeLspAdapter {
  851            name: "the-json-language-server",
  852            capabilities: lsp::ServerCapabilities {
  853                completion_provider: Some(lsp::CompletionOptions {
  854                    trigger_characters: Some(vec![":".to_string()]),
  855                    ..Default::default()
  856                }),
  857                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
  858                    lsp::TextDocumentSyncOptions {
  859                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
  860                        ..Default::default()
  861                    },
  862                )),
  863                ..Default::default()
  864            },
  865            ..Default::default()
  866        },
  867    );
  868
  869    // Open a buffer without an associated language server.
  870    let (toml_buffer, _handle) = project
  871        .update(cx, |project, cx| {
  872            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
  873        })
  874        .await
  875        .unwrap();
  876
  877    // Open a buffer with an associated language server before the language for it has been loaded.
  878    let (rust_buffer, _handle2) = project
  879        .update(cx, |project, cx| {
  880            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
  881        })
  882        .await
  883        .unwrap();
  884    rust_buffer.update(cx, |buffer, _| {
  885        assert_eq!(buffer.language().map(|l| l.name()), None);
  886    });
  887
  888    // Now we add the languages to the project, and ensure they get assigned to all
  889    // the relevant open buffers.
  890    language_registry.add(json_lang());
  891    language_registry.add(rust_lang());
  892    cx.executor().run_until_parked();
  893    rust_buffer.update(cx, |buffer, _| {
  894        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
  895    });
  896
  897    // A server is started up, and it is notified about Rust files.
  898    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
  899    assert_eq!(
  900        fake_rust_server
  901            .receive_notification::<lsp::notification::DidOpenTextDocument>()
  902            .await
  903            .text_document,
  904        lsp::TextDocumentItem {
  905            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
  906            version: 0,
  907            text: "const A: i32 = 1;".to_string(),
  908            language_id: "rust".to_string(),
  909        }
  910    );
  911
  912    // The buffer is configured based on the language server's capabilities.
  913    rust_buffer.update(cx, |buffer, _| {
  914        assert_eq!(
  915            buffer
  916                .completion_triggers()
  917                .iter()
  918                .cloned()
  919                .collect::<Vec<_>>(),
  920            &[".".to_string(), "::".to_string()]
  921        );
  922    });
  923    toml_buffer.update(cx, |buffer, _| {
  924        assert!(buffer.completion_triggers().is_empty());
  925    });
  926
  927    // Edit a buffer. The changes are reported to the language server.
  928    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
  929    assert_eq!(
  930        fake_rust_server
  931            .receive_notification::<lsp::notification::DidChangeTextDocument>()
  932            .await
  933            .text_document,
  934        lsp::VersionedTextDocumentIdentifier::new(
  935            lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
  936            1
  937        )
  938    );
  939
  940    // Open a third buffer with a different associated language server.
  941    let (json_buffer, _json_handle) = project
  942        .update(cx, |project, cx| {
  943            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
  944        })
  945        .await
  946        .unwrap();
  947
  948    // A json language server is started up and is only notified about the json buffer.
  949    let mut fake_json_server = fake_json_servers.next().await.unwrap();
  950    assert_eq!(
  951        fake_json_server
  952            .receive_notification::<lsp::notification::DidOpenTextDocument>()
  953            .await
  954            .text_document,
  955        lsp::TextDocumentItem {
  956            uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
  957            version: 0,
  958            text: "{\"a\": 1}".to_string(),
  959            language_id: "json".to_string(),
  960        }
  961    );
  962
  963    // This buffer is configured based on the second language server's
  964    // capabilities.
  965    json_buffer.update(cx, |buffer, _| {
  966        assert_eq!(
  967            buffer
  968                .completion_triggers()
  969                .iter()
  970                .cloned()
  971                .collect::<Vec<_>>(),
  972            &[":".to_string()]
  973        );
  974    });
  975
  976    // When opening another buffer whose language server is already running,
  977    // it is also configured based on the existing language server's capabilities.
  978    let (rust_buffer2, _handle4) = project
  979        .update(cx, |project, cx| {
  980            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
  981        })
  982        .await
  983        .unwrap();
  984    rust_buffer2.update(cx, |buffer, _| {
  985        assert_eq!(
  986            buffer
  987                .completion_triggers()
  988                .iter()
  989                .cloned()
  990                .collect::<Vec<_>>(),
  991            &[".".to_string(), "::".to_string()]
  992        );
  993    });
  994
  995    // Changes are reported only to servers matching the buffer's language.
  996    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
  997    rust_buffer2.update(cx, |buffer, cx| {
  998        buffer.edit([(0..0, "let x = 1;")], None, cx)
  999    });
 1000    assert_eq!(
 1001        fake_rust_server
 1002            .receive_notification::<lsp::notification::DidChangeTextDocument>()
 1003            .await
 1004            .text_document,
 1005        lsp::VersionedTextDocumentIdentifier::new(
 1006            lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
 1007            1
 1008        )
 1009    );
 1010
 1011    // Save notifications are reported to all servers.
 1012    project
 1013        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
 1014        .await
 1015        .unwrap();
 1016    assert_eq!(
 1017        fake_rust_server
 1018            .receive_notification::<lsp::notification::DidSaveTextDocument>()
 1019            .await
 1020            .text_document,
 1021        lsp::TextDocumentIdentifier::new(
 1022            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
 1023        )
 1024    );
 1025    assert_eq!(
 1026        fake_json_server
 1027            .receive_notification::<lsp::notification::DidSaveTextDocument>()
 1028            .await
 1029            .text_document,
 1030        lsp::TextDocumentIdentifier::new(
 1031            lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
 1032        )
 1033    );
 1034
 1035    // Renames are reported only to servers matching the buffer's language.
 1036    fs.rename(
 1037        Path::new(path!("/dir/test2.rs")),
 1038        Path::new(path!("/dir/test3.rs")),
 1039        Default::default(),
 1040    )
 1041    .await
 1042    .unwrap();
 1043    assert_eq!(
 1044        fake_rust_server
 1045            .receive_notification::<lsp::notification::DidCloseTextDocument>()
 1046            .await
 1047            .text_document,
 1048        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
 1049    );
 1050    assert_eq!(
 1051        fake_rust_server
 1052            .receive_notification::<lsp::notification::DidOpenTextDocument>()
 1053            .await
 1054            .text_document,
 1055        lsp::TextDocumentItem {
 1056            uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
 1057            version: 0,
 1058            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
 1059            language_id: "rust".to_string(),
 1060        },
 1061    );
 1062
 1063    rust_buffer2.update(cx, |buffer, cx| {
 1064        buffer.update_diagnostics(
 1065            LanguageServerId(0),
 1066            DiagnosticSet::from_sorted_entries(
 1067                vec![DiagnosticEntry {
 1068                    diagnostic: Default::default(),
 1069                    range: Anchor::MIN..Anchor::MAX,
 1070                }],
 1071                &buffer.snapshot(),
 1072            ),
 1073            cx,
 1074        );
 1075        assert_eq!(
 1076            buffer
 1077                .snapshot()
 1078                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
 1079                .count(),
 1080            1
 1081        );
 1082    });
 1083
 1084    // When the rename changes the extension of the file, the buffer gets closed on the old
 1085    // language server and gets opened on the new one.
 1086    fs.rename(
 1087        Path::new(path!("/dir/test3.rs")),
 1088        Path::new(path!("/dir/test3.json")),
 1089        Default::default(),
 1090    )
 1091    .await
 1092    .unwrap();
 1093    assert_eq!(
 1094        fake_rust_server
 1095            .receive_notification::<lsp::notification::DidCloseTextDocument>()
 1096            .await
 1097            .text_document,
 1098        lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
 1099    );
 1100    assert_eq!(
 1101        fake_json_server
 1102            .receive_notification::<lsp::notification::DidOpenTextDocument>()
 1103            .await
 1104            .text_document,
 1105        lsp::TextDocumentItem {
 1106            uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
 1107            version: 0,
 1108            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
 1109            language_id: "json".to_string(),
 1110        },
 1111    );
 1112
 1113    // We clear the diagnostics, since the language has changed.
 1114    rust_buffer2.update(cx, |buffer, _| {
 1115        assert_eq!(
 1116            buffer
 1117                .snapshot()
 1118                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
 1119                .count(),
 1120            0
 1121        );
 1122    });
 1123
 1124    // The renamed file's version resets after changing language servers.
 1125    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
 1126    assert_eq!(
 1127        fake_json_server
 1128            .receive_notification::<lsp::notification::DidChangeTextDocument>()
 1129            .await
 1130            .text_document,
 1131        lsp::VersionedTextDocumentIdentifier::new(
 1132            lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
 1133            1
 1134        )
 1135    );
 1136
 1137    // Restart language servers
 1138    project.update(cx, |project, cx| {
 1139        project.restart_language_servers_for_buffers(
 1140            vec![rust_buffer.clone(), json_buffer.clone()],
 1141            HashSet::default(),
 1142            cx,
 1143        );
 1144    });
 1145
 1146    let mut rust_shutdown_requests = fake_rust_server
 1147        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
 1148    let mut json_shutdown_requests = fake_json_server
 1149        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
 1150    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
 1151
 1152    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
 1153    let mut fake_json_server = fake_json_servers.next().await.unwrap();
 1154
 1155    // Ensure rust document is reopened in new rust language server
 1156    assert_eq!(
 1157        fake_rust_server
 1158            .receive_notification::<lsp::notification::DidOpenTextDocument>()
 1159            .await
 1160            .text_document,
 1161        lsp::TextDocumentItem {
 1162            uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
 1163            version: 0,
 1164            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
 1165            language_id: "rust".to_string(),
 1166        }
 1167    );
 1168
 1169    // Ensure json documents are reopened in new json language server
 1170    assert_set_eq!(
 1171        [
 1172            fake_json_server
 1173                .receive_notification::<lsp::notification::DidOpenTextDocument>()
 1174                .await
 1175                .text_document,
 1176            fake_json_server
 1177                .receive_notification::<lsp::notification::DidOpenTextDocument>()
 1178                .await
 1179                .text_document,
 1180        ],
 1181        [
 1182            lsp::TextDocumentItem {
 1183                uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
 1184                version: 0,
 1185                text: json_buffer.update(cx, |buffer, _| buffer.text()),
 1186                language_id: "json".to_string(),
 1187            },
 1188            lsp::TextDocumentItem {
 1189                uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
 1190                version: 0,
 1191                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
 1192                language_id: "json".to_string(),
 1193            }
 1194        ]
 1195    );
 1196
 1197    // Close notifications are reported only to servers matching the buffer's language.
 1198    cx.update(|_| drop(_json_handle));
 1199    let close_message = lsp::DidCloseTextDocumentParams {
 1200        text_document: lsp::TextDocumentIdentifier::new(
 1201            lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
 1202        ),
 1203    };
 1204    assert_eq!(
 1205        fake_json_server
 1206            .receive_notification::<lsp::notification::DidCloseTextDocument>()
 1207            .await,
 1208        close_message,
 1209    );
 1210}
 1211
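     // Checks that a relative LSP binary path from settings is resolved against the worktree root when
     // the file exists there, while a bare file name is left for PATH lookup.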
 1212#[gpui::test]
 1213async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
 1214    init_test(cx);
 1215
 1216    let settings_json_contents = json!({
 1217        "languages": {
 1218            "Rust": {
 1219                "language_servers": ["my_fake_lsp", "lsp_on_path"]
 1220            }
 1221        },
 1222        "lsp": {
 1223            "my_fake_lsp": {
 1224                "binary": {
 1225                    // file exists, so this is treated as a relative path
 1226                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
 1227                }
 1228            },
 1229            "lsp_on_path": {
 1230                "binary": {
 1231                    // file doesn't exist, so it will fall back on PATH env var
 1232                    "path": path!("lsp_on_path.exe").to_string(),
 1233                }
 1234            }
 1235        },
 1236    });
 1237
 1238    let fs = FakeFs::new(cx.executor());
 1239    fs.insert_tree(
 1240        path!("/the-root"),
 1241        json!({
 1242            ".zed": {
 1243                "settings.json": settings_json_contents.to_string(),
 1244            },
 1245            ".relative_path": {
 1246                "to": {
 1247                    "my_fake_lsp.exe": "",
 1248                },
 1249            },
 1250            "src": {
 1251                "main.rs": "",
 1252            }
 1253        }),
 1254    )
 1255    .await;
 1256
 1257    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
 1258    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 1259    language_registry.add(rust_lang());
 1260
 1261    let mut my_fake_lsp = language_registry.register_fake_lsp(
 1262        "Rust",
 1263        FakeLspAdapter {
 1264            name: "my_fake_lsp",
 1265            ..Default::default()
 1266        },
 1267    );
 1268    let mut lsp_on_path = language_registry.register_fake_lsp(
 1269        "Rust",
 1270        FakeLspAdapter {
 1271            name: "lsp_on_path",
 1272            ..Default::default()
 1273        },
 1274    );
 1275
 1276    cx.run_until_parked();
 1277
 1278    // Start the language server by opening a buffer with a compatible file extension.
 1279    project
 1280        .update(cx, |project, cx| {
 1281            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
 1282        })
 1283        .await
 1284        .unwrap();
 1285
 1286    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
 1287    assert_eq!(
 1288        lsp_path.to_string_lossy(),
 1289        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
 1290    );
 1291
 1292    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
 1293    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
 1294}
 1295
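     // Checks that a `~/`-prefixed LSP binary path expands to the user's home directory.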
 1296#[gpui::test]
 1297async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
 1298    init_test(cx);
 1299
 1300    let settings_json_contents = json!({
 1301        "languages": {
 1302            "Rust": {
 1303                "language_servers": ["tilde_lsp"]
 1304            }
 1305        },
 1306        "lsp": {
 1307            "tilde_lsp": {
 1308                "binary": {
 1309                    "path": "~/.local/bin/rust-analyzer",
 1310                }
 1311            }
 1312        },
 1313    });
 1314
 1315    let fs = FakeFs::new(cx.executor());
 1316    fs.insert_tree(
 1317        path!("/root"),
 1318        json!({
 1319            ".zed": {
 1320                "settings.json": settings_json_contents.to_string(),
 1321            },
 1322            "src": {
 1323                "main.rs": "fn main() {}",
 1324            }
 1325        }),
 1326    )
 1327    .await;
 1328
 1329    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
 1330    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 1331    language_registry.add(rust_lang());
 1332
 1333    let mut tilde_lsp = language_registry.register_fake_lsp(
 1334        "Rust",
 1335        FakeLspAdapter {
 1336            name: "tilde_lsp",
 1337            ..Default::default()
 1338        },
 1339    );
 1340    cx.run_until_parked();
 1341
 1342    project
 1343        .update(cx, |project, cx| {
 1344            project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
 1345        })
 1346        .await
 1347        .unwrap();
 1348
 1349    let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
 1350    let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
 1351    assert_eq!(
 1352        lsp_path, expected_path,
 1353        "Tilde path should expand to home directory"
 1354    );
 1355}
 1356
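     // Checks that `workspace/didChangeWatchedFiles` registrations cause ignored and out-of-worktree
     // paths to be watched, loading ignored directories only when a watcher requires them.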
 1357#[gpui::test]
 1358async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
 1359    init_test(cx);
 1360
 1361    let fs = FakeFs::new(cx.executor());
 1362    fs.insert_tree(
 1363        path!("/the-root"),
 1364        json!({
 1365            ".gitignore": "target\n",
 1366            "Cargo.lock": "",
 1367            "src": {
 1368                "a.rs": "",
 1369                "b.rs": "",
 1370            },
 1371            "target": {
 1372                "x": {
 1373                    "out": {
 1374                        "x.rs": ""
 1375                    }
 1376                },
 1377                "y": {
 1378                    "out": {
 1379                        "y.rs": "",
 1380                    }
 1381                },
 1382                "z": {
 1383                    "out": {
 1384                        "z.rs": ""
 1385                    }
 1386                }
 1387            }
 1388        }),
 1389    )
 1390    .await;
 1391    fs.insert_tree(
 1392        path!("/the-registry"),
 1393        json!({
 1394            "dep1": {
 1395                "src": {
 1396                    "dep1.rs": "",
 1397                }
 1398            },
 1399            "dep2": {
 1400                "src": {
 1401                    "dep2.rs": "",
 1402                }
 1403            },
 1404        }),
 1405    )
 1406    .await;
 1407    fs.insert_tree(
 1408        path!("/the/stdlib"),
 1409        json!({
 1410            "LICENSE": "",
 1411            "src": {
 1412                "string.rs": "",
 1413            }
 1414        }),
 1415    )
 1416    .await;
 1417
 1418    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
 1419    let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
 1420        (project.languages().clone(), project.lsp_store())
 1421    });
 1422    language_registry.add(rust_lang());
 1423    let mut fake_servers = language_registry.register_fake_lsp(
 1424        "Rust",
 1425        FakeLspAdapter {
 1426            name: "the-language-server",
 1427            ..Default::default()
 1428        },
 1429    );
 1430
 1431    cx.executor().run_until_parked();
 1432
 1433    // Start the language server by opening a buffer with a compatible file extension.
 1434    project
 1435        .update(cx, |project, cx| {
 1436            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
 1437        })
 1438        .await
 1439        .unwrap();
 1440
 1441    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
 1442    project.update(cx, |project, cx| {
 1443        let worktree = project.worktrees(cx).next().unwrap();
 1444        assert_eq!(
 1445            worktree
 1446                .read(cx)
 1447                .snapshot()
 1448                .entries(true, 0)
 1449                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
 1450                .collect::<Vec<_>>(),
 1451            &[
 1452                ("", false),
 1453                (".gitignore", false),
 1454                ("Cargo.lock", false),
 1455                ("src", false),
 1456                ("src/a.rs", false),
 1457                ("src/b.rs", false),
 1458                ("target", true),
 1459            ]
 1460        );
 1461    });
 1462
 1463    let prev_read_dir_count = fs.read_dir_call_count();
 1464
 1465    let fake_server = fake_servers.next().await.unwrap();
 1466    let server_id = lsp_store.read_with(cx, |lsp_store, _| {
 1467        let (id, _) = lsp_store.language_server_statuses().next().unwrap();
 1468        id
 1469    });
 1470
 1471    // Simulate jumping to a definition in a dependency outside of the worktree.
 1472    let _out_of_worktree_buffer = project
 1473        .update(cx, |project, cx| {
 1474            project.open_local_buffer_via_lsp(
 1475                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
 1476                server_id,
 1477                cx,
 1478            )
 1479        })
 1480        .await
 1481        .unwrap();
 1482
 1483    // Keep track of the FS events reported to the language server.
 1484    let file_changes = Arc::new(Mutex::new(Vec::new()));
 1485    fake_server
 1486        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
 1487            registrations: vec![lsp::Registration {
 1488                id: Default::default(),
 1489                method: "workspace/didChangeWatchedFiles".to_string(),
 1490                register_options: serde_json::to_value(
 1491                    lsp::DidChangeWatchedFilesRegistrationOptions {
 1492                        watchers: vec![
 1493                            lsp::FileSystemWatcher {
 1494                                glob_pattern: lsp::GlobPattern::String(
 1495                                    path!("/the-root/Cargo.toml").to_string(),
 1496                                ),
 1497                                kind: None,
 1498                            },
 1499                            lsp::FileSystemWatcher {
 1500                                glob_pattern: lsp::GlobPattern::String(
 1501                                    path!("/the-root/src/*.{rs,c}").to_string(),
 1502                                ),
 1503                                kind: None,
 1504                            },
 1505                            lsp::FileSystemWatcher {
 1506                                glob_pattern: lsp::GlobPattern::String(
 1507                                    path!("/the-root/target/y/**/*.rs").to_string(),
 1508                                ),
 1509                                kind: None,
 1510                            },
 1511                            lsp::FileSystemWatcher {
 1512                                glob_pattern: lsp::GlobPattern::String(
 1513                                    path!("/the/stdlib/src/**/*.rs").to_string(),
 1514                                ),
 1515                                kind: None,
 1516                            },
 1517                            lsp::FileSystemWatcher {
 1518                                glob_pattern: lsp::GlobPattern::String(
 1519                                    path!("**/Cargo.lock").to_string(),
 1520                                ),
 1521                                kind: None,
 1522                            },
 1523                        ],
 1524                    },
 1525                )
 1526                .ok(),
 1527            }],
 1528        })
 1529        .await
 1530        .into_response()
 1531        .unwrap();
 1532    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
 1533        let file_changes = file_changes.clone();
 1534        move |params, _| {
 1535            let mut file_changes = file_changes.lock();
 1536            file_changes.extend(params.changes);
 1537            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
 1538        }
 1539    });
 1540
 1541    cx.executor().run_until_parked();
 1542    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
 1543    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
 1544
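         // Exclude the watchers for the user config directory and the global gitignore, which are not relevant to this test.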
 1545    let mut new_watched_paths = fs.watched_paths();
 1546    new_watched_paths.retain(|path| {
 1547        !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
 1548    });
 1549    assert_eq!(
 1550        &new_watched_paths,
 1551        &[
 1552            Path::new(path!("/the-root")),
 1553            Path::new(path!("/the-registry/dep1/src/dep1.rs")),
 1554            Path::new(path!("/the/stdlib/src"))
 1555        ]
 1556    );
 1557
 1558    // Now the language server has asked us to watch an ignored directory path,
 1559    // so we recursively load it.
 1560    project.update(cx, |project, cx| {
 1561        let worktree = project.visible_worktrees(cx).next().unwrap();
 1562        assert_eq!(
 1563            worktree
 1564                .read(cx)
 1565                .snapshot()
 1566                .entries(true, 0)
 1567                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
 1568                .collect::<Vec<_>>(),
 1569            &[
 1570                ("", false),
 1571                (".gitignore", false),
 1572                ("Cargo.lock", false),
 1573                ("src", false),
 1574                ("src/a.rs", false),
 1575                ("src/b.rs", false),
 1576                ("target", true),
 1577                ("target/x", true),
 1578                ("target/y", true),
 1579                ("target/y/out", true),
 1580                ("target/y/out/y.rs", true),
 1581                ("target/z", true),
 1582            ]
 1583        );
 1584    });
 1585
 1586    // Perform some file system mutations, five of which match the watched patterns
 1587    // and three of which do not.
 1588    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
 1589        .await
 1590        .unwrap();
 1591    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
 1592        .await
 1593        .unwrap();
 1594    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
 1595        .await
 1596        .unwrap();
 1597    fs.create_file(
 1598        path!("/the-root/target/x/out/x2.rs").as_ref(),
 1599        Default::default(),
 1600    )
 1601    .await
 1602    .unwrap();
 1603    fs.create_file(
 1604        path!("/the-root/target/y/out/y2.rs").as_ref(),
 1605        Default::default(),
 1606    )
 1607    .await
 1608    .unwrap();
 1609    fs.save(
 1610        path!("/the-root/Cargo.lock").as_ref(),
 1611        &"".into(),
 1612        Default::default(),
 1613    )
 1614    .await
 1615    .unwrap();
 1616    fs.save(
 1617        path!("/the-stdlib/LICENSE").as_ref(),
 1618        &"".into(),
 1619        Default::default(),
 1620    )
 1621    .await
 1622    .unwrap();
 1623    fs.save(
 1624        path!("/the/stdlib/src/string.rs").as_ref(),
 1625        &"".into(),
 1626        Default::default(),
 1627    )
 1628    .await
 1629    .unwrap();
 1630
 1631    // The language server receives events for the FS mutations that match its watch patterns.
 1632    cx.executor().run_until_parked();
 1633    assert_eq!(
 1634        &*file_changes.lock(),
 1635        &[
 1636            lsp::FileEvent {
 1637                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
 1638                typ: lsp::FileChangeType::CHANGED,
 1639            },
 1640            lsp::FileEvent {
 1641                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
 1642                typ: lsp::FileChangeType::DELETED,
 1643            },
 1644            lsp::FileEvent {
 1645                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
 1646                typ: lsp::FileChangeType::CREATED,
 1647            },
 1648            lsp::FileEvent {
 1649                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
 1650                typ: lsp::FileChangeType::CREATED,
 1651            },
 1652            lsp::FileEvent {
 1653                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
 1654                typ: lsp::FileChangeType::CHANGED,
 1655            },
 1656        ]
 1657    );
 1658}
 1659
 1660#[gpui::test]
 1661async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
 1662    init_test(cx);
 1663
 1664    let fs = FakeFs::new(cx.executor());
 1665    fs.insert_tree(
 1666        path!("/dir"),
 1667        json!({
 1668            "a.rs": "let a = 1;",
 1669            "b.rs": "let b = 2;"
 1670        }),
 1671    )
 1672    .await;
 1673
 1674    let project = Project::test(
 1675        fs,
 1676        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
 1677        cx,
 1678    )
 1679    .await;
 1680    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
 1681
 1682    let buffer_a = project
 1683        .update(cx, |project, cx| {
 1684            project.open_local_buffer(path!("/dir/a.rs"), cx)
 1685        })
 1686        .await
 1687        .unwrap();
 1688    let buffer_b = project
 1689        .update(cx, |project, cx| {
 1690            project.open_local_buffer(path!("/dir/b.rs"), cx)
 1691        })
 1692        .await
 1693        .unwrap();
 1694
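         // Publish one diagnostic for each of the two single-file worktrees.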
 1695    lsp_store.update(cx, |lsp_store, cx| {
 1696        lsp_store
 1697            .update_diagnostics(
 1698                LanguageServerId(0),
 1699                lsp::PublishDiagnosticsParams {
 1700                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
 1701                    version: None,
 1702                    diagnostics: vec![lsp::Diagnostic {
 1703                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
 1704                        severity: Some(lsp::DiagnosticSeverity::ERROR),
 1705                        message: "error 1".to_string(),
 1706                        ..Default::default()
 1707                    }],
 1708                },
 1709                None,
 1710                DiagnosticSourceKind::Pushed,
 1711                &[],
 1712                cx,
 1713            )
 1714            .unwrap();
 1715        lsp_store
 1716            .update_diagnostics(
 1717                LanguageServerId(0),
 1718                lsp::PublishDiagnosticsParams {
 1719                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
 1720                    version: None,
 1721                    diagnostics: vec![lsp::Diagnostic {
 1722                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
 1723                        severity: Some(DiagnosticSeverity::WARNING),
 1724                        message: "error 2".to_string(),
 1725                        ..Default::default()
 1726                    }],
 1727                },
 1728                None,
 1729                DiagnosticSourceKind::Pushed,
 1730                &[],
 1731                cx,
 1732            )
 1733            .unwrap();
 1734    });
 1735
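         // Each buffer should reflect only the diagnostic reported for its own file.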
 1736    buffer_a.update(cx, |buffer, _| {
 1737        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
 1738        assert_eq!(
 1739            chunks
 1740                .iter()
 1741                .map(|(s, d)| (s.as_str(), *d))
 1742                .collect::<Vec<_>>(),
 1743            &[
 1744                ("let ", None),
 1745                ("a", Some(DiagnosticSeverity::ERROR)),
 1746                (" = 1;", None),
 1747            ]
 1748        );
 1749    });
 1750    buffer_b.update(cx, |buffer, _| {
 1751        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
 1752        assert_eq!(
 1753            chunks
 1754                .iter()
 1755                .map(|(s, d)| (s.as_str(), *d))
 1756                .collect::<Vec<_>>(),
 1757            &[
 1758                ("let ", None),
 1759                ("b", Some(DiagnosticSeverity::WARNING)),
 1760                (" = 2;", None),
 1761            ]
 1762        );
 1763    });
 1764}
 1765
 1766#[gpui::test]
 1767async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
 1768    init_test(cx);
 1769
 1770    let fs = FakeFs::new(cx.executor());
 1771    fs.insert_tree(
 1772        path!("/root"),
 1773        json!({
 1774            "dir": {
 1775                ".git": {
 1776                    "HEAD": "ref: refs/heads/main",
 1777                },
 1778                ".gitignore": "b.rs",
 1779                "a.rs": "let a = 1;",
 1780                "b.rs": "let b = 2;",
 1781            },
 1782            "other.rs": "let b = c;"
 1783        }),
 1784    )
 1785    .await;
 1786
 1787    let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
 1788    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
 1789    let (worktree, _) = project
 1790        .update(cx, |project, cx| {
 1791            project.find_or_create_worktree(path!("/root/dir"), true, cx)
 1792        })
 1793        .await
 1794        .unwrap();
 1795    let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
 1796
 1797    let (worktree, _) = project
 1798        .update(cx, |project, cx| {
 1799            project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
 1800        })
 1801        .await
 1802        .unwrap();
 1803    let other_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
 1804
 1805    let server_id = LanguageServerId(0);
 1806    lsp_store.update(cx, |lsp_store, cx| {
 1807        lsp_store
 1808            .update_diagnostics(
 1809                server_id,
 1810                lsp::PublishDiagnosticsParams {
 1811                    uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
 1812                    version: None,
 1813                    diagnostics: vec![lsp::Diagnostic {
 1814                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
 1815                        severity: Some(lsp::DiagnosticSeverity::ERROR),
 1816                        message: "unused variable 'b'".to_string(),
 1817                        ..Default::default()
 1818                    }],
 1819                },
 1820                None,
 1821                DiagnosticSourceKind::Pushed,
 1822                &[],
 1823                cx,
 1824            )
 1825            .unwrap();
 1826        lsp_store
 1827            .update_diagnostics(
 1828                server_id,
 1829                lsp::PublishDiagnosticsParams {
 1830                    uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
 1831                    version: None,
 1832                    diagnostics: vec![lsp::Diagnostic {
 1833                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
 1834                        severity: Some(lsp::DiagnosticSeverity::ERROR),
 1835                        message: "unknown variable 'c'".to_string(),
 1836                        ..Default::default()
 1837                    }],
 1838                },
 1839                None,
 1840                DiagnosticSourceKind::Pushed,
 1841                &[],
 1842                cx,
 1843            )
 1844            .unwrap();
 1845    });
 1846
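         // Open the gitignored file; its buffer should still carry the diagnostic.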
 1847    let main_ignored_buffer = project
 1848        .update(cx, |project, cx| {
 1849            project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
 1850        })
 1851        .await
 1852        .unwrap();
 1853    main_ignored_buffer.update(cx, |buffer, _| {
 1854        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
 1855        assert_eq!(
 1856            chunks
 1857                .iter()
 1858                .map(|(s, d)| (s.as_str(), *d))
 1859                .collect::<Vec<_>>(),
 1860            &[
 1861                ("let ", None),
 1862                ("b", Some(DiagnosticSeverity::ERROR)),
 1863                (" = 2;", None),
 1864            ],
 1865                "Gitignored buffers should still get in-buffer diagnostics",
 1866        );
 1867    });
 1868    let other_buffer = project
 1869        .update(cx, |project, cx| {
 1870            project.open_buffer((other_worktree_id, rel_path("")), cx)
 1871        })
 1872        .await
 1873        .unwrap();
 1874    other_buffer.update(cx, |buffer, _| {
 1875        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
 1876        assert_eq!(
 1877            chunks
 1878                .iter()
 1879                .map(|(s, d)| (s.as_str(), *d))
 1880                .collect::<Vec<_>>(),
 1881            &[
 1882                ("let b = ", None),
 1883                ("c", Some(DiagnosticSeverity::ERROR)),
 1884                (";", None),
 1885            ],
 1886            "Buffers from non-visible worktrees should still get in-buffer diagnostics"
 1887        );
 1888    });
 1889
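         // Diagnostic summaries omit the ignored and non-visible files unless they are explicitly included.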
 1890    project.update(cx, |project, cx| {
 1891        assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
 1892        assert_eq!(
 1893            project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
 1894            vec![(
 1895                ProjectPath {
 1896                    worktree_id: main_worktree_id,
 1897                    path: rel_path("b.rs").into(),
 1898                },
 1899                server_id,
 1900                DiagnosticSummary {
 1901                    error_count: 1,
 1902                    warning_count: 0,
 1903                }
 1904            )]
 1905        );
 1906        assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
 1907        assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
 1908    });
 1909}
 1910
 1911#[gpui::test]
 1912async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
 1913    init_test(cx);
 1914
 1915    let progress_token = "the-progress-token";
 1916
 1917    let fs = FakeFs::new(cx.executor());
 1918    fs.insert_tree(
 1919        path!("/dir"),
 1920        json!({
 1921            "a.rs": "fn a() { A }",
 1922            "b.rs": "const y: i32 = 1",
 1923        }),
 1924    )
 1925    .await;
 1926
 1927    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 1928    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 1929
 1930    language_registry.add(rust_lang());
 1931    let mut fake_servers = language_registry.register_fake_lsp(
 1932        "Rust",
 1933        FakeLspAdapter {
 1934            disk_based_diagnostics_progress_token: Some(progress_token.into()),
 1935            disk_based_diagnostics_sources: vec!["disk".into()],
 1936            ..Default::default()
 1937        },
 1938    );
 1939
 1940    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
 1941
 1942    // Cause worktree to start the fake language server
 1943    let _ = project
 1944        .update(cx, |project, cx| {
 1945            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
 1946        })
 1947        .await
 1948        .unwrap();
 1949
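         // Subscribe to project events so we can assert on their order below.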
 1950    let mut events = cx.events(&project);
 1951
 1952    let fake_server = fake_servers.next().await.unwrap();
 1953    assert_eq!(
 1954        events.next().await.unwrap(),
 1955        Event::LanguageServerAdded(
 1956            LanguageServerId(0),
 1957            fake_server.server.name(),
 1958            Some(worktree_id)
 1959        ),
 1960    );
 1961
 1962    fake_server
 1963        .start_progress(format!("{}/0", progress_token))
 1964        .await;
 1965    assert_eq!(
 1966        events.next().await.unwrap(),
 1967        Event::DiskBasedDiagnosticsStarted {
 1968            language_server_id: LanguageServerId(0),
 1969        }
 1970    );
 1971
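         // Publish a diagnostic while the disk-based diagnostics progress is still in flight.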
 1972    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
 1973        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
 1974        version: None,
 1975        diagnostics: vec![lsp::Diagnostic {
 1976            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
 1977            severity: Some(lsp::DiagnosticSeverity::ERROR),
 1978            message: "undefined variable 'A'".to_string(),
 1979            ..Default::default()
 1980        }],
 1981    });
 1982    assert_eq!(
 1983        events.next().await.unwrap(),
 1984        Event::DiagnosticsUpdated {
 1985            language_server_id: LanguageServerId(0),
 1986            paths: vec![(worktree_id, rel_path("a.rs")).into()],
 1987        }
 1988    );
 1989
 1990    fake_server.end_progress(format!("{}/0", progress_token));
 1991    assert_eq!(
 1992        events.next().await.unwrap(),
 1993        Event::DiskBasedDiagnosticsFinished {
 1994            language_server_id: LanguageServerId(0)
 1995        }
 1996    );
 1997
 1998    let buffer = project
 1999        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
 2000        .await
 2001        .unwrap();
 2002
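         // The published diagnostic should now be visible in the buffer snapshot.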
 2003    buffer.update(cx, |buffer, _| {
 2004        let snapshot = buffer.snapshot();
 2005        let diagnostics = snapshot
 2006            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
 2007            .collect::<Vec<_>>();
 2008        assert_eq!(
 2009            diagnostics,
 2010            &[DiagnosticEntryRef {
 2011                range: Point::new(0, 9)..Point::new(0, 10),
 2012                diagnostic: &Diagnostic {
 2013                    severity: lsp::DiagnosticSeverity::ERROR,
 2014                    message: "undefined variable 'A'".to_string(),
 2015                    group_id: 0,
 2016                    is_primary: true,
 2017                    source_kind: DiagnosticSourceKind::Pushed,
 2018                    ..Diagnostic::default()
 2019                }
 2020            }]
 2021        )
 2022    });
 2023
 2024    // Ensure publishing empty diagnostics twice only results in one update event.
 2025    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
 2026        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
 2027        version: None,
 2028        diagnostics: Default::default(),
 2029    });
 2030    assert_eq!(
 2031        events.next().await.unwrap(),
 2032        Event::DiagnosticsUpdated {
 2033            language_server_id: LanguageServerId(0),
 2034            paths: vec![(worktree_id, rel_path("a.rs")).into()],
 2035        }
 2036    );
 2037
 2038    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
 2039        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
 2040        version: None,
 2041        diagnostics: Default::default(),
 2042    });
 2043    cx.executor().run_until_parked();
 2044    assert_eq!(futures::poll!(events.next()), Poll::Pending);
 2045}
 2046
 2047#[gpui::test]
 2048async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
 2049    init_test(cx);
 2050
 2051    let progress_token = "the-progress-token";
 2052
 2053    let fs = FakeFs::new(cx.executor());
 2054    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
 2055
 2056    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 2057
 2058    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 2059    language_registry.add(rust_lang());
 2060    let mut fake_servers = language_registry.register_fake_lsp(
 2061        "Rust",
 2062        FakeLspAdapter {
 2063            name: "the-language-server",
 2064            disk_based_diagnostics_sources: vec!["disk".into()],
 2065            disk_based_diagnostics_progress_token: Some(progress_token.into()),
 2066            ..FakeLspAdapter::default()
 2067        },
 2068    );
 2069
 2070    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
 2071
 2072    let (buffer, _handle) = project
 2073        .update(cx, |project, cx| {
 2074            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
 2075        })
 2076        .await
 2077        .unwrap();
 2078    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
 2079    // Simulate diagnostics starting to update.
 2080    let fake_server = fake_servers.next().await.unwrap();
 2081    fake_server.start_progress(progress_token).await;
 2082
 2083    // Restart the server before the diagnostics finish updating.
 2084    project.update(cx, |project, cx| {
 2085        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
 2086    });
 2087    let mut events = cx.events(&project);
 2088
 2089    // Simulate the newly started server beginning another round of disk-based diagnostics.
 2090    let fake_server = fake_servers.next().await.unwrap();
 2091    assert_eq!(
 2092        events.next().await.unwrap(),
 2093        Event::LanguageServerRemoved(LanguageServerId(0))
 2094    );
 2095    assert_eq!(
 2096        events.next().await.unwrap(),
 2097        Event::LanguageServerAdded(
 2098            LanguageServerId(1),
 2099            fake_server.server.name(),
 2100            Some(worktree_id)
 2101        )
 2102    );
 2103    fake_server.start_progress(progress_token).await;
 2104    assert_eq!(
 2105        events.next().await.unwrap(),
 2106        Event::LanguageServerBufferRegistered {
 2107            server_id: LanguageServerId(1),
 2108            buffer_id,
 2109            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
 2110            name: Some(fake_server.server.name())
 2111        }
 2112    );
 2113    assert_eq!(
 2114        events.next().await.unwrap(),
 2115        Event::DiskBasedDiagnosticsStarted {
 2116            language_server_id: LanguageServerId(1)
 2117        }
 2118    );
 2119    project.update(cx, |project, cx| {
 2120        assert_eq!(
 2121            project
 2122                .language_servers_running_disk_based_diagnostics(cx)
 2123                .collect::<Vec<_>>(),
 2124            [LanguageServerId(1)]
 2125        );
 2126    });
 2127
 2128    // All diagnostics are considered done, despite the old server's diagnostic
 2129    // task never completing.
 2130    fake_server.end_progress(progress_token);
 2131    assert_eq!(
 2132        events.next().await.unwrap(),
 2133        Event::DiskBasedDiagnosticsFinished {
 2134            language_server_id: LanguageServerId(1)
 2135        }
 2136    );
 2137    project.update(cx, |project, cx| {
 2138        assert_eq!(
 2139            project
 2140                .language_servers_running_disk_based_diagnostics(cx)
 2141                .collect::<Vec<_>>(),
 2142            [] as [language::LanguageServerId; 0]
 2143        );
 2144    });
 2145}
 2146
 2147#[gpui::test]
 2148async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
 2149    init_test(cx);
 2150
 2151    let fs = FakeFs::new(cx.executor());
 2152    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
 2153
 2154    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 2155
 2156    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 2157    language_registry.add(rust_lang());
 2158    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
 2159
 2160    let (buffer, _) = project
 2161        .update(cx, |project, cx| {
 2162            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
 2163        })
 2164        .await
 2165        .unwrap();
 2166
 2167    // Publish diagnostics
 2168    let fake_server = fake_servers.next().await.unwrap();
 2169    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
 2170        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
 2171        version: None,
 2172        diagnostics: vec![lsp::Diagnostic {
 2173            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
 2174            severity: Some(lsp::DiagnosticSeverity::ERROR),
 2175            message: "the message".to_string(),
 2176            ..Default::default()
 2177        }],
 2178    });
 2179
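         // Before the restart, the diagnostic and its summary should be visible.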
 2180    cx.executor().run_until_parked();
 2181    buffer.update(cx, |buffer, _| {
 2182        assert_eq!(
 2183            buffer
 2184                .snapshot()
 2185                .diagnostics_in_range::<_, usize>(0..1, false)
 2186                .map(|entry| entry.diagnostic.message.clone())
 2187                .collect::<Vec<_>>(),
 2188            ["the message".to_string()]
 2189        );
 2190    });
 2191    project.update(cx, |project, cx| {
 2192        assert_eq!(
 2193            project.diagnostic_summary(false, cx),
 2194            DiagnosticSummary {
 2195                error_count: 1,
 2196                warning_count: 0,
 2197            }
 2198        );
 2199    });
 2200
 2201    project.update(cx, |project, cx| {
 2202        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
 2203    });
 2204
 2205    // The diagnostics are cleared.
 2206    cx.executor().run_until_parked();
 2207    buffer.update(cx, |buffer, _| {
 2208        assert_eq!(
 2209            buffer
 2210                .snapshot()
 2211                .diagnostics_in_range::<_, usize>(0..1, false)
 2212                .map(|entry| entry.diagnostic.message.clone())
 2213                .collect::<Vec<_>>(),
 2214            Vec::<String>::new(),
 2215        );
 2216    });
 2217    project.update(cx, |project, cx| {
 2218        assert_eq!(
 2219            project.diagnostic_summary(false, cx),
 2220            DiagnosticSummary {
 2221                error_count: 0,
 2222                warning_count: 0,
 2223            }
 2224        );
 2225    });
 2226}
 2227
 2228#[gpui::test]
 2229async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
 2230    init_test(cx);
 2231
 2232    let fs = FakeFs::new(cx.executor());
 2233    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
 2234
 2235    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 2236    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 2237
 2238    language_registry.add(rust_lang());
 2239    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
 2240
 2241    let (buffer, _handle) = project
 2242        .update(cx, |project, cx| {
 2243            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
 2244        })
 2245        .await
 2246        .unwrap();
 2247
 2248    // Before restarting the server, report diagnostics with an unknown buffer version.
 2249    let fake_server = fake_servers.next().await.unwrap();
 2250    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
 2251        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
 2252        version: Some(10000),
 2253        diagnostics: Vec::new(),
 2254    });
 2255    cx.executor().run_until_parked();
 2256    project.update(cx, |project, cx| {
 2257        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
 2258    });
 2259
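         // The restarted server should receive a fresh `DidOpenTextDocument` with version 0, not the stale version.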
 2260    let mut fake_server = fake_servers.next().await.unwrap();
 2261    let notification = fake_server
 2262        .receive_notification::<lsp::notification::DidOpenTextDocument>()
 2263        .await
 2264        .text_document;
 2265    assert_eq!(notification.version, 0);
 2266}
 2267
 2268#[gpui::test]
 2269async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
 2270    init_test(cx);
 2271
 2272    let progress_token = "the-progress-token";
 2273
 2274    let fs = FakeFs::new(cx.executor());
 2275    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
 2276
 2277    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 2278
 2279    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 2280    language_registry.add(rust_lang());
 2281    let mut fake_servers = language_registry.register_fake_lsp(
 2282        "Rust",
 2283        FakeLspAdapter {
 2284            name: "the-language-server",
 2285            disk_based_diagnostics_sources: vec!["disk".into()],
 2286            disk_based_diagnostics_progress_token: Some(progress_token.into()),
 2287            ..Default::default()
 2288        },
 2289    );
 2290
 2291    let (buffer, _handle) = project
 2292        .update(cx, |project, cx| {
 2293            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
 2294        })
 2295        .await
 2296        .unwrap();
 2297
 2298    // Simulate diagnostics starting to update.
 2299    let mut fake_server = fake_servers.next().await.unwrap();
 2300    fake_server
 2301        .start_progress_with(
 2302            "another-token",
 2303            lsp::WorkDoneProgressBegin {
 2304                cancellable: Some(false),
 2305                ..Default::default()
 2306            },
 2307        )
 2308        .await;
 2309    fake_server
 2310        .start_progress_with(
 2311            progress_token,
 2312            lsp::WorkDoneProgressBegin {
 2313                cancellable: Some(true),
 2314                ..Default::default()
 2315            },
 2316        )
 2317        .await;
 2318    cx.executor().run_until_parked();
 2319
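         // Cancel the server's work for this buffer; only the token marked cancellable should be cancelled.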
 2320    project.update(cx, |project, cx| {
 2321        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
 2322    });
 2323
 2324    let cancel_notification = fake_server
 2325        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
 2326        .await;
 2327    assert_eq!(
 2328        cancel_notification.token,
 2329        NumberOrString::String(progress_token.into())
 2330    );
 2331}
 2332
 2333#[gpui::test]
 2334async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
 2335    init_test(cx);
 2336
 2337    let fs = FakeFs::new(cx.executor());
 2338    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
 2339        .await;
 2340
 2341    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 2342    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 2343
 2344    let mut fake_rust_servers = language_registry.register_fake_lsp(
 2345        "Rust",
 2346        FakeLspAdapter {
 2347            name: "rust-lsp",
 2348            ..Default::default()
 2349        },
 2350    );
 2351    let mut fake_js_servers = language_registry.register_fake_lsp(
 2352        "JavaScript",
 2353        FakeLspAdapter {
 2354            name: "js-lsp",
 2355            ..Default::default()
 2356        },
 2357    );
 2358    language_registry.add(rust_lang());
 2359    language_registry.add(js_lang());
 2360
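         // Open one buffer of each language so that both fake language servers start.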
 2361    let _rs_buffer = project
 2362        .update(cx, |project, cx| {
 2363            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
 2364        })
 2365        .await
 2366        .unwrap();
 2367    let _js_buffer = project
 2368        .update(cx, |project, cx| {
 2369            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
 2370        })
 2371        .await
 2372        .unwrap();
 2373
 2374    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
 2375    assert_eq!(
 2376        fake_rust_server_1
 2377            .receive_notification::<lsp::notification::DidOpenTextDocument>()
 2378            .await
 2379            .text_document
 2380            .uri
 2381            .as_str(),
 2382        uri!("file:///dir/a.rs")
 2383    );
 2384
 2385    let mut fake_js_server = fake_js_servers.next().await.unwrap();
 2386    assert_eq!(
 2387        fake_js_server
 2388            .receive_notification::<lsp::notification::DidOpenTextDocument>()
 2389            .await
 2390            .text_document
 2391            .uri
 2392            .as_str(),
 2393        uri!("file:///dir/b.js")
 2394    );
 2395
 2396    // Disable Rust language server, ensuring only that server gets stopped.
 2397    cx.update(|cx| {
 2398        SettingsStore::update_global(cx, |settings, cx| {
 2399            settings.update_user_settings(cx, |settings| {
 2400                settings.languages_mut().insert(
 2401                    "Rust".into(),
 2402                    LanguageSettingsContent {
 2403                        enable_language_server: Some(false),
 2404                        ..Default::default()
 2405                    },
 2406                );
 2407            });
 2408        })
 2409    });
 2410    fake_rust_server_1
 2411        .receive_notification::<lsp::notification::Exit>()
 2412        .await;
 2413
 2414    // Enable Rust and disable JavaScript language servers, ensuring that the
 2415    // former gets started again and that the latter stops.
 2416    cx.update(|cx| {
 2417        SettingsStore::update_global(cx, |settings, cx| {
 2418            settings.update_user_settings(cx, |settings| {
 2419                settings.languages_mut().insert(
 2420                    "Rust".into(),
 2421                    LanguageSettingsContent {
 2422                        enable_language_server: Some(true),
 2423                        ..Default::default()
 2424                    },
 2425                );
 2426                settings.languages_mut().insert(
 2427                    "JavaScript".into(),
 2428                    LanguageSettingsContent {
 2429                        enable_language_server: Some(false),
 2430                        ..Default::default()
 2431                    },
 2432                );
 2433            });
 2434        })
 2435    });
 2436    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
 2437    assert_eq!(
 2438        fake_rust_server_2
 2439            .receive_notification::<lsp::notification::DidOpenTextDocument>()
 2440            .await
 2441            .text_document
 2442            .uri
 2443            .as_str(),
 2444        uri!("file:///dir/a.rs")
 2445    );
 2446    fake_js_server
 2447        .receive_notification::<lsp::notification::Exit>()
 2448        .await;
 2449}
 2450
 2451#[gpui::test(iterations = 3)]
 2452async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
 2453    init_test(cx);
 2454
 2455    let text = "
 2456        fn a() { A }
 2457        fn b() { BB }
 2458        fn c() { CCC }
 2459    "
 2460    .unindent();
 2461
 2462    let fs = FakeFs::new(cx.executor());
 2463    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
 2464
 2465    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 2466    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 2467
 2468    language_registry.add(rust_lang());
 2469    let mut fake_servers = language_registry.register_fake_lsp(
 2470        "Rust",
 2471        FakeLspAdapter {
 2472            disk_based_diagnostics_sources: vec!["disk".into()],
 2473            ..Default::default()
 2474        },
 2475    );
 2476
 2477    let buffer = project
 2478        .update(cx, |project, cx| {
 2479            project.open_local_buffer(path!("/dir/a.rs"), cx)
 2480        })
 2481        .await
 2482        .unwrap();
 2483
 2484    let _handle = project.update(cx, |project, cx| {
 2485        project.register_buffer_with_language_servers(&buffer, cx)
 2486    });
 2487
 2488    let mut fake_server = fake_servers.next().await.unwrap();
 2489    let open_notification = fake_server
 2490        .receive_notification::<lsp::notification::DidOpenTextDocument>()
 2491        .await;
 2492
 2493    // Edit the buffer, moving the content down
 2494    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
 2495    let change_notification_1 = fake_server
 2496        .receive_notification::<lsp::notification::DidChangeTextDocument>()
 2497        .await;
 2498    assert!(change_notification_1.text_document.version > open_notification.text_document.version);
 2499
 2500    // Report some diagnostics for the initial version of the buffer
 2501    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
 2502        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
 2503        version: Some(open_notification.text_document.version),
 2504        diagnostics: vec![
 2505            lsp::Diagnostic {
 2506                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
 2507                severity: Some(DiagnosticSeverity::ERROR),
 2508                message: "undefined variable 'A'".to_string(),
 2509                source: Some("disk".to_string()),
 2510                ..Default::default()
 2511            },
 2512            lsp::Diagnostic {
 2513                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
 2514                severity: Some(DiagnosticSeverity::ERROR),
 2515                message: "undefined variable 'BB'".to_string(),
 2516                source: Some("disk".to_string()),
 2517                ..Default::default()
 2518            },
 2519            lsp::Diagnostic {
 2520                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
 2521                severity: Some(DiagnosticSeverity::ERROR),
 2522                source: Some("disk".to_string()),
 2523                message: "undefined variable 'CCC'".to_string(),
 2524                ..Default::default()
 2525            },
 2526        ],
 2527    });
 2528
 2529    // The diagnostics have moved down since they were created.
 2530    cx.executor().run_until_parked();
 2531    buffer.update(cx, |buffer, _| {
 2532        assert_eq!(
 2533            buffer
 2534                .snapshot()
 2535                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
 2536                .collect::<Vec<_>>(),
 2537            &[
 2538                DiagnosticEntry {
 2539                    range: Point::new(3, 9)..Point::new(3, 11),
 2540                    diagnostic: Diagnostic {
 2541                        source: Some("disk".into()),
 2542                        severity: DiagnosticSeverity::ERROR,
 2543                        message: "undefined variable 'BB'".to_string(),
 2544                        is_disk_based: true,
 2545                        group_id: 1,
 2546                        is_primary: true,
 2547                        source_kind: DiagnosticSourceKind::Pushed,
 2548                        ..Diagnostic::default()
 2549                    },
 2550                },
 2551                DiagnosticEntry {
 2552                    range: Point::new(4, 9)..Point::new(4, 12),
 2553                    diagnostic: Diagnostic {
 2554                        source: Some("disk".into()),
 2555                        severity: DiagnosticSeverity::ERROR,
 2556                        message: "undefined variable 'CCC'".to_string(),
 2557                        is_disk_based: true,
 2558                        group_id: 2,
 2559                        is_primary: true,
 2560                        source_kind: DiagnosticSourceKind::Pushed,
 2561                        ..Diagnostic::default()
 2562                    }
 2563                }
 2564            ]
 2565        );
 2566        assert_eq!(
 2567            chunks_with_diagnostics(buffer, 0..buffer.len()),
 2568            [
 2569                ("\n\nfn a() { ".to_string(), None),
 2570                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
 2571                (" }\nfn b() { ".to_string(), None),
 2572                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
 2573                (" }\nfn c() { ".to_string(), None),
 2574                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
 2575                (" }\n".to_string(), None),
 2576            ]
 2577        );
 2578        assert_eq!(
 2579            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
 2580            [
 2581                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
 2582                (" }\nfn c() { ".to_string(), None),
 2583                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
 2584            ]
 2585        );
 2586    });
 2587
 2588    // Ensure overlapping diagnostics are highlighted correctly.
 2589    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
 2590        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
 2591        version: Some(open_notification.text_document.version),
 2592        diagnostics: vec![
 2593            lsp::Diagnostic {
 2594                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
 2595                severity: Some(DiagnosticSeverity::ERROR),
 2596                message: "undefined variable 'A'".to_string(),
 2597                source: Some("disk".to_string()),
 2598                ..Default::default()
 2599            },
 2600            lsp::Diagnostic {
 2601                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
 2602                severity: Some(DiagnosticSeverity::WARNING),
 2603                message: "unreachable statement".to_string(),
 2604                source: Some("disk".to_string()),
 2605                ..Default::default()
 2606            },
 2607        ],
 2608    });
 2609
 2610    cx.executor().run_until_parked();
 2611    buffer.update(cx, |buffer, _| {
 2612        assert_eq!(
 2613            buffer
 2614                .snapshot()
 2615                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
 2616                .collect::<Vec<_>>(),
 2617            &[
 2618                DiagnosticEntry {
 2619                    range: Point::new(2, 9)..Point::new(2, 12),
 2620                    diagnostic: Diagnostic {
 2621                        source: Some("disk".into()),
 2622                        severity: DiagnosticSeverity::WARNING,
 2623                        message: "unreachable statement".to_string(),
 2624                        is_disk_based: true,
 2625                        group_id: 4,
 2626                        is_primary: true,
 2627                        source_kind: DiagnosticSourceKind::Pushed,
 2628                        ..Diagnostic::default()
 2629                    }
 2630                },
 2631                DiagnosticEntry {
 2632                    range: Point::new(2, 9)..Point::new(2, 10),
 2633                    diagnostic: Diagnostic {
 2634                        source: Some("disk".into()),
 2635                        severity: DiagnosticSeverity::ERROR,
 2636                        message: "undefined variable 'A'".to_string(),
 2637                        is_disk_based: true,
 2638                        group_id: 3,
 2639                        is_primary: true,
 2640                        source_kind: DiagnosticSourceKind::Pushed,
 2641                        ..Diagnostic::default()
 2642                    },
 2643                }
 2644            ]
 2645        );
 2646        assert_eq!(
 2647            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
 2648            [
 2649                ("fn a() { ".to_string(), None),
 2650                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
 2651                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
 2652                ("\n".to_string(), None),
 2653            ]
 2654        );
 2655        assert_eq!(
 2656            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
 2657            [
 2658                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
 2659                ("\n".to_string(), None),
 2660            ]
 2661        );
 2662    });
 2663
 2664    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
 2665    // changes since the last save.
 2666    buffer.update(cx, |buffer, cx| {
 2667        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
 2668        buffer.edit(
 2669            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
 2670            None,
 2671            cx,
 2672        );
 2673        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
 2674    });
 2675    let change_notification_2 = fake_server
 2676        .receive_notification::<lsp::notification::DidChangeTextDocument>()
 2677        .await;
 2678    assert!(
 2679        change_notification_2.text_document.version > change_notification_1.text_document.version
 2680    );
 2681
 2682    // Handle out-of-order diagnostics
 2683    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
 2684        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
 2685        version: Some(change_notification_2.text_document.version),
 2686        diagnostics: vec![
 2687            lsp::Diagnostic {
 2688                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
 2689                severity: Some(DiagnosticSeverity::ERROR),
 2690                message: "undefined variable 'BB'".to_string(),
 2691                source: Some("disk".to_string()),
 2692                ..Default::default()
 2693            },
 2694            lsp::Diagnostic {
 2695                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
 2696                severity: Some(DiagnosticSeverity::WARNING),
 2697                message: "undefined variable 'A'".to_string(),
 2698                source: Some("disk".to_string()),
 2699                ..Default::default()
 2700            },
 2701        ],
 2702    });
 2703
 2704    cx.executor().run_until_parked();
 2705    buffer.update(cx, |buffer, _| {
 2706        assert_eq!(
 2707            buffer
 2708                .snapshot()
 2709                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
 2710                .collect::<Vec<_>>(),
 2711            &[
 2712                DiagnosticEntry {
 2713                    range: Point::new(2, 21)..Point::new(2, 22),
 2714                    diagnostic: Diagnostic {
 2715                        source: Some("disk".into()),
 2716                        severity: DiagnosticSeverity::WARNING,
 2717                        message: "undefined variable 'A'".to_string(),
 2718                        is_disk_based: true,
 2719                        group_id: 6,
 2720                        is_primary: true,
 2721                        source_kind: DiagnosticSourceKind::Pushed,
 2722                        ..Diagnostic::default()
 2723                    }
 2724                },
 2725                DiagnosticEntry {
 2726                    range: Point::new(3, 9)..Point::new(3, 14),
 2727                    diagnostic: Diagnostic {
 2728                        source: Some("disk".into()),
 2729                        severity: DiagnosticSeverity::ERROR,
 2730                        message: "undefined variable 'BB'".to_string(),
 2731                        is_disk_based: true,
 2732                        group_id: 5,
 2733                        is_primary: true,
 2734                        source_kind: DiagnosticSourceKind::Pushed,
 2735                        ..Diagnostic::default()
 2736                    },
 2737                }
 2738            ]
 2739        );
 2740    });
 2741}
 2742
 2743#[gpui::test]
 2744async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
 2745    init_test(cx);
 2746
 2747    let text = concat!(
 2748        "let one = ;\n", //
 2749        "let two = \n",
 2750        "let three = 3;\n",
 2751    );
 2752
 2753    let fs = FakeFs::new(cx.executor());
 2754    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
 2755
 2756    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
 2757    let buffer = project
 2758        .update(cx, |project, cx| {
 2759            project.open_local_buffer(path!("/dir/a.rs"), cx)
 2760        })
 2761        .await
 2762        .unwrap();
 2763
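         // Report two zero-width diagnostics: one in the middle of a line and one at the end of a line.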
 2764    project.update(cx, |project, cx| {
 2765        project.lsp_store.update(cx, |lsp_store, cx| {
 2766            lsp_store
 2767                .update_diagnostic_entries(
 2768                    LanguageServerId(0),
 2769                    PathBuf::from(path!("/dir/a.rs")),
 2770                    None,
 2771                    None,
 2772                    vec![
 2773                        DiagnosticEntry {
 2774                            range: Unclipped(PointUtf16::new(0, 10))
 2775                                ..Unclipped(PointUtf16::new(0, 10)),
 2776                            diagnostic: Diagnostic {
 2777                                severity: DiagnosticSeverity::ERROR,
 2778                                message: "syntax error 1".to_string(),
 2779                                source_kind: DiagnosticSourceKind::Pushed,
 2780                                ..Diagnostic::default()
 2781                            },
 2782                        },
 2783                        DiagnosticEntry {
 2784                            range: Unclipped(PointUtf16::new(1, 10))
 2785                                ..Unclipped(PointUtf16::new(1, 10)),
 2786                            diagnostic: Diagnostic {
 2787                                severity: DiagnosticSeverity::ERROR,
 2788                                message: "syntax error 2".to_string(),
 2789                                source_kind: DiagnosticSourceKind::Pushed,
 2790                                ..Diagnostic::default()
 2791                            },
 2792                        },
 2793                    ],
 2794                    cx,
 2795                )
 2796                .unwrap();
 2797        })
 2798    });
 2799
 2800    // An empty range is extended forward to include the following character.
 2801    // At the end of a line, an empty range is extended backward to include
 2802    // the preceding character.
 2803    buffer.update(cx, |buffer, _| {
 2804        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
 2805        assert_eq!(
 2806            chunks
 2807                .iter()
 2808                .map(|(s, d)| (s.as_str(), *d))
 2809                .collect::<Vec<_>>(),
 2810            &[
 2811                ("let one = ", None),
 2812                (";", Some(DiagnosticSeverity::ERROR)),
 2813                ("\nlet two =", None),
 2814                (" ", Some(DiagnosticSeverity::ERROR)),
 2815                ("\nlet three = 3;\n", None)
 2816            ]
 2817        );
 2818    });
 2819}
 2820
 2821#[gpui::test]
 2822async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
 2823    init_test(cx);
 2824
 2825    let fs = FakeFs::new(cx.executor());
 2826    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
 2827        .await;
 2828
 2829    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
 2830    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
 2831
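         // Report a diagnostic on the same range from two different language servers; both should be counted in the summary.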
 2832    lsp_store.update(cx, |lsp_store, cx| {
 2833        lsp_store
 2834            .update_diagnostic_entries(
 2835                LanguageServerId(0),
 2836                Path::new(path!("/dir/a.rs")).to_owned(),
 2837                None,
 2838                None,
 2839                vec![DiagnosticEntry {
 2840                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
 2841                    diagnostic: Diagnostic {
 2842                        severity: DiagnosticSeverity::ERROR,
 2843                        is_primary: true,
 2844                        message: "syntax error a1".to_string(),
 2845                        source_kind: DiagnosticSourceKind::Pushed,
 2846                        ..Diagnostic::default()
 2847                    },
 2848                }],
 2849                cx,
 2850            )
 2851            .unwrap();
 2852        lsp_store
 2853            .update_diagnostic_entries(
 2854                LanguageServerId(1),
 2855                Path::new(path!("/dir/a.rs")).to_owned(),
 2856                None,
 2857                None,
 2858                vec![DiagnosticEntry {
 2859                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
 2860                    diagnostic: Diagnostic {
 2861                        severity: DiagnosticSeverity::ERROR,
 2862                        is_primary: true,
 2863                        message: "syntax error b1".to_string(),
 2864                        source_kind: DiagnosticSourceKind::Pushed,
 2865                        ..Diagnostic::default()
 2866                    },
 2867                }],
 2868                cx,
 2869            )
 2870            .unwrap();
 2871
 2872        assert_eq!(
 2873            lsp_store.diagnostic_summary(false, cx),
 2874            DiagnosticSummary {
 2875                error_count: 2,
 2876                warning_count: 0,
 2877            }
 2878        );
 2879    });
 2880}
 2881
 2882#[gpui::test]
 2883async fn test_disk_based_diagnostics_not_reused(cx: &mut gpui::TestAppContext) {
 2884    init_test(cx);
 2885
 2886    let fs = FakeFs::new(cx.executor());
 2887    fs.insert_tree(path!("/dir"), json!({ "a.rs": "fn a() { A }" }))
 2888        .await;
 2889
 2890    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
 2891    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
 2892    let buffer = project
 2893        .update(cx, |project, cx| {
 2894            project.open_local_buffer(path!("/dir/a.rs"), cx)
 2895        })
 2896        .await
 2897        .unwrap();
 2898
 2899    // Set up an initial disk-based diagnostic, which will become stale, so we can
 2900    // later assert that it is ignored when merging diagnostic entries.
 2901    let diagnostic = DiagnosticEntry {
 2902        range: Unclipped(PointUtf16::new(0, 9))..Unclipped(PointUtf16::new(0, 10)),
 2903        diagnostic: Diagnostic {
 2904            is_disk_based: true,
 2905            ..Diagnostic::default()
 2906        },
 2907    };
 2908    let diagnostic_update = DocumentDiagnosticsUpdate {
 2909        diagnostics: DocumentDiagnostics::new(
 2910            vec![diagnostic],
 2911            PathBuf::from(path!("/dir/a.rs")),
 2912            None,
 2913        ),
 2914        result_id: None,
 2915        server_id: LanguageServerId(0),
 2916        disk_based_sources: Cow::Borrowed(&[]),
 2917        registration_id: None,
 2918    };
 2919    lsp_store.update(cx, |lsp_store, cx| {
 2920        lsp_store
 2921            .merge_diagnostic_entries(vec![diagnostic_update], |_, _, _| false, cx)
 2922            .unwrap();
 2923    });
 2924
  2925    // Quick sanity check: ensure that the initial diagnostic is part of the
  2926    // buffer's diagnostics.
 2927    buffer.update(cx, |buffer, _| {
 2928        let snapshot = buffer.snapshot();
 2929        let diagnostics: Vec<_> = snapshot
 2930            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
 2931            .collect();
 2932
 2933        assert_eq!(diagnostics.len(), 1);
 2934    });
 2935
  2936    // Now merge the diagnostic entries with a new update that contains no
  2937    // diagnostics. This time around, the `merge` closure returns `true`,
  2938    // requesting that old diagnostics be retained, so the first diagnostic
  2939    // is initially added to the full list of diagnostics even though it'll
  2940    // later be ignored.
 2941    let diagnostic_update = lsp_store::DocumentDiagnosticsUpdate {
 2942        diagnostics: lsp_store::DocumentDiagnostics::new(
 2943            vec![],
 2944            PathBuf::from(path!("/dir/a.rs")),
 2945            None,
 2946        ),
 2947        result_id: None,
 2948        server_id: LanguageServerId(0),
 2949        disk_based_sources: Cow::Borrowed(&[]),
 2950        registration_id: None,
 2951    };
 2952    lsp_store.update(cx, |lsp_store, cx| {
 2953        lsp_store
 2954            .merge_diagnostic_entries(vec![diagnostic_update], |_, _, _| true, cx)
 2955            .unwrap();
 2956    });
 2957
  2958    // We can now assert that the initial, disk-based diagnostic has been
  2959    // removed from the buffer's diagnostics, even though the `merge` closure
  2960    // returned `true`, indicating that the old diagnostic could be reused.
  2961    // The old disk-based diagnostic should be gone, NOT retained.
 2962    buffer.update(cx, |buffer, _| {
 2963        let snapshot = buffer.snapshot();
 2964        let diagnostics: Vec<_> = snapshot
 2965            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
 2966            .collect();
 2967
 2968        assert_eq!(diagnostics.len(), 0);
 2969    });
 2970}
 2971
 2972#[gpui::test]
 2973async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
 2974    init_test(cx);
 2975
 2976    let text = "
 2977        fn a() {
 2978            f1();
 2979        }
 2980        fn b() {
 2981            f2();
 2982        }
 2983        fn c() {
 2984            f3();
 2985        }
 2986    "
 2987    .unindent();
 2988
 2989    let fs = FakeFs::new(cx.executor());
 2990    fs.insert_tree(
 2991        path!("/dir"),
 2992        json!({
 2993            "a.rs": text.clone(),
 2994        }),
 2995    )
 2996    .await;
 2997
 2998    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 2999    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
 3000
 3001    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 3002    language_registry.add(rust_lang());
 3003    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
 3004
 3005    let (buffer, _handle) = project
 3006        .update(cx, |project, cx| {
 3007            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
 3008        })
 3009        .await
 3010        .unwrap();
 3011
 3012    let mut fake_server = fake_servers.next().await.unwrap();
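          // Record the document version from the server's initial `didOpen` notification.
          // The LSP edits below will reference this soon-to-be-stale version.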
 3013    let lsp_document_version = fake_server
 3014        .receive_notification::<lsp::notification::DidOpenTextDocument>()
 3015        .await
 3016        .text_document
 3017        .version;
 3018
 3019    // Simulate editing the buffer after the language server computes some edits.
 3020    buffer.update(cx, |buffer, cx| {
 3021        buffer.edit(
 3022            [(
 3023                Point::new(0, 0)..Point::new(0, 0),
 3024                "// above first function\n",
 3025            )],
 3026            None,
 3027            cx,
 3028        );
 3029        buffer.edit(
 3030            [(
 3031                Point::new(2, 0)..Point::new(2, 0),
 3032                "    // inside first function\n",
 3033            )],
 3034            None,
 3035            cx,
 3036        );
 3037        buffer.edit(
 3038            [(
 3039                Point::new(6, 4)..Point::new(6, 4),
 3040                "// inside second function ",
 3041            )],
 3042            None,
 3043            cx,
 3044        );
 3045
 3046        assert_eq!(
 3047            buffer.text(),
 3048            "
 3049                // above first function
 3050                fn a() {
 3051                    // inside first function
 3052                    f1();
 3053                }
 3054                fn b() {
 3055                    // inside second function f2();
 3056                }
 3057                fn c() {
 3058                    f3();
 3059                }
 3060            "
 3061            .unindent()
 3062        );
 3063    });
 3064
 3065    let edits = lsp_store
 3066        .update(cx, |lsp_store, cx| {
 3067            lsp_store.as_local_mut().unwrap().edits_from_lsp(
 3068                &buffer,
 3069                vec![
 3070                    // replace body of first function
 3071                    lsp::TextEdit {
 3072                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
 3073                        new_text: "
 3074                            fn a() {
 3075                                f10();
 3076                            }
 3077                            "
 3078                        .unindent(),
 3079                    },
 3080                    // edit inside second function
 3081                    lsp::TextEdit {
 3082                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
 3083                        new_text: "00".into(),
 3084                    },
 3085                    // edit inside third function via two distinct edits
 3086                    lsp::TextEdit {
 3087                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
 3088                        new_text: "4000".into(),
 3089                    },
 3090                    lsp::TextEdit {
 3091                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
 3092                        new_text: "".into(),
 3093                    },
 3094                ],
 3095                LanguageServerId(0),
 3096                Some(lsp_document_version),
 3097                cx,
 3098            )
 3099        })
 3100        .await
 3101        .unwrap();
 3102
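          // Apply the edits translated from the stale document version; they should land
          // on the buffer's current text without clobbering the edits made above.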
 3103    buffer.update(cx, |buffer, cx| {
 3104        for (range, new_text) in edits {
 3105            buffer.edit([(range, new_text)], None, cx);
 3106        }
 3107        assert_eq!(
 3108            buffer.text(),
 3109            "
 3110                // above first function
 3111                fn a() {
 3112                    // inside first function
 3113                    f10();
 3114                }
 3115                fn b() {
 3116                    // inside second function f200();
 3117                }
 3118                fn c() {
 3119                    f4000();
 3120                }
 3121                "
 3122            .unindent()
 3123        );
 3124    });
 3125}
 3126
 3127#[gpui::test]
 3128async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
 3129    init_test(cx);
 3130
 3131    let text = "
 3132        use a::b;
 3133        use a::c;
 3134
 3135        fn f() {
 3136            b();
 3137            c();
 3138        }
 3139    "
 3140    .unindent();
 3141
 3142    let fs = FakeFs::new(cx.executor());
 3143    fs.insert_tree(
 3144        path!("/dir"),
 3145        json!({
 3146            "a.rs": text.clone(),
 3147        }),
 3148    )
 3149    .await;
 3150
 3151    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 3152    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
 3153    let buffer = project
 3154        .update(cx, |project, cx| {
 3155            project.open_local_buffer(path!("/dir/a.rs"), cx)
 3156        })
 3157        .await
 3158        .unwrap();
 3159
 3160    // Simulate the language server sending us a small edit in the form of a very large diff.
 3161    // Rust-analyzer does this when performing a merge-imports code action.
 3162    let edits = lsp_store
 3163        .update(cx, |lsp_store, cx| {
 3164            lsp_store.as_local_mut().unwrap().edits_from_lsp(
 3165                &buffer,
 3166                [
 3167                    // Replace the first use statement without editing the semicolon.
 3168                    lsp::TextEdit {
 3169                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
 3170                        new_text: "a::{b, c}".into(),
 3171                    },
 3172                    // Reinsert the remainder of the file between the semicolon and the final
 3173                    // newline of the file.
 3174                    lsp::TextEdit {
 3175                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
 3176                        new_text: "\n\n".into(),
 3177                    },
 3178                    lsp::TextEdit {
 3179                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
 3180                        new_text: "
 3181                            fn f() {
 3182                                b();
 3183                                c();
 3184                            }"
 3185                        .unindent(),
 3186                    },
 3187                    // Delete everything after the first newline of the file.
 3188                    lsp::TextEdit {
 3189                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
 3190                        new_text: "".into(),
 3191                    },
 3192                ],
 3193                LanguageServerId(0),
 3194                None,
 3195                cx,
 3196            )
 3197        })
 3198        .await
 3199        .unwrap();
 3200
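          // The very large diff should have been reduced to a minimal pair of edits:
          // rewriting the first `use` statement and deleting the second one.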
 3201    buffer.update(cx, |buffer, cx| {
 3202        let edits = edits
 3203            .into_iter()
 3204            .map(|(range, text)| {
 3205                (
 3206                    range.start.to_point(buffer)..range.end.to_point(buffer),
 3207                    text,
 3208                )
 3209            })
 3210            .collect::<Vec<_>>();
 3211
 3212        assert_eq!(
 3213            edits,
 3214            [
 3215                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
 3216                (Point::new(1, 0)..Point::new(2, 0), "".into())
 3217            ]
 3218        );
 3219
 3220        for (range, new_text) in edits {
 3221            buffer.edit([(range, new_text)], None, cx);
 3222        }
 3223        assert_eq!(
 3224            buffer.text(),
 3225            "
 3226                use a::{b, c};
 3227
 3228                fn f() {
 3229                    b();
 3230                    c();
 3231                }
 3232            "
 3233            .unindent()
 3234        );
 3235    });
 3236}
 3237
 3238#[gpui::test]
 3239async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
 3240    cx: &mut gpui::TestAppContext,
 3241) {
 3242    init_test(cx);
 3243
 3244    let text = "Path()";
 3245
 3246    let fs = FakeFs::new(cx.executor());
 3247    fs.insert_tree(
 3248        path!("/dir"),
 3249        json!({
 3250            "a.rs": text
 3251        }),
 3252    )
 3253    .await;
 3254
 3255    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 3256    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
 3257    let buffer = project
 3258        .update(cx, |project, cx| {
 3259            project.open_local_buffer(path!("/dir/a.rs"), cx)
 3260        })
 3261        .await
 3262        .unwrap();
 3263
 3264    // Simulate the language server sending us a pair of edits at the same location,
 3265    // with an insertion following a replacement (which violates the LSP spec).
 3266    let edits = lsp_store
 3267        .update(cx, |lsp_store, cx| {
 3268            lsp_store.as_local_mut().unwrap().edits_from_lsp(
 3269                &buffer,
 3270                [
 3271                    lsp::TextEdit {
 3272                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
 3273                        new_text: "Path".into(),
 3274                    },
 3275                    lsp::TextEdit {
 3276                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
 3277                        new_text: "from path import Path\n\n\n".into(),
 3278                    },
 3279                ],
 3280                LanguageServerId(0),
 3281                None,
 3282                cx,
 3283            )
 3284        })
 3285        .await
 3286        .unwrap();
 3287
 3288    buffer.update(cx, |buffer, cx| {
 3289        buffer.edit(edits, None, cx);
 3290        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
 3291    });
 3292}
 3293
 3294#[gpui::test]
 3295async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
 3296    init_test(cx);
 3297
 3298    let text = "
 3299        use a::b;
 3300        use a::c;
 3301
 3302        fn f() {
 3303            b();
 3304            c();
 3305        }
 3306    "
 3307    .unindent();
 3308
 3309    let fs = FakeFs::new(cx.executor());
 3310    fs.insert_tree(
 3311        path!("/dir"),
 3312        json!({
 3313            "a.rs": text.clone(),
 3314        }),
 3315    )
 3316    .await;
 3317
 3318    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 3319    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
 3320    let buffer = project
 3321        .update(cx, |project, cx| {
 3322            project.open_local_buffer(path!("/dir/a.rs"), cx)
 3323        })
 3324        .await
 3325        .unwrap();
 3326
  3327    // Simulate the language server sending us edits out of order, with ranges
  3328    // sometimes inverted or pointing to invalid locations.
 3329    let edits = lsp_store
 3330        .update(cx, |lsp_store, cx| {
 3331            lsp_store.as_local_mut().unwrap().edits_from_lsp(
 3332                &buffer,
 3333                [
 3334                    lsp::TextEdit {
 3335                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
 3336                        new_text: "\n\n".into(),
 3337                    },
 3338                    lsp::TextEdit {
 3339                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
 3340                        new_text: "a::{b, c}".into(),
 3341                    },
 3342                    lsp::TextEdit {
 3343                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
 3344                        new_text: "".into(),
 3345                    },
 3346                    lsp::TextEdit {
 3347                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
 3348                        new_text: "
 3349                            fn f() {
 3350                                b();
 3351                                c();
 3352                            }"
 3353                        .unindent(),
 3354                    },
 3355                ],
 3356                LanguageServerId(0),
 3357                None,
 3358                cx,
 3359            )
 3360        })
 3361        .await
 3362        .unwrap();
 3363
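          // Despite the inverted and out-of-range input, the normalized edits should match
          // the well-formed case above: rewrite the first `use` statement, delete the second.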
 3364    buffer.update(cx, |buffer, cx| {
 3365        let edits = edits
 3366            .into_iter()
 3367            .map(|(range, text)| {
 3368                (
 3369                    range.start.to_point(buffer)..range.end.to_point(buffer),
 3370                    text,
 3371                )
 3372            })
 3373            .collect::<Vec<_>>();
 3374
 3375        assert_eq!(
 3376            edits,
 3377            [
 3378                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
 3379                (Point::new(1, 0)..Point::new(2, 0), "".into())
 3380            ]
 3381        );
 3382
 3383        for (range, new_text) in edits {
 3384            buffer.edit([(range, new_text)], None, cx);
 3385        }
 3386        assert_eq!(
 3387            buffer.text(),
 3388            "
 3389                use a::{b, c};
 3390
 3391                fn f() {
 3392                    b();
 3393                    c();
 3394                }
 3395            "
 3396            .unindent()
 3397        );
 3398    });
 3399}
 3400
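      /// Collects the chunks of `buffer` within `range`, concatenating adjacent chunks that
      /// share the same diagnostic severity, so tests can assert on highlighted spans.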
 3401fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
 3402    buffer: &Buffer,
 3403    range: Range<T>,
 3404) -> Vec<(String, Option<DiagnosticSeverity>)> {
 3405    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
 3406    for chunk in buffer.snapshot().chunks(range, true) {
 3407        if chunks
 3408            .last()
 3409            .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
 3410        {
 3411            chunks.last_mut().unwrap().0.push_str(chunk.text);
 3412        } else {
 3413            chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
 3414        }
 3415    }
 3416    chunks
 3417}
 3418
 3419#[gpui::test(iterations = 10)]
 3420async fn test_definition(cx: &mut gpui::TestAppContext) {
 3421    init_test(cx);
 3422
 3423    let fs = FakeFs::new(cx.executor());
 3424    fs.insert_tree(
 3425        path!("/dir"),
 3426        json!({
 3427            "a.rs": "const fn a() { A }",
 3428            "b.rs": "const y: i32 = crate::a()",
 3429        }),
 3430    )
 3431    .await;
 3432
 3433    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;
 3434
 3435    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 3436    language_registry.add(rust_lang());
 3437    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
 3438
 3439    let (buffer, _handle) = project
 3440        .update(cx, |project, cx| {
 3441            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
 3442        })
 3443        .await
 3444        .unwrap();
 3445
 3446    let fake_server = fake_servers.next().await.unwrap();
 3447    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
 3448        let params = params.text_document_position_params;
 3449        assert_eq!(
 3450            params.text_document.uri.to_file_path().unwrap(),
 3451            Path::new(path!("/dir/b.rs")),
 3452        );
 3453        assert_eq!(params.position, lsp::Position::new(0, 22));
 3454
 3455        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
 3456            lsp::Location::new(
 3457                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
 3458                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
 3459            ),
 3460        )))
 3461    });
 3462    let mut definitions = project
 3463        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
 3464        .await
 3465        .unwrap()
 3466        .unwrap();
 3467
  3468    // Assert that no new language server was started.
 3469    cx.executor().run_until_parked();
 3470    assert!(fake_servers.try_next().is_err());
 3471
 3472    assert_eq!(definitions.len(), 1);
 3473    let definition = definitions.pop().unwrap();
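          // The definition target lives outside the project's only worktree, so it is held
          // in an extra, invisible worktree that should be released once `definition` drops.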
 3474    cx.update(|cx| {
 3475        let target_buffer = definition.target.buffer.read(cx);
 3476        assert_eq!(
 3477            target_buffer
 3478                .file()
 3479                .unwrap()
 3480                .as_local()
 3481                .unwrap()
 3482                .abs_path(cx),
 3483            Path::new(path!("/dir/a.rs")),
 3484        );
 3485        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
 3486        assert_eq!(
 3487            list_worktrees(&project, cx),
 3488            [
 3489                (path!("/dir/a.rs").as_ref(), false),
 3490                (path!("/dir/b.rs").as_ref(), true)
 3491            ],
 3492        );
 3493
 3494        drop(definition);
 3495    });
 3496    cx.update(|cx| {
 3497        assert_eq!(
 3498            list_worktrees(&project, cx),
 3499            [(path!("/dir/b.rs").as_ref(), true)]
 3500        );
 3501    });
 3502
 3503    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
 3504        project
 3505            .read(cx)
 3506            .worktrees(cx)
 3507            .map(|worktree| {
 3508                let worktree = worktree.read(cx);
 3509                (
 3510                    worktree.as_local().unwrap().abs_path().as_ref(),
 3511                    worktree.is_visible(),
 3512                )
 3513            })
 3514            .collect::<Vec<_>>()
 3515    }
 3516}
 3517
 3518#[gpui::test]
 3519async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
 3520    init_test(cx);
 3521
 3522    let fs = FakeFs::new(cx.executor());
 3523    fs.insert_tree(
 3524        path!("/dir"),
 3525        json!({
 3526            "a.ts": "",
 3527        }),
 3528    )
 3529    .await;
 3530
 3531    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 3532
 3533    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 3534    language_registry.add(typescript_lang());
 3535    let mut fake_language_servers = language_registry.register_fake_lsp(
 3536        "TypeScript",
 3537        FakeLspAdapter {
 3538            capabilities: lsp::ServerCapabilities {
 3539                completion_provider: Some(lsp::CompletionOptions {
 3540                    trigger_characters: Some(vec![".".to_string()]),
 3541                    ..Default::default()
 3542                }),
 3543                ..Default::default()
 3544            },
 3545            ..Default::default()
 3546        },
 3547    );
 3548
 3549    let (buffer, _handle) = project
 3550        .update(cx, |p, cx| {
 3551            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
 3552        })
 3553        .await
 3554        .unwrap();
 3555
 3556    let fake_server = fake_language_servers.next().await.unwrap();
 3557
 3558    // When text_edit exists, it takes precedence over insert_text and label
 3559    let text = "let a = obj.fqn";
 3560    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
 3561    let completions = project.update(cx, |project, cx| {
 3562        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
 3563    });
 3564
 3565    fake_server
 3566        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
 3567            Ok(Some(lsp::CompletionResponse::Array(vec![
 3568                lsp::CompletionItem {
 3569                    label: "labelText".into(),
 3570                    insert_text: Some("insertText".into()),
 3571                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
 3572                        range: lsp::Range::new(
 3573                            lsp::Position::new(0, text.len() as u32 - 3),
 3574                            lsp::Position::new(0, text.len() as u32),
 3575                        ),
 3576                        new_text: "textEditText".into(),
 3577                    })),
 3578                    ..Default::default()
 3579                },
 3580            ])))
 3581        })
 3582        .next()
 3583        .await;
 3584
 3585    let completions = completions
 3586        .await
 3587        .unwrap()
 3588        .into_iter()
 3589        .flat_map(|response| response.completions)
 3590        .collect::<Vec<_>>();
 3591    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
 3592
 3593    assert_eq!(completions.len(), 1);
 3594    assert_eq!(completions[0].new_text, "textEditText");
 3595    assert_eq!(
 3596        completions[0].replace_range.to_offset(&snapshot),
 3597        text.len() - 3..text.len()
 3598    );
 3599}
 3600
 3601#[gpui::test]
 3602async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
 3603    init_test(cx);
 3604
 3605    let fs = FakeFs::new(cx.executor());
 3606    fs.insert_tree(
 3607        path!("/dir"),
 3608        json!({
 3609            "a.ts": "",
 3610        }),
 3611    )
 3612    .await;
 3613
 3614    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 3615
 3616    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 3617    language_registry.add(typescript_lang());
 3618    let mut fake_language_servers = language_registry.register_fake_lsp(
 3619        "TypeScript",
 3620        FakeLspAdapter {
 3621            capabilities: lsp::ServerCapabilities {
 3622                completion_provider: Some(lsp::CompletionOptions {
 3623                    trigger_characters: Some(vec![".".to_string()]),
 3624                    ..Default::default()
 3625                }),
 3626                ..Default::default()
 3627            },
 3628            ..Default::default()
 3629        },
 3630    );
 3631
 3632    let (buffer, _handle) = project
 3633        .update(cx, |p, cx| {
 3634            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
 3635        })
 3636        .await
 3637        .unwrap();
 3638
 3639    let fake_server = fake_language_servers.next().await.unwrap();
 3640    let text = "let a = obj.fqn";
 3641
 3642    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
 3643    {
 3644        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
 3645        let completions = project.update(cx, |project, cx| {
 3646            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
 3647        });
 3648
 3649        fake_server
 3650            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
 3651                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
 3652                    is_incomplete: false,
 3653                    item_defaults: Some(lsp::CompletionListItemDefaults {
 3654                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
 3655                            lsp::Range::new(
 3656                                lsp::Position::new(0, text.len() as u32 - 3),
 3657                                lsp::Position::new(0, text.len() as u32),
 3658                            ),
 3659                        )),
 3660                        ..Default::default()
 3661                    }),
 3662                    items: vec![lsp::CompletionItem {
 3663                        label: "labelText".into(),
 3664                        text_edit_text: Some("textEditText".into()),
 3665                        text_edit: None,
 3666                        ..Default::default()
 3667                    }],
 3668                })))
 3669            })
 3670            .next()
 3671            .await;
 3672
 3673        let completions = completions
 3674            .await
 3675            .unwrap()
 3676            .into_iter()
 3677            .flat_map(|response| response.completions)
 3678            .collect::<Vec<_>>();
 3679        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
 3680
 3681        assert_eq!(completions.len(), 1);
 3682        assert_eq!(completions[0].new_text, "textEditText");
 3683        assert_eq!(
 3684            completions[0].replace_range.to_offset(&snapshot),
 3685            text.len() - 3..text.len()
 3686        );
 3687    }
 3688
 3689    // Test 2: When both text_edit and text_edit_text are None with default edit_range
 3690    {
 3691        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
 3692        let completions = project.update(cx, |project, cx| {
 3693            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
 3694        });
 3695
 3696        fake_server
 3697            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
 3698                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
 3699                    is_incomplete: false,
 3700                    item_defaults: Some(lsp::CompletionListItemDefaults {
 3701                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
 3702                            lsp::Range::new(
 3703                                lsp::Position::new(0, text.len() as u32 - 3),
 3704                                lsp::Position::new(0, text.len() as u32),
 3705                            ),
 3706                        )),
 3707                        ..Default::default()
 3708                    }),
 3709                    items: vec![lsp::CompletionItem {
 3710                        label: "labelText".into(),
 3711                        text_edit_text: None,
 3712                        insert_text: Some("irrelevant".into()),
 3713                        text_edit: None,
 3714                        ..Default::default()
 3715                    }],
 3716                })))
 3717            })
 3718            .next()
 3719            .await;
 3720
 3721        let completions = completions
 3722            .await
 3723            .unwrap()
 3724            .into_iter()
 3725            .flat_map(|response| response.completions)
 3726            .collect::<Vec<_>>();
 3727        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
 3728
 3729        assert_eq!(completions.len(), 1);
 3730        assert_eq!(completions[0].new_text, "labelText");
 3731        assert_eq!(
 3732            completions[0].replace_range.to_offset(&snapshot),
 3733            text.len() - 3..text.len()
 3734        );
 3735    }
 3736}
 3737
 3738#[gpui::test]
 3739async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
 3740    init_test(cx);
 3741
 3742    let fs = FakeFs::new(cx.executor());
 3743    fs.insert_tree(
 3744        path!("/dir"),
 3745        json!({
 3746            "a.ts": "",
 3747        }),
 3748    )
 3749    .await;
 3750
 3751    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 3752
 3753    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 3754    language_registry.add(typescript_lang());
 3755    let mut fake_language_servers = language_registry.register_fake_lsp(
 3756        "TypeScript",
 3757        FakeLspAdapter {
 3758            capabilities: lsp::ServerCapabilities {
 3759                completion_provider: Some(lsp::CompletionOptions {
 3760                    trigger_characters: Some(vec![":".to_string()]),
 3761                    ..Default::default()
 3762                }),
 3763                ..Default::default()
 3764            },
 3765            ..Default::default()
 3766        },
 3767    );
 3768
 3769    let (buffer, _handle) = project
 3770        .update(cx, |p, cx| {
 3771            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
 3772        })
 3773        .await
 3774        .unwrap();
 3775
 3776    let fake_server = fake_language_servers.next().await.unwrap();
 3777
 3778    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
 3779    let text = "let a = b.fqn";
 3780    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
 3781    let completions = project.update(cx, |project, cx| {
 3782        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
 3783    });
 3784
 3785    fake_server
 3786        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
 3787            Ok(Some(lsp::CompletionResponse::Array(vec![
 3788                lsp::CompletionItem {
 3789                    label: "fullyQualifiedName?".into(),
 3790                    insert_text: Some("fullyQualifiedName".into()),
 3791                    ..Default::default()
 3792                },
 3793            ])))
 3794        })
 3795        .next()
 3796        .await;
 3797    let completions = completions
 3798        .await
 3799        .unwrap()
 3800        .into_iter()
 3801        .flat_map(|response| response.completions)
 3802        .collect::<Vec<_>>();
 3803    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
 3804    assert_eq!(completions.len(), 1);
 3805    assert_eq!(completions[0].new_text, "fullyQualifiedName");
 3806    assert_eq!(
 3807        completions[0].replace_range.to_offset(&snapshot),
 3808        text.len() - 3..text.len()
 3809    );
 3810
 3811    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
 3812    let text = "let a = \"atoms/cmp\"";
 3813    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
 3814    let completions = project.update(cx, |project, cx| {
 3815        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
 3816    });
 3817
 3818    fake_server
 3819        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
 3820            Ok(Some(lsp::CompletionResponse::Array(vec![
 3821                lsp::CompletionItem {
 3822                    label: "component".into(),
 3823                    ..Default::default()
 3824                },
 3825            ])))
 3826        })
 3827        .next()
 3828        .await;
 3829    let completions = completions
 3830        .await
 3831        .unwrap()
 3832        .into_iter()
 3833        .flat_map(|response| response.completions)
 3834        .collect::<Vec<_>>();
 3835    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
 3836    assert_eq!(completions.len(), 1);
 3837    assert_eq!(completions[0].new_text, "component");
 3838    assert_eq!(
 3839        completions[0].replace_range.to_offset(&snapshot),
 3840        text.len() - 4..text.len() - 1
 3841    );
 3842}
 3843
 3844#[gpui::test]
 3845async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
 3846    init_test(cx);
 3847
 3848    let fs = FakeFs::new(cx.executor());
 3849    fs.insert_tree(
 3850        path!("/dir"),
 3851        json!({
 3852            "a.ts": "",
 3853        }),
 3854    )
 3855    .await;
 3856
 3857    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 3858
 3859    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 3860    language_registry.add(typescript_lang());
 3861    let mut fake_language_servers = language_registry.register_fake_lsp(
 3862        "TypeScript",
 3863        FakeLspAdapter {
 3864            capabilities: lsp::ServerCapabilities {
 3865                completion_provider: Some(lsp::CompletionOptions {
 3866                    trigger_characters: Some(vec![":".to_string()]),
 3867                    ..Default::default()
 3868                }),
 3869                ..Default::default()
 3870            },
 3871            ..Default::default()
 3872        },
 3873    );
 3874
 3875    let (buffer, _handle) = project
 3876        .update(cx, |p, cx| {
 3877            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
 3878        })
 3879        .await
 3880        .unwrap();
 3881
 3882    let fake_server = fake_language_servers.next().await.unwrap();
 3883
 3884    let text = "let a = b.fqn";
 3885    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
 3886    let completions = project.update(cx, |project, cx| {
 3887        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
 3888    });
 3889
 3890    fake_server
 3891        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
 3892            Ok(Some(lsp::CompletionResponse::Array(vec![
 3893                lsp::CompletionItem {
 3894                    label: "fullyQualifiedName?".into(),
 3895                    insert_text: Some("fully\rQualified\r\nName".into()),
 3896                    ..Default::default()
 3897                },
 3898            ])))
 3899        })
 3900        .next()
 3901        .await;
 3902    let completions = completions
 3903        .await
 3904        .unwrap()
 3905        .into_iter()
 3906        .flat_map(|response| response.completions)
 3907        .collect::<Vec<_>>();
 3908    assert_eq!(completions.len(), 1);
 3909    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
 3910}
 3911
 3912#[gpui::test(iterations = 10)]
 3913async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
 3914    init_test(cx);
 3915
 3916    let fs = FakeFs::new(cx.executor());
 3917    fs.insert_tree(
 3918        path!("/dir"),
 3919        json!({
 3920            "a.ts": "a",
 3921        }),
 3922    )
 3923    .await;
 3924
 3925    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 3926
 3927    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 3928    language_registry.add(typescript_lang());
 3929    let mut fake_language_servers = language_registry.register_fake_lsp(
 3930        "TypeScript",
 3931        FakeLspAdapter {
 3932            capabilities: lsp::ServerCapabilities {
 3933                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
 3934                    lsp::CodeActionOptions {
 3935                        resolve_provider: Some(true),
 3936                        ..lsp::CodeActionOptions::default()
 3937                    },
 3938                )),
 3939                execute_command_provider: Some(lsp::ExecuteCommandOptions {
 3940                    commands: vec!["_the/command".to_string()],
 3941                    ..lsp::ExecuteCommandOptions::default()
 3942                }),
 3943                ..lsp::ServerCapabilities::default()
 3944            },
 3945            ..FakeLspAdapter::default()
 3946        },
 3947    );
 3948
 3949    let (buffer, _handle) = project
 3950        .update(cx, |p, cx| {
 3951            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
 3952        })
 3953        .await
 3954        .unwrap();
 3955
 3956    let fake_server = fake_language_servers.next().await.unwrap();
 3957
  3958    // The language server returns code actions that contain commands, not edits.
 3959    let actions = project.update(cx, |project, cx| {
 3960        project.code_actions(&buffer, 0..0, None, cx)
 3961    });
 3962    fake_server
 3963        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
 3964            Ok(Some(vec![
 3965                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
 3966                    title: "The code action".into(),
 3967                    data: Some(serde_json::json!({
 3968                        "command": "_the/command",
 3969                    })),
 3970                    ..lsp::CodeAction::default()
 3971                }),
 3972                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
 3973                    title: "two".into(),
 3974                    ..lsp::CodeAction::default()
 3975                }),
 3976            ]))
 3977        })
 3978        .next()
 3979        .await;
 3980
 3981    let action = actions.await.unwrap().unwrap()[0].clone();
 3982    let apply = project.update(cx, |project, cx| {
 3983        project.apply_code_action(buffer.clone(), action, true, cx)
 3984    });
 3985
  3986    // Resolving the code action does not populate its edits. In the absence
  3987    // of edits, we must execute the given command.
 3988    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
 3989        |mut action, _| async move {
 3990            if action.data.is_some() {
 3991                action.command = Some(lsp::Command {
 3992                    title: "The command".into(),
 3993                    command: "_the/command".into(),
 3994                    arguments: Some(vec![json!("the-argument")]),
 3995                });
 3996            }
 3997            Ok(action)
 3998        },
 3999    );
 4000
  4001    // While executing the command, the language server sends the editor
  4002    // a `workspace/applyEdit` request.
 4003    fake_server
 4004        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
 4005            let fake = fake_server.clone();
 4006            move |params, _| {
 4007                assert_eq!(params.command, "_the/command");
 4008                let fake = fake.clone();
 4009                async move {
 4010                    fake.server
 4011                        .request::<lsp::request::ApplyWorkspaceEdit>(
 4012                            lsp::ApplyWorkspaceEditParams {
 4013                                label: None,
 4014                                edit: lsp::WorkspaceEdit {
 4015                                    changes: Some(
 4016                                        [(
 4017                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
 4018                                            vec![lsp::TextEdit {
 4019                                                range: lsp::Range::new(
 4020                                                    lsp::Position::new(0, 0),
 4021                                                    lsp::Position::new(0, 0),
 4022                                                ),
 4023                                                new_text: "X".into(),
 4024                                            }],
 4025                                        )]
 4026                                        .into_iter()
 4027                                        .collect(),
 4028                                    ),
 4029                                    ..Default::default()
 4030                                },
 4031                            },
 4032                        )
 4033                        .await
 4034                        .into_response()
 4035                        .unwrap();
 4036                    Ok(Some(json!(null)))
 4037                }
 4038            }
 4039        })
 4040        .next()
 4041        .await;
 4042
  4043    // Applying the code action returns a project transaction containing the edits
  4044    // sent by the language server in its `workspace/applyEdit` request.
 4045    let transaction = apply.await.unwrap();
 4046    assert!(transaction.0.contains_key(&buffer));
 4047    buffer.update(cx, |buffer, cx| {
 4048        assert_eq!(buffer.text(), "Xa");
 4049        buffer.undo(cx);
 4050        assert_eq!(buffer.text(), "a");
 4051    });
 4052}
 4053
 4054#[gpui::test]
 4055async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
 4056    init_test(cx);
 4057    let fs = FakeFs::new(cx.background_executor.clone());
 4058    let expected_contents = "content";
 4059    fs.as_fake()
 4060        .insert_tree(
 4061            "/root",
 4062            json!({
 4063                "test.txt": expected_contents
 4064            }),
 4065        )
 4066        .await;
 4067
 4068    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
 4069
 4070    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
 4071        let worktree = project.worktrees(cx).next().unwrap();
 4072        let entry_id = worktree
 4073            .read(cx)
 4074            .entry_for_path(rel_path("test.txt"))
 4075            .unwrap()
 4076            .id;
 4077        (worktree, entry_id)
 4078    });
 4079    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
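          // Rename the file into a directory hierarchy that does not exist yet; the
          // intermediate directories are expected to be created as part of the rename.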
 4080    let _result = project
 4081        .update(cx, |project, cx| {
 4082            project.rename_entry(
 4083                entry_id,
 4084                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
 4085                cx,
 4086            )
 4087        })
 4088        .await
 4089        .unwrap();
 4090    worktree.read_with(cx, |worktree, _| {
 4091        assert!(
 4092            worktree.entry_for_path(rel_path("test.txt")).is_none(),
 4093            "Old file should have been removed"
 4094        );
 4095        assert!(
 4096            worktree
 4097                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
 4098                .is_some(),
 4099            "Whole directory hierarchy and the new file should have been created"
 4100        );
 4101    });
 4102    assert_eq!(
 4103        worktree
 4104            .update(cx, |worktree, cx| {
 4105                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
 4106            })
 4107            .await
 4108            .unwrap()
 4109            .text,
 4110        expected_contents,
 4111        "Moved file's contents should be preserved"
 4112    );
 4113
 4114    let entry_id = worktree.read_with(cx, |worktree, _| {
 4115        worktree
 4116            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
 4117            .unwrap()
 4118            .id
 4119    });
 4120
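          // Rename again, this time moving the file up into a directory that already exists.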
 4121    let _result = project
 4122        .update(cx, |project, cx| {
 4123            project.rename_entry(
 4124                entry_id,
 4125                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
 4126                cx,
 4127            )
 4128        })
 4129        .await
 4130        .unwrap();
 4131    worktree.read_with(cx, |worktree, _| {
 4132        assert!(
 4133            worktree.entry_for_path(rel_path("test.txt")).is_none(),
 4134            "First file should not reappear"
 4135        );
 4136        assert!(
 4137            worktree
 4138                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
 4139                .is_none(),
 4140            "Old file should have been removed"
 4141        );
 4142        assert!(
 4143            worktree
 4144                .entry_for_path(rel_path("dir1/dir2/test.txt"))
 4145                .is_some(),
 4146            "No error should have occurred after moving into existing directory"
 4147        );
 4148    });
 4149    assert_eq!(
 4150        worktree
 4151            .update(cx, |worktree, cx| {
 4152                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
 4153            })
 4154            .await
 4155            .unwrap()
 4156            .text,
 4157        expected_contents,
 4158        "Moved file's contents should be preserved"
 4159    );
 4160}
 4161
 4162#[gpui::test(iterations = 10)]
 4163async fn test_save_file(cx: &mut gpui::TestAppContext) {
 4164    init_test(cx);
 4165
 4166    let fs = FakeFs::new(cx.executor());
 4167    fs.insert_tree(
 4168        path!("/dir"),
 4169        json!({
 4170            "file1": "the old contents",
 4171        }),
 4172    )
 4173    .await;
 4174
 4175    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 4176    let buffer = project
 4177        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
 4178        .await
 4179        .unwrap();
 4180    buffer.update(cx, |buffer, cx| {
 4181        assert_eq!(buffer.text(), "the old contents");
 4182        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
 4183    });
 4184
 4185    project
 4186        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
 4187        .await
 4188        .unwrap();
 4189
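          // The saved contents should match the buffer; normalize CRLF so the comparison
          // also holds on Windows.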
 4190    let new_text = fs
 4191        .load(Path::new(path!("/dir/file1")))
 4192        .await
 4193        .unwrap()
 4194        .replace("\r\n", "\n");
 4195    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
 4196}
 4197
 4198#[gpui::test(iterations = 10)]
 4199async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
 4200    // Issue: #24349
 4201    init_test(cx);
 4202
 4203    let fs = FakeFs::new(cx.executor());
 4204    fs.insert_tree(path!("/dir"), json!({})).await;
 4205
 4206    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 4207    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 4208
 4209    language_registry.add(rust_lang());
 4210    let mut fake_rust_servers = language_registry.register_fake_lsp(
 4211        "Rust",
 4212        FakeLspAdapter {
 4213            name: "the-rust-language-server",
 4214            capabilities: lsp::ServerCapabilities {
 4215                completion_provider: Some(lsp::CompletionOptions {
 4216                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
 4217                    ..Default::default()
 4218                }),
 4219                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
 4220                    lsp::TextDocumentSyncOptions {
 4221                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
 4222                        ..Default::default()
 4223                    },
 4224                )),
 4225                ..Default::default()
 4226            },
 4227            ..Default::default()
 4228        },
 4229    );
 4230
 4231    let buffer = project
 4232        .update(cx, |this, cx| this.create_buffer(false, cx))
 4233        .unwrap()
 4234        .await;
 4235    project.update(cx, |this, cx| {
 4236        this.register_buffer_with_language_servers(&buffer, cx);
 4237        buffer.update(cx, |buffer, cx| {
 4238            assert!(!this.has_language_servers_for(buffer, cx));
 4239        })
 4240    });
 4241
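          // Saving the untitled buffer under a `.rs` path should cause the Rust language
          // server to be spawned and the new file to be opened in it.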
 4242    project
 4243        .update(cx, |this, cx| {
 4244            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
 4245            this.save_buffer_as(
 4246                buffer.clone(),
 4247                ProjectPath {
 4248                    worktree_id,
 4249                    path: rel_path("file.rs").into(),
 4250                },
 4251                cx,
 4252            )
 4253        })
 4254        .await
 4255        .unwrap();
 4256    // A server is started up, and it is notified about Rust files.
 4257    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
 4258    assert_eq!(
 4259        fake_rust_server
 4260            .receive_notification::<lsp::notification::DidOpenTextDocument>()
 4261            .await
 4262            .text_document,
 4263        lsp::TextDocumentItem {
 4264            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
 4265            version: 0,
 4266            text: "".to_string(),
 4267            language_id: "rust".to_string(),
 4268        }
 4269    );
 4270
 4271    project.update(cx, |this, cx| {
 4272        buffer.update(cx, |buffer, cx| {
 4273            assert!(this.has_language_servers_for(buffer, cx));
 4274        })
 4275    });
 4276}
 4277
 4278#[gpui::test(iterations = 30)]
 4279async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
 4280    init_test(cx);
 4281
 4282    let fs = FakeFs::new(cx.executor());
 4283    fs.insert_tree(
 4284        path!("/dir"),
 4285        json!({
 4286            "file1": "the original contents",
 4287        }),
 4288    )
 4289    .await;
 4290
 4291    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 4292    let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 4293    let buffer = project
 4294        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
 4295        .await
 4296        .unwrap();
 4297
 4298    // Simulate buffer diffs being slow, so that they don't complete before
 4299    // the next file change occurs.
 4300    cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
 4301
 4302    // Change the buffer's file on disk, and then wait for the file change
 4303    // to be detected by the worktree, so that the buffer starts reloading.
 4304    fs.save(
 4305        path!("/dir/file1").as_ref(),
 4306        &"the first contents".into(),
 4307        Default::default(),
 4308    )
 4309    .await
 4310    .unwrap();
 4311    worktree.next_event(cx).await;
 4312
 4313    // Change the buffer's file again. Depending on the random seed, the
 4314    // previous file change may still be in progress.
 4315    fs.save(
 4316        path!("/dir/file1").as_ref(),
 4317        &"the second contents".into(),
 4318        Default::default(),
 4319    )
 4320    .await
 4321    .unwrap();
 4322    worktree.next_event(cx).await;
 4323
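          // Once everything settles, the buffer should have reloaded to whichever contents
          // ended up on disk last, and should be neither dirty nor in conflict.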
 4324    cx.executor().run_until_parked();
 4325    let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
 4326    buffer.read_with(cx, |buffer, _| {
 4327        assert_eq!(buffer.text(), on_disk_text);
 4328        assert!(!buffer.is_dirty(), "buffer should not be dirty");
  4329        assert!(!buffer.has_conflict(), "buffer should not have a conflict");
 4330    });
 4331}
 4332
 4333#[gpui::test(iterations = 30)]
 4334async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
 4335    init_test(cx);
 4336
 4337    let fs = FakeFs::new(cx.executor());
 4338    fs.insert_tree(
 4339        path!("/dir"),
 4340        json!({
 4341            "file1": "the original contents",
 4342        }),
 4343    )
 4344    .await;
 4345
 4346    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 4347    let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 4348    let buffer = project
 4349        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
 4350        .await
 4351        .unwrap();
 4352
 4353    // Simulate buffer diffs being slow, so that they don't complete before
 4354    // the next file change occurs.
 4355    cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
 4356
 4357    // Change the buffer's file on disk, and then wait for the file change
 4358    // to be detected by the worktree, so that the buffer starts reloading.
 4359    fs.save(
 4360        path!("/dir/file1").as_ref(),
 4361        &"the first contents".into(),
 4362        Default::default(),
 4363    )
 4364    .await
 4365    .unwrap();
 4366    worktree.next_event(cx).await;
 4367
 4368    cx.executor()
 4369        .spawn(cx.executor().simulate_random_delay())
 4370        .await;
 4371
 4372    // Perform a noop edit, causing the buffer's version to increase.
 4373    buffer.update(cx, |buffer, cx| {
 4374        buffer.edit([(0..0, " ")], None, cx);
 4375        buffer.undo(cx);
 4376    });
 4377
 4378    cx.executor().run_until_parked();
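          // Let the reload settle. Depending on the random timing above, the reload may or
          // may not have raced with the edit, so both outcomes are checked below.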
 4379    let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
 4380    buffer.read_with(cx, |buffer, _| {
 4381        let buffer_text = buffer.text();
 4382        if buffer_text == on_disk_text {
 4383            assert!(
 4384                !buffer.is_dirty() && !buffer.has_conflict(),
 4385                "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
 4386            );
 4387        }
 4388        // If the file change occurred while the buffer was processing the first
 4389        // change, the buffer will be in a conflicting state.
 4390        else {
 4391            assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
  4392            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
 4393        }
 4394    });
 4395}
 4396
 4397#[gpui::test]
 4398async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
 4399    init_test(cx);
 4400
 4401    let fs = FakeFs::new(cx.executor());
 4402    fs.insert_tree(
 4403        path!("/dir"),
 4404        json!({
 4405            "file1": "the old contents",
 4406        }),
 4407    )
 4408    .await;
 4409
 4410    let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
 4411    let buffer = project
 4412        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
 4413        .await
 4414        .unwrap();
 4415    buffer.update(cx, |buffer, cx| {
 4416        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
 4417    });
 4418
 4419    project
 4420        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
 4421        .await
 4422        .unwrap();
 4423
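         // Normalize line endings so the comparison also holds on Windows.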
 4424    let new_text = fs
 4425        .load(Path::new(path!("/dir/file1")))
 4426        .await
 4427        .unwrap()
 4428        .replace("\r\n", "\n");
 4429    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
 4430}
 4431
 4432#[gpui::test]
 4433async fn test_save_as(cx: &mut gpui::TestAppContext) {
 4434    init_test(cx);
 4435
 4436    let fs = FakeFs::new(cx.executor());
 4437    fs.insert_tree("/dir", json!({})).await;
 4438
 4439    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
 4440
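         // Register the Rust language so that saving the buffer as `file1.rs`
         // assigns it the Rust language.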
 4441    let languages = project.update(cx, |project, _| project.languages().clone());
 4442    languages.add(rust_lang());
 4443
 4444    let buffer = project.update(cx, |project, cx| {
 4445        project.create_local_buffer("", None, false, cx)
 4446    });
 4447    buffer.update(cx, |buffer, cx| {
 4448        buffer.edit([(0..0, "abc")], None, cx);
 4449        assert!(buffer.is_dirty());
 4450        assert!(!buffer.has_conflict());
 4451        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
 4452    });
 4453    project
 4454        .update(cx, |project, cx| {
 4455            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
 4456            let path = ProjectPath {
 4457                worktree_id,
 4458                path: rel_path("file1.rs").into(),
 4459            };
 4460            project.save_buffer_as(buffer.clone(), path, cx)
 4461        })
 4462        .await
 4463        .unwrap();
 4464    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
 4465
 4466    cx.executor().run_until_parked();
 4467    buffer.update(cx, |buffer, cx| {
 4468        assert_eq!(
 4469            buffer.file().unwrap().full_path(cx),
 4470            Path::new("dir/file1.rs")
 4471        );
 4472        assert!(!buffer.is_dirty());
 4473        assert!(!buffer.has_conflict());
 4474        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
 4475    });
 4476
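         // Re-opening the saved path should return the same buffer entity.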
 4477    let opened_buffer = project
 4478        .update(cx, |project, cx| {
 4479            project.open_local_buffer("/dir/file1.rs", cx)
 4480        })
 4481        .await
 4482        .unwrap();
 4483    assert_eq!(opened_buffer, buffer);
 4484}
 4485
 4486#[gpui::test]
 4487async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
 4488    init_test(cx);
 4489
 4490    let fs = FakeFs::new(cx.executor());
 4491    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 4492
 4493    fs.insert_tree(
 4494        path!("/dir"),
 4495        json!({
 4496            "data_a.txt": "data about a"
 4497        }),
 4498    )
 4499    .await;
 4500
 4501    let buffer = project
 4502        .update(cx, |project, cx| {
 4503            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
 4504        })
 4505        .await
 4506        .unwrap();
 4507
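         // Change the trailing "a" to "b" so the buffer's contents differ from `data_a.txt`.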
 4508    buffer.update(cx, |buffer, cx| {
 4509        buffer.edit([(11..12, "b")], None, cx);
 4510    });
 4511
 4512    // Save the buffer's contents as a new file, and confirm that the buffer
 4513    // is now associated with `data_b.txt` instead of its original file,
 4514    // `data_a.txt`.
 4515    project
 4516        .update(cx, |project, cx| {
 4517            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
 4518            let new_path = ProjectPath {
 4519                worktree_id,
 4520                path: rel_path("data_b.txt").into(),
 4521            };
 4522
 4523            project.save_buffer_as(buffer.clone(), new_path, cx)
 4524        })
 4525        .await
 4526        .unwrap();
 4527
 4528    buffer.update(cx, |buffer, cx| {
 4529        assert_eq!(
 4530            buffer.file().unwrap().full_path(cx),
 4531            Path::new("dir/data_b.txt")
 4532        )
 4533    });
 4534
 4535    // Open the original `data_a.txt` file, confirming that its contents are
 4536    // unchanged and the resulting buffer's associated file is `data_a.txt`.
 4537    let original_buffer = project
 4538        .update(cx, |project, cx| {
 4539            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
 4540        })
 4541        .await
 4542        .unwrap();
 4543
 4544    original_buffer.update(cx, |buffer, cx| {
 4545        assert_eq!(buffer.text(), "data about a");
 4546        assert_eq!(
 4547            buffer.file().unwrap().full_path(cx),
 4548            Path::new("dir/data_a.txt")
 4549        )
 4550    });
 4551}
 4552
 4553#[gpui::test(retries = 5)]
 4554async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
 4555    use worktree::WorktreeModelHandle as _;
 4556
 4557    init_test(cx);
 4558    cx.executor().allow_parking();
 4559
 4560    let dir = TempTree::new(json!({
 4561        "a": {
 4562            "file1": "",
 4563            "file2": "",
 4564            "file3": "",
 4565        },
 4566        "b": {
 4567            "c": {
 4568                "file4": "",
 4569                "file5": "",
 4570            }
 4571        }
 4572    }));
 4573
 4574    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
 4575
 4576    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
 4577        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
 4578        async move { buffer.await.unwrap() }
 4579    };
 4580    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
 4581        project.update(cx, |project, cx| {
 4582            let tree = project.worktrees(cx).next().unwrap();
 4583            tree.read(cx)
 4584                .entry_for_path(rel_path(path))
 4585                .unwrap_or_else(|| panic!("no entry for path {}", path))
 4586                .id
 4587        })
 4588    };
 4589
 4590    let buffer2 = buffer_for_path("a/file2", cx).await;
 4591    let buffer3 = buffer_for_path("a/file3", cx).await;
 4592    let buffer4 = buffer_for_path("b/c/file4", cx).await;
 4593    let buffer5 = buffer_for_path("b/c/file5", cx).await;
 4594
 4595    let file2_id = id_for_path("a/file2", cx);
 4596    let file3_id = id_for_path("a/file3", cx);
 4597    let file4_id = id_for_path("b/c/file4", cx);
 4598
 4599    // Create a remote copy of this worktree.
 4600    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
 4601    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());
 4602
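         // Collect the update messages sent by the local worktree, so they can be
         // replayed on the remote worktree below.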
 4603    let updates = Arc::new(Mutex::new(Vec::new()));
 4604    tree.update(cx, |tree, cx| {
 4605        let updates = updates.clone();
 4606        tree.observe_updates(0, cx, move |update| {
 4607            updates.lock().push(update);
 4608            async { true }
 4609        });
 4610    });
 4611
 4612    let remote = cx.update(|cx| {
 4613        Worktree::remote(
 4614            0,
 4615            ReplicaId::REMOTE_SERVER,
 4616            metadata,
 4617            project.read(cx).client().into(),
 4618            project.read(cx).path_style(cx),
 4619            cx,
 4620        )
 4621    });
 4622
 4623    cx.executor().run_until_parked();
 4624
 4625    cx.update(|cx| {
 4626        assert!(!buffer2.read(cx).is_dirty());
 4627        assert!(!buffer3.read(cx).is_dirty());
 4628        assert!(!buffer4.read(cx).is_dirty());
 4629        assert!(!buffer5.read(cx).is_dirty());
 4630    });
 4631
 4632    // Rename and delete files and directories.
 4633    tree.flush_fs_events(cx).await;
 4634    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
 4635    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
 4636    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
 4637    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
 4638    tree.flush_fs_events(cx).await;
 4639
 4640    cx.update(|app| {
 4641        assert_eq!(
 4642            tree.read(app).paths().collect::<Vec<_>>(),
 4643            vec![
 4644                rel_path("a"),
 4645                rel_path("a/file1"),
 4646                rel_path("a/file2.new"),
 4647                rel_path("b"),
 4648                rel_path("d"),
 4649                rel_path("d/file3"),
 4650                rel_path("d/file4"),
 4651            ]
 4652        );
 4653    });
 4654
 4655    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
 4656    assert_eq!(id_for_path("d/file3", cx), file3_id);
 4657    assert_eq!(id_for_path("d/file4", cx), file4_id);
 4658
 4659    cx.update(|cx| {
 4660        assert_eq!(
 4661            buffer2.read(cx).file().unwrap().path().as_ref(),
 4662            rel_path("a/file2.new")
 4663        );
 4664        assert_eq!(
 4665            buffer3.read(cx).file().unwrap().path().as_ref(),
 4666            rel_path("d/file3")
 4667        );
 4668        assert_eq!(
 4669            buffer4.read(cx).file().unwrap().path().as_ref(),
 4670            rel_path("d/file4")
 4671        );
 4672        assert_eq!(
 4673            buffer5.read(cx).file().unwrap().path().as_ref(),
 4674            rel_path("b/c/file5")
 4675        );
 4676
 4677        assert_matches!(
 4678            buffer2.read(cx).file().unwrap().disk_state(),
 4679            DiskState::Present { .. }
 4680        );
 4681        assert_matches!(
 4682            buffer3.read(cx).file().unwrap().disk_state(),
 4683            DiskState::Present { .. }
 4684        );
 4685        assert_matches!(
 4686            buffer4.read(cx).file().unwrap().disk_state(),
 4687            DiskState::Present { .. }
 4688        );
 4689        assert_eq!(
 4690            buffer5.read(cx).file().unwrap().disk_state(),
 4691            DiskState::Deleted
 4692        );
 4693    });
 4694
 4695    // Update the remote worktree. Check that it becomes consistent with the
 4696    // local worktree.
 4697    cx.executor().run_until_parked();
 4698
 4699    remote.update(cx, |remote, _| {
 4700        for update in updates.lock().drain(..) {
 4701            remote.as_remote_mut().unwrap().update_from_remote(update);
 4702        }
 4703    });
 4704    cx.executor().run_until_parked();
 4705    remote.update(cx, |remote, _| {
 4706        assert_eq!(
 4707            remote.paths().collect::<Vec<_>>(),
 4708            vec![
 4709                rel_path("a"),
 4710                rel_path("a/file1"),
 4711                rel_path("a/file2.new"),
 4712                rel_path("b"),
 4713                rel_path("d"),
 4714                rel_path("d/file3"),
 4715                rel_path("d/file4"),
 4716            ]
 4717        );
 4718    });
 4719}
 4720
 4721#[gpui::test(iterations = 10)]
 4722async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
 4723    init_test(cx);
 4724
 4725    let fs = FakeFs::new(cx.executor());
 4726    fs.insert_tree(
 4727        path!("/dir"),
 4728        json!({
 4729            "a": {
 4730                "file1": "",
 4731            }
 4732        }),
 4733    )
 4734    .await;
 4735
 4736    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
 4737    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
 4738    let tree_id = tree.update(cx, |tree, _| tree.id());
 4739
 4740    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
 4741        project.update(cx, |project, cx| {
 4742            let tree = project.worktrees(cx).next().unwrap();
 4743            tree.read(cx)
 4744                .entry_for_path(rel_path(path))
 4745                .unwrap_or_else(|| panic!("no entry for path {}", path))
 4746                .id
 4747        })
 4748    };
 4749
 4750    let dir_id = id_for_path("a", cx);
 4751    let file_id = id_for_path("a/file1", cx);
 4752    let buffer = project
 4753        .update(cx, |p, cx| {
 4754            p.open_buffer((tree_id, rel_path("a/file1")), cx)
 4755        })
 4756        .await
 4757        .unwrap();
 4758    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
 4759
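         // Rename the parent directory; the entry ids and the open buffer should
         // survive the rename.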
 4760    project
 4761        .update(cx, |project, cx| {
 4762            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
 4763        })
 4764        .unwrap()
 4765        .await
 4766        .into_included()
 4767        .unwrap();
 4768    cx.executor().run_until_parked();
 4769
 4770    assert_eq!(id_for_path("b", cx), dir_id);
 4771    assert_eq!(id_for_path("b/file1", cx), file_id);
 4772    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
 4773}
 4774
 4775#[gpui::test]
 4776async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
 4777    init_test(cx);
 4778
 4779    let fs = FakeFs::new(cx.executor());
 4780    fs.insert_tree(
 4781        "/dir",
 4782        json!({
 4783            "a.txt": "a-contents",
 4784            "b.txt": "b-contents",
 4785        }),
 4786    )
 4787    .await;
 4788
 4789    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
 4790
 4791    // Spawn multiple tasks to open paths, repeating some paths.
 4792    let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
 4793        (
 4794            p.open_local_buffer("/dir/a.txt", cx),
 4795            p.open_local_buffer("/dir/b.txt", cx),
 4796            p.open_local_buffer("/dir/a.txt", cx),
 4797        )
 4798    });
 4799
 4800    let buffer_a_1 = buffer_a_1.await.unwrap();
 4801    let buffer_a_2 = buffer_a_2.await.unwrap();
 4802    let buffer_b = buffer_b.await.unwrap();
 4803    assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
 4804    assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
 4805
 4806    // There is only one buffer per path.
 4807    let buffer_a_id = buffer_a_1.entity_id();
 4808    assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
 4809
 4810    // Open the same path again while it is still open.
 4811    drop(buffer_a_1);
 4812    let buffer_a_3 = project
 4813        .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
 4814        .await
 4815        .unwrap();
 4816
 4817    // There's still only one buffer per path.
 4818    assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
 4819}
 4820
 4821#[gpui::test]
 4822async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
 4823    init_test(cx);
 4824
 4825    let fs = FakeFs::new(cx.executor());
 4826    fs.insert_tree(
 4827        path!("/dir"),
 4828        json!({
 4829            "file1": "abc",
 4830            "file2": "def",
 4831            "file3": "ghi",
 4832        }),
 4833    )
 4834    .await;
 4835
 4836    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 4837
 4838    let buffer1 = project
 4839        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
 4840        .await
 4841        .unwrap();
 4842    let events = Arc::new(Mutex::new(Vec::new()));
 4843
 4844    // initially, the buffer isn't dirty.
 4845    buffer1.update(cx, |buffer, cx| {
 4846        cx.subscribe(&buffer1, {
 4847            let events = events.clone();
 4848            move |_, _, event, _| match event {
 4849                BufferEvent::Operation { .. } => {}
 4850                _ => events.lock().push(event.clone()),
 4851            }
 4852        })
 4853        .detach();
 4854
 4855        assert!(!buffer.is_dirty());
 4856        assert!(events.lock().is_empty());
 4857
 4858        buffer.edit([(1..2, "")], None, cx);
 4859    });
 4860
 4861    // after the first edit, the buffer is dirty, and emits a dirtied event.
 4862    buffer1.update(cx, |buffer, cx| {
 4863        assert_eq!(buffer.text(), "ac");
 4864        assert!(buffer.is_dirty());
 4865        assert_eq!(
 4866            *events.lock(),
 4867            &[
 4868                language::BufferEvent::Edited,
 4869                language::BufferEvent::DirtyChanged
 4870            ]
 4871        );
 4872        events.lock().clear();
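             // Simulate the buffer having been saved at its current version.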
 4873        buffer.did_save(
 4874            buffer.version(),
 4875            buffer.file().unwrap().disk_state().mtime(),
 4876            cx,
 4877        );
 4878    });
 4879
 4880    // after saving, the buffer is not dirty, and emits a saved event.
 4881    buffer1.update(cx, |buffer, cx| {
 4882        assert!(!buffer.is_dirty());
 4883        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
 4884        events.lock().clear();
 4885
 4886        buffer.edit([(1..1, "B")], None, cx);
 4887        buffer.edit([(2..2, "D")], None, cx);
 4888    });
 4889
 4890    // after editing again, the buffer is dirty, and emits another dirty event.
 4891    buffer1.update(cx, |buffer, cx| {
 4892        assert_eq!(buffer.text(), "aBDc");
 4893        assert!(buffer.is_dirty());
 4894        assert_eq!(
 4895            *events.lock(),
 4896            &[
 4897                language::BufferEvent::Edited,
 4898                language::BufferEvent::DirtyChanged,
 4899                language::BufferEvent::Edited,
 4900            ],
 4901        );
 4902        events.lock().clear();
 4903
 4904        // After restoring the buffer to its previously-saved state,
 4905        // the buffer is not considered dirty anymore.
 4906        buffer.edit([(1..3, "")], None, cx);
 4907        assert_eq!(buffer.text(), "ac");
 4908        assert!(!buffer.is_dirty());
 4909    });
 4910
 4911    assert_eq!(
 4912        *events.lock(),
 4913        &[
 4914            language::BufferEvent::Edited,
 4915            language::BufferEvent::DirtyChanged
 4916        ]
 4917    );
 4918
 4919    // When a file is deleted, it is not considered dirty.
 4920    let events = Arc::new(Mutex::new(Vec::new()));
 4921    let buffer2 = project
 4922        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
 4923        .await
 4924        .unwrap();
 4925    buffer2.update(cx, |_, cx| {
 4926        cx.subscribe(&buffer2, {
 4927            let events = events.clone();
 4928            move |_, _, event, _| match event {
 4929                BufferEvent::Operation { .. } => {}
 4930                _ => events.lock().push(event.clone()),
 4931            }
 4932        })
 4933        .detach();
 4934    });
 4935
 4936    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
 4937        .await
 4938        .unwrap();
 4939    cx.executor().run_until_parked();
 4940    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
 4941    assert_eq!(
 4942        mem::take(&mut *events.lock()),
 4943        &[language::BufferEvent::FileHandleChanged]
 4944    );
 4945
 4946    // Buffer becomes dirty when edited.
 4947    buffer2.update(cx, |buffer, cx| {
 4948        buffer.edit([(2..3, "")], None, cx);
 4949        assert!(buffer.is_dirty());
 4950    });
 4951    assert_eq!(
 4952        mem::take(&mut *events.lock()),
 4953        &[
 4954            language::BufferEvent::Edited,
 4955            language::BufferEvent::DirtyChanged
 4956        ]
 4957    );
 4958
 4959    // Buffer becomes clean again when all of its content is removed, because
 4960    // the file was deleted.
 4961    buffer2.update(cx, |buffer, cx| {
 4962        buffer.edit([(0..2, "")], None, cx);
 4963        assert!(buffer.is_empty());
 4964        assert!(!buffer.is_dirty());
 4965    });
 4966    assert_eq!(
 4967        *events.lock(),
 4968        &[
 4969            language::BufferEvent::Edited,
 4970            language::BufferEvent::DirtyChanged
 4971        ]
 4972    );
 4973
 4974    // When a file is already dirty when it is deleted, we don't emit a DirtyChanged event.
 4975    let events = Arc::new(Mutex::new(Vec::new()));
 4976    let buffer3 = project
 4977        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
 4978        .await
 4979        .unwrap();
 4980    buffer3.update(cx, |_, cx| {
 4981        cx.subscribe(&buffer3, {
 4982            let events = events.clone();
 4983            move |_, _, event, _| match event {
 4984                BufferEvent::Operation { .. } => {}
 4985                _ => events.lock().push(event.clone()),
 4986            }
 4987        })
 4988        .detach();
 4989    });
 4990
 4991    buffer3.update(cx, |buffer, cx| {
 4992        buffer.edit([(0..0, "x")], None, cx);
 4993    });
 4994    events.lock().clear();
 4995    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
 4996        .await
 4997        .unwrap();
 4998    cx.executor().run_until_parked();
 4999    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
 5000    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
 5001}
 5002
 5003#[gpui::test]
 5004async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
 5005    init_test(cx);
 5006
 5007    let (initial_contents, initial_offsets) =
 5008        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
 5009    let fs = FakeFs::new(cx.executor());
 5010    fs.insert_tree(
 5011        path!("/dir"),
 5012        json!({
 5013            "the-file": initial_contents,
 5014        }),
 5015    )
 5016    .await;
 5017    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 5018    let buffer = project
 5019        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
 5020        .await
 5021        .unwrap();
 5022
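         // Create anchors at the marked offsets so we can check that they map to
         // the expected positions after the reload.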
 5023    let anchors = initial_offsets
 5024        .iter()
 5025        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
 5026        .collect::<Vec<_>>();
 5027
 5028    // Confirm the buffer is clean, then change the file on disk, editing
 5029    // the text on each line.
 5030    buffer.update(cx, |buffer, _| {
 5031        assert!(!buffer.is_dirty());
 5032        assert!(!buffer.has_conflict());
 5033    });
 5034
 5035    let (new_contents, new_offsets) =
 5036        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
 5037    fs.save(
 5038        path!("/dir/the-file").as_ref(),
 5039        &new_contents.as_str().into(),
 5040        LineEnding::Unix,
 5041    )
 5042    .await
 5043    .unwrap();
 5044
 5045    // Because the buffer was not modified, it is reloaded from disk. Its
 5046    // contents are edited according to the diff between the old and new
 5047    // file contents.
 5048    cx.executor().run_until_parked();
 5049    buffer.update(cx, |buffer, _| {
 5050        assert_eq!(buffer.text(), new_contents);
 5051        assert!(!buffer.is_dirty());
 5052        assert!(!buffer.has_conflict());
 5053
 5054        let anchor_offsets = anchors
 5055            .iter()
 5056            .map(|anchor| anchor.to_offset(&*buffer))
 5057            .collect::<Vec<_>>();
 5058        assert_eq!(anchor_offsets, new_offsets);
 5059    });
 5060
 5061    // Modify the buffer, making it dirty.
 5062    buffer.update(cx, |buffer, cx| {
 5063        buffer.edit([(0..0, " ")], None, cx);
 5064        assert!(buffer.is_dirty());
 5065        assert!(!buffer.has_conflict());
 5066    });
 5067
 5068    // Change the file on disk again, adding blank lines to the beginning.
 5069    fs.save(
 5070        path!("/dir/the-file").as_ref(),
 5071        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
 5072        LineEnding::Unix,
 5073    )
 5074    .await
 5075    .unwrap();
 5076
 5077    // Because the buffer is modified, it doesn't reload from disk, but is
 5078    // marked as having a conflict.
 5079    cx.executor().run_until_parked();
 5080    buffer.update(cx, |buffer, _| {
 5081        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
 5082        assert!(buffer.has_conflict());
 5083    });
 5084}
 5085
 5086#[gpui::test]
 5087async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
 5088    init_test(cx);
 5089
 5090    let fs = FakeFs::new(cx.executor());
 5091    fs.insert_tree(
 5092        path!("/dir"),
 5093        json!({
 5094            "file1": "a\nb\nc\n",
 5095            "file2": "one\r\ntwo\r\nthree\r\n",
 5096        }),
 5097    )
 5098    .await;
 5099
 5100    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 5101    let buffer1 = project
 5102        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
 5103        .await
 5104        .unwrap();
 5105    let buffer2 = project
 5106        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
 5107        .await
 5108        .unwrap();
 5109
 5110    buffer1.update(cx, |buffer, _| {
 5111        assert_eq!(buffer.text(), "a\nb\nc\n");
 5112        assert_eq!(buffer.line_ending(), LineEnding::Unix);
 5113    });
 5114    buffer2.update(cx, |buffer, _| {
 5115        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
 5116        assert_eq!(buffer.line_ending(), LineEnding::Windows);
 5117    });
 5118
 5119    // Change a file's line endings on disk from unix to windows. The buffer's
 5120    // state updates correctly.
 5121    fs.save(
 5122        path!("/dir/file1").as_ref(),
 5123        &"aaa\nb\nc\n".into(),
 5124        LineEnding::Windows,
 5125    )
 5126    .await
 5127    .unwrap();
 5128    cx.executor().run_until_parked();
 5129    buffer1.update(cx, |buffer, _| {
 5130        assert_eq!(buffer.text(), "aaa\nb\nc\n");
 5131        assert_eq!(buffer.line_ending(), LineEnding::Windows);
 5132    });
 5133
 5134    // Save a file with windows line endings. The file is written correctly.
 5135    buffer2.update(cx, |buffer, cx| {
 5136        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
 5137    });
 5138    project
 5139        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
 5140        .await
 5141        .unwrap();
 5142    assert_eq!(
 5143        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
 5144        "one\r\ntwo\r\nthree\r\nfour\r\n",
 5145    );
 5146}
 5147
 5148#[gpui::test]
 5149async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
 5150    init_test(cx);
 5151
 5152    let fs = FakeFs::new(cx.executor());
 5153    fs.insert_tree(
 5154        path!("/dir"),
 5155        json!({
 5156            "a.rs": "
 5157                fn foo(mut v: Vec<usize>) {
 5158                    for x in &v {
 5159                        v.push(1);
 5160                    }
 5161                }
 5162            "
 5163            .unindent(),
 5164        }),
 5165    )
 5166    .await;
 5167
 5168    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 5169    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
 5170    let buffer = project
 5171        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
 5172        .await
 5173        .unwrap();
 5174
 5175    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
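         // Publish two diagnostic groups: a warning with one related hint, and an
         // error with two related hints.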
 5176    let message = lsp::PublishDiagnosticsParams {
 5177        uri: buffer_uri.clone(),
 5178        diagnostics: vec![
 5179            lsp::Diagnostic {
 5180                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
 5181                severity: Some(DiagnosticSeverity::WARNING),
 5182                message: "error 1".to_string(),
 5183                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
 5184                    location: lsp::Location {
 5185                        uri: buffer_uri.clone(),
 5186                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
 5187                    },
 5188                    message: "error 1 hint 1".to_string(),
 5189                }]),
 5190                ..Default::default()
 5191            },
 5192            lsp::Diagnostic {
 5193                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
 5194                severity: Some(DiagnosticSeverity::HINT),
 5195                message: "error 1 hint 1".to_string(),
 5196                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
 5197                    location: lsp::Location {
 5198                        uri: buffer_uri.clone(),
 5199                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
 5200                    },
 5201                    message: "original diagnostic".to_string(),
 5202                }]),
 5203                ..Default::default()
 5204            },
 5205            lsp::Diagnostic {
 5206                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
 5207                severity: Some(DiagnosticSeverity::ERROR),
 5208                message: "error 2".to_string(),
 5209                related_information: Some(vec![
 5210                    lsp::DiagnosticRelatedInformation {
 5211                        location: lsp::Location {
 5212                            uri: buffer_uri.clone(),
 5213                            range: lsp::Range::new(
 5214                                lsp::Position::new(1, 13),
 5215                                lsp::Position::new(1, 15),
 5216                            ),
 5217                        },
 5218                        message: "error 2 hint 1".to_string(),
 5219                    },
 5220                    lsp::DiagnosticRelatedInformation {
 5221                        location: lsp::Location {
 5222                            uri: buffer_uri.clone(),
 5223                            range: lsp::Range::new(
 5224                                lsp::Position::new(1, 13),
 5225                                lsp::Position::new(1, 15),
 5226                            ),
 5227                        },
 5228                        message: "error 2 hint 2".to_string(),
 5229                    },
 5230                ]),
 5231                ..Default::default()
 5232            },
 5233            lsp::Diagnostic {
 5234                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
 5235                severity: Some(DiagnosticSeverity::HINT),
 5236                message: "error 2 hint 1".to_string(),
 5237                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
 5238                    location: lsp::Location {
 5239                        uri: buffer_uri.clone(),
 5240                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
 5241                    },
 5242                    message: "original diagnostic".to_string(),
 5243                }]),
 5244                ..Default::default()
 5245            },
 5246            lsp::Diagnostic {
 5247                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
 5248                severity: Some(DiagnosticSeverity::HINT),
 5249                message: "error 2 hint 2".to_string(),
 5250                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
 5251                    location: lsp::Location {
 5252                        uri: buffer_uri,
 5253                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
 5254                    },
 5255                    message: "original diagnostic".to_string(),
 5256                }]),
 5257                ..Default::default()
 5258            },
 5259        ],
 5260        version: None,
 5261    };
 5262
 5263    lsp_store
 5264        .update(cx, |lsp_store, cx| {
 5265            lsp_store.update_diagnostics(
 5266                LanguageServerId(0),
 5267                message,
 5268                None,
 5269                DiagnosticSourceKind::Pushed,
 5270                &[],
 5271                cx,
 5272            )
 5273        })
 5274        .unwrap();
 5275    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
 5276
 5277    assert_eq!(
 5278        buffer
 5279            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
 5280            .collect::<Vec<_>>(),
 5281        &[
 5282            DiagnosticEntry {
 5283                range: Point::new(1, 8)..Point::new(1, 9),
 5284                diagnostic: Diagnostic {
 5285                    severity: DiagnosticSeverity::WARNING,
 5286                    message: "error 1".to_string(),
 5287                    group_id: 1,
 5288                    is_primary: true,
 5289                    source_kind: DiagnosticSourceKind::Pushed,
 5290                    ..Diagnostic::default()
 5291                }
 5292            },
 5293            DiagnosticEntry {
 5294                range: Point::new(1, 8)..Point::new(1, 9),
 5295                diagnostic: Diagnostic {
 5296                    severity: DiagnosticSeverity::HINT,
 5297                    message: "error 1 hint 1".to_string(),
 5298                    group_id: 1,
 5299                    is_primary: false,
 5300                    source_kind: DiagnosticSourceKind::Pushed,
 5301                    ..Diagnostic::default()
 5302                }
 5303            },
 5304            DiagnosticEntry {
 5305                range: Point::new(1, 13)..Point::new(1, 15),
 5306                diagnostic: Diagnostic {
 5307                    severity: DiagnosticSeverity::HINT,
 5308                    message: "error 2 hint 1".to_string(),
 5309                    group_id: 0,
 5310                    is_primary: false,
 5311                    source_kind: DiagnosticSourceKind::Pushed,
 5312                    ..Diagnostic::default()
 5313                }
 5314            },
 5315            DiagnosticEntry {
 5316                range: Point::new(1, 13)..Point::new(1, 15),
 5317                diagnostic: Diagnostic {
 5318                    severity: DiagnosticSeverity::HINT,
 5319                    message: "error 2 hint 2".to_string(),
 5320                    group_id: 0,
 5321                    is_primary: false,
 5322                    source_kind: DiagnosticSourceKind::Pushed,
 5323                    ..Diagnostic::default()
 5324                }
 5325            },
 5326            DiagnosticEntry {
 5327                range: Point::new(2, 8)..Point::new(2, 17),
 5328                diagnostic: Diagnostic {
 5329                    severity: DiagnosticSeverity::ERROR,
 5330                    message: "error 2".to_string(),
 5331                    group_id: 0,
 5332                    is_primary: true,
 5333                    source_kind: DiagnosticSourceKind::Pushed,
 5334                    ..Diagnostic::default()
 5335                }
 5336            }
 5337        ]
 5338    );
 5339
 5340    assert_eq!(
 5341        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
 5342        &[
 5343            DiagnosticEntry {
 5344                range: Point::new(1, 13)..Point::new(1, 15),
 5345                diagnostic: Diagnostic {
 5346                    severity: DiagnosticSeverity::HINT,
 5347                    message: "error 2 hint 1".to_string(),
 5348                    group_id: 0,
 5349                    is_primary: false,
 5350                    source_kind: DiagnosticSourceKind::Pushed,
 5351                    ..Diagnostic::default()
 5352                }
 5353            },
 5354            DiagnosticEntry {
 5355                range: Point::new(1, 13)..Point::new(1, 15),
 5356                diagnostic: Diagnostic {
 5357                    severity: DiagnosticSeverity::HINT,
 5358                    message: "error 2 hint 2".to_string(),
 5359                    group_id: 0,
 5360                    is_primary: false,
 5361                    source_kind: DiagnosticSourceKind::Pushed,
 5362                    ..Diagnostic::default()
 5363                }
 5364            },
 5365            DiagnosticEntry {
 5366                range: Point::new(2, 8)..Point::new(2, 17),
 5367                diagnostic: Diagnostic {
 5368                    severity: DiagnosticSeverity::ERROR,
 5369                    message: "error 2".to_string(),
 5370                    group_id: 0,
 5371                    is_primary: true,
 5372                    source_kind: DiagnosticSourceKind::Pushed,
 5373                    ..Diagnostic::default()
 5374                }
 5375            }
 5376        ]
 5377    );
 5378
 5379    assert_eq!(
 5380        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
 5381        &[
 5382            DiagnosticEntry {
 5383                range: Point::new(1, 8)..Point::new(1, 9),
 5384                diagnostic: Diagnostic {
 5385                    severity: DiagnosticSeverity::WARNING,
 5386                    message: "error 1".to_string(),
 5387                    group_id: 1,
 5388                    is_primary: true,
 5389                    source_kind: DiagnosticSourceKind::Pushed,
 5390                    ..Diagnostic::default()
 5391                }
 5392            },
 5393            DiagnosticEntry {
 5394                range: Point::new(1, 8)..Point::new(1, 9),
 5395                diagnostic: Diagnostic {
 5396                    severity: DiagnosticSeverity::HINT,
 5397                    message: "error 1 hint 1".to_string(),
 5398                    group_id: 1,
 5399                    is_primary: false,
 5400                    source_kind: DiagnosticSourceKind::Pushed,
 5401                    ..Diagnostic::default()
 5402                }
 5403            },
 5404        ]
 5405    );
 5406}
 5407
 5408#[gpui::test]
 5409async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
 5410    init_test(cx);
 5411
 5412    let fs = FakeFs::new(cx.executor());
 5413    fs.insert_tree(
 5414        path!("/dir"),
 5415        json!({
 5416            "one.rs": "const ONE: usize = 1;",
 5417            "two": {
 5418                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
 5419            }
 5420
 5421        }),
 5422    )
 5423    .await;
 5424    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 5425
 5426    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 5427    language_registry.add(rust_lang());
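         // The fake server advertises rename support for `.rs` files and for folders.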
 5428    let watched_paths = lsp::FileOperationRegistrationOptions {
 5429        filters: vec![
 5430            FileOperationFilter {
 5431                scheme: Some("file".to_owned()),
 5432                pattern: lsp::FileOperationPattern {
 5433                    glob: "**/*.rs".to_owned(),
 5434                    matches: Some(lsp::FileOperationPatternKind::File),
 5435                    options: None,
 5436                },
 5437            },
 5438            FileOperationFilter {
 5439                scheme: Some("file".to_owned()),
 5440                pattern: lsp::FileOperationPattern {
 5441                    glob: "**/**".to_owned(),
 5442                    matches: Some(lsp::FileOperationPatternKind::Folder),
 5443                    options: None,
 5444                },
 5445            },
 5446        ],
 5447    };
 5448    let mut fake_servers = language_registry.register_fake_lsp(
 5449        "Rust",
 5450        FakeLspAdapter {
 5451            capabilities: lsp::ServerCapabilities {
 5452                workspace: Some(lsp::WorkspaceServerCapabilities {
 5453                    workspace_folders: None,
 5454                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
 5455                        did_rename: Some(watched_paths.clone()),
 5456                        will_rename: Some(watched_paths),
 5457                        ..Default::default()
 5458                    }),
 5459                }),
 5460                ..Default::default()
 5461            },
 5462            ..Default::default()
 5463        },
 5464    );
 5465
 5466    let _ = project
 5467        .update(cx, |project, cx| {
 5468            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
 5469        })
 5470        .await
 5471        .unwrap();
 5472
 5473    let fake_server = fake_servers.next().await.unwrap();
 5474    let response = project.update(cx, |project, cx| {
 5475        let worktree = project.worktrees(cx).next().unwrap();
 5476        let entry = worktree
 5477            .read(cx)
 5478            .entry_for_path(rel_path("one.rs"))
 5479            .unwrap();
 5480        project.rename_entry(
 5481            entry.id,
 5482            (worktree.read(cx).id(), rel_path("three.rs")).into(),
 5483            cx,
 5484        )
 5485    });
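         // The workspace edit that the fake server will return from the `WillRenameFiles` request.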
 5486    let expected_edit = lsp::WorkspaceEdit {
 5487        changes: None,
 5488        document_changes: Some(DocumentChanges::Edits({
 5489            vec![TextDocumentEdit {
 5490                edits: vec![lsp::OneOf::Left(lsp::TextEdit {
 5491                    range: lsp::Range {
 5492                        start: lsp::Position {
 5493                            line: 0,
 5494                            character: 1,
 5495                        },
 5496                        end: lsp::Position {
 5497                            line: 0,
 5498                            character: 3,
 5499                        },
 5500                    },
 5501                    new_text: "This is not a drill".to_owned(),
 5502                })],
 5503                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
 5504                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
 5505                    version: Some(1337),
 5506                },
 5507            }]
 5508        })),
 5509        change_annotations: None,
 5510    };
 5511    let resolved_workspace_edit = Arc::new(OnceLock::new());
 5512    fake_server
 5513        .set_request_handler::<WillRenameFiles, _, _>({
 5514            let resolved_workspace_edit = resolved_workspace_edit.clone();
 5515            let expected_edit = expected_edit.clone();
 5516            move |params, _| {
 5517                let resolved_workspace_edit = resolved_workspace_edit.clone();
 5518                let expected_edit = expected_edit.clone();
 5519                async move {
 5520                    assert_eq!(params.files.len(), 1);
 5521                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
 5522                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
 5523                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
 5524                    Ok(Some(expected_edit))
 5525                }
 5526            }
 5527        })
 5528        .next()
 5529        .await
 5530        .unwrap();
 5531    let _ = response.await.unwrap();
 5532    fake_server
 5533        .handle_notification::<DidRenameFiles, _>(|params, _| {
 5534            assert_eq!(params.files.len(), 1);
 5535            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
 5536            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
 5537        })
 5538        .next()
 5539        .await
 5540        .unwrap();
 5541    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
 5542}
 5543
 5544#[gpui::test]
 5545async fn test_rename(cx: &mut gpui::TestAppContext) {
 5547    init_test(cx);
 5548
 5549    let fs = FakeFs::new(cx.executor());
 5550    fs.insert_tree(
 5551        path!("/dir"),
 5552        json!({
 5553            "one.rs": "const ONE: usize = 1;",
 5554            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
 5555        }),
 5556    )
 5557    .await;
 5558
 5559    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 5560
 5561    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 5562    language_registry.add(rust_lang());
 5563    let mut fake_servers = language_registry.register_fake_lsp(
 5564        "Rust",
 5565        FakeLspAdapter {
 5566            capabilities: lsp::ServerCapabilities {
 5567                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
 5568                    prepare_provider: Some(true),
 5569                    work_done_progress_options: Default::default(),
 5570                })),
 5571                ..Default::default()
 5572            },
 5573            ..Default::default()
 5574        },
 5575    );
 5576
 5577    let (buffer, _handle) = project
 5578        .update(cx, |project, cx| {
 5579            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
 5580        })
 5581        .await
 5582        .unwrap();
 5583
 5584    let fake_server = fake_servers.next().await.unwrap();
 5585
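         // Prepare a rename at offset 7, which is inside the identifier `ONE`.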
 5586    let response = project.update(cx, |project, cx| {
 5587        project.prepare_rename(buffer.clone(), 7, cx)
 5588    });
 5589    fake_server
 5590        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
 5591            assert_eq!(
 5592                params.text_document.uri.as_str(),
 5593                uri!("file:///dir/one.rs")
 5594            );
 5595            assert_eq!(params.position, lsp::Position::new(0, 7));
 5596            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
 5597                lsp::Position::new(0, 6),
 5598                lsp::Position::new(0, 9),
 5599            ))))
 5600        })
 5601        .next()
 5602        .await
 5603        .unwrap();
 5604    let response = response.await.unwrap();
 5605    let PrepareRenameResponse::Success(range) = response else {
 5606        panic!("{:?}", response);
 5607    };
 5608    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
 5609    assert_eq!(range, 6..9);
 5610
 5611    let response = project.update(cx, |project, cx| {
 5612        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
 5613    });
 5614    fake_server
 5615        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
 5616            assert_eq!(
 5617                params.text_document_position.text_document.uri.as_str(),
 5618                uri!("file:///dir/one.rs")
 5619            );
 5620            assert_eq!(
 5621                params.text_document_position.position,
 5622                lsp::Position::new(0, 7)
 5623            );
 5624            assert_eq!(params.new_name, "THREE");
 5625            Ok(Some(lsp::WorkspaceEdit {
 5626                changes: Some(
 5627                    [
 5628                        (
 5629                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
 5630                            vec![lsp::TextEdit::new(
 5631                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
 5632                                "THREE".to_string(),
 5633                            )],
 5634                        ),
 5635                        (
 5636                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
 5637                            vec![
 5638                                lsp::TextEdit::new(
 5639                                    lsp::Range::new(
 5640                                        lsp::Position::new(0, 24),
 5641                                        lsp::Position::new(0, 27),
 5642                                    ),
 5643                                    "THREE".to_string(),
 5644                                ),
 5645                                lsp::TextEdit::new(
 5646                                    lsp::Range::new(
 5647                                        lsp::Position::new(0, 35),
 5648                                        lsp::Position::new(0, 38),
 5649                                    ),
 5650                                    "THREE".to_string(),
 5651                                ),
 5652                            ],
 5653                        ),
 5654                    ]
 5655                    .into_iter()
 5656                    .collect(),
 5657                ),
 5658                ..Default::default()
 5659            }))
 5660        })
 5661        .next()
 5662        .await
 5663        .unwrap();
 5664    let mut transaction = response.await.unwrap().0;
 5665    assert_eq!(transaction.len(), 2);
 5666    assert_eq!(
 5667        transaction
 5668            .remove_entry(&buffer)
 5669            .unwrap()
 5670            .0
 5671            .update(cx, |buffer, _| buffer.text()),
 5672        "const THREE: usize = 1;"
 5673    );
 5674    assert_eq!(
 5675        transaction
 5676            .into_keys()
 5677            .next()
 5678            .unwrap()
 5679            .update(cx, |buffer, _| buffer.text()),
 5680        "const TWO: usize = one::THREE + one::THREE;"
 5681    );
 5682}
 5683
 5684#[gpui::test]
 5685async fn test_search(cx: &mut gpui::TestAppContext) {
 5686    init_test(cx);
 5687
 5688    let fs = FakeFs::new(cx.executor());
 5689    fs.insert_tree(
 5690        path!("/dir"),
 5691        json!({
 5692            "one.rs": "const ONE: usize = 1;",
 5693            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
 5694            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
 5695            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
 5696        }),
 5697    )
 5698    .await;
 5699    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 5700    assert_eq!(
 5701        search(
 5702            &project,
 5703            SearchQuery::text(
 5704                "TWO",
 5705                false,
 5706                true,
 5707                false,
 5708                Default::default(),
 5709                Default::default(),
 5710                false,
 5711                None
 5712            )
 5713            .unwrap(),
 5714            cx
 5715        )
 5716        .await
 5717        .unwrap(),
 5718        HashMap::from_iter([
 5719            (path!("dir/two.rs").to_string(), vec![6..9]),
 5720            (path!("dir/three.rs").to_string(), vec![37..40])
 5721        ])
 5722    );
 5723
 5724    let buffer_4 = project
 5725        .update(cx, |project, cx| {
 5726            project.open_local_buffer(path!("/dir/four.rs"), cx)
 5727        })
 5728        .await
 5729        .unwrap();
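         // Edit the open buffer so that search results reflect its unsaved in-memory contents.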
 5730    buffer_4.update(cx, |buffer, cx| {
 5731        let text = "two::TWO";
 5732        buffer.edit([(20..28, text), (31..43, text)], None, cx);
 5733    });
 5734
 5735    assert_eq!(
 5736        search(
 5737            &project,
 5738            SearchQuery::text(
 5739                "TWO",
 5740                false,
 5741                true,
 5742                false,
 5743                Default::default(),
 5744                Default::default(),
 5745                false,
 5746                None,
 5747            )
 5748            .unwrap(),
 5749            cx
 5750        )
 5751        .await
 5752        .unwrap(),
 5753        HashMap::from_iter([
 5754            (path!("dir/two.rs").to_string(), vec![6..9]),
 5755            (path!("dir/three.rs").to_string(), vec![37..40]),
 5756            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
 5757        ])
 5758    );
 5759}
 5760
 5761#[gpui::test]
 5762async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
 5763    init_test(cx);
 5764
 5765    let search_query = "file";
 5766
 5767    let fs = FakeFs::new(cx.executor());
 5768    fs.insert_tree(
 5769        path!("/dir"),
 5770        json!({
 5771            "one.rs": r#"// Rust file one"#,
 5772            "one.ts": r#"// TypeScript file one"#,
 5773            "two.rs": r#"// Rust file two"#,
 5774            "two.ts": r#"// TypeScript file two"#,
 5775        }),
 5776    )
 5777    .await;
 5778    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 5779
 5780    assert!(
 5781        search(
 5782            &project,
 5783            SearchQuery::text(
 5784                search_query,
 5785                false,
 5786                true,
 5787                false,
 5788                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
 5789                Default::default(),
 5790                false,
 5791                None
 5792            )
 5793            .unwrap(),
 5794            cx
 5795        )
 5796        .await
 5797        .unwrap()
 5798        .is_empty(),
 5799        "If no inclusions match, no files should be returned"
 5800    );
 5801
 5802    assert_eq!(
 5803        search(
 5804            &project,
 5805            SearchQuery::text(
 5806                search_query,
 5807                false,
 5808                true,
 5809                false,
 5810                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
 5811                Default::default(),
 5812                false,
 5813                None
 5814            )
 5815            .unwrap(),
 5816            cx
 5817        )
 5818        .await
 5819        .unwrap(),
 5820        HashMap::from_iter([
 5821            (path!("dir/one.rs").to_string(), vec![8..12]),
 5822            (path!("dir/two.rs").to_string(), vec![8..12]),
 5823        ]),
 5824        "Rust only search should give only Rust files"
 5825    );
 5826
 5827    assert_eq!(
 5828        search(
 5829            &project,
 5830            SearchQuery::text(
 5831                search_query,
 5832                false,
 5833                true,
 5834                false,
 5835                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
 5836                    .unwrap(),
 5837                Default::default(),
 5838                false,
 5839                None,
 5840            )
 5841            .unwrap(),
 5842            cx
 5843        )
 5844        .await
 5845        .unwrap(),
 5846        HashMap::from_iter([
 5847            (path!("dir/one.ts").to_string(), vec![14..18]),
 5848            (path!("dir/two.ts").to_string(), vec![14..18]),
 5849        ]),
 5850        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
 5851    );
 5852
 5853    assert_eq!(
 5854        search(
 5855            &project,
 5856            SearchQuery::text(
 5857                search_query,
 5858                false,
 5859                true,
 5860                false,
 5861                PathMatcher::new(
 5862                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
 5863                    PathStyle::local()
 5864                )
 5865                .unwrap(),
 5866                Default::default(),
 5867                false,
 5868                None,
 5869            )
 5870            .unwrap(),
 5871            cx
 5872        )
 5873        .await
 5874        .unwrap(),
 5875        HashMap::from_iter([
 5876            (path!("dir/two.ts").to_string(), vec![14..18]),
 5877            (path!("dir/one.rs").to_string(), vec![8..12]),
 5878            (path!("dir/one.ts").to_string(), vec![14..18]),
 5879            (path!("dir/two.rs").to_string(), vec![8..12]),
 5880        ]),
 5881        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
 5882    );
 5883}
 5884
 5885#[gpui::test]
 5886async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
 5887    init_test(cx);
 5888
 5889    let search_query = "file";
 5890
 5891    let fs = FakeFs::new(cx.executor());
 5892    fs.insert_tree(
 5893        path!("/dir"),
 5894        json!({
 5895            "one.rs": r#"// Rust file one"#,
 5896            "one.ts": r#"// TypeScript file one"#,
 5897            "two.rs": r#"// Rust file two"#,
 5898            "two.ts": r#"// TypeScript file two"#,
 5899        }),
 5900    )
 5901    .await;
 5902    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 5903
 5904    assert_eq!(
 5905        search(
 5906            &project,
 5907            SearchQuery::text(
 5908                search_query,
 5909                false,
 5910                true,
 5911                false,
 5912                Default::default(),
 5913                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
 5914                false,
 5915                None,
 5916            )
 5917            .unwrap(),
 5918            cx
 5919        )
 5920        .await
 5921        .unwrap(),
 5922        HashMap::from_iter([
 5923            (path!("dir/one.rs").to_string(), vec![8..12]),
 5924            (path!("dir/one.ts").to_string(), vec![14..18]),
 5925            (path!("dir/two.rs").to_string(), vec![8..12]),
 5926            (path!("dir/two.ts").to_string(), vec![14..18]),
 5927        ]),
 5928        "If no exclusions match, all files should be returned"
 5929    );
 5930
 5931    assert_eq!(
 5932        search(
 5933            &project,
 5934            SearchQuery::text(
 5935                search_query,
 5936                false,
 5937                true,
 5938                false,
 5939                Default::default(),
 5940                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
 5941                false,
 5942                None,
 5943            )
 5944            .unwrap(),
 5945            cx
 5946        )
 5947        .await
 5948        .unwrap(),
 5949        HashMap::from_iter([
 5950            (path!("dir/one.ts").to_string(), vec![14..18]),
 5951            (path!("dir/two.ts").to_string(), vec![14..18]),
 5952        ]),
 5953        "Rust exclusion search should give only TypeScript files"
 5954    );
 5955
 5956    assert_eq!(
 5957        search(
 5958            &project,
 5959            SearchQuery::text(
 5960                search_query,
 5961                false,
 5962                true,
 5963                false,
 5964                Default::default(),
 5965                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
 5966                    .unwrap(),
 5967                false,
 5968                None,
 5969            )
 5970            .unwrap(),
 5971            cx
 5972        )
 5973        .await
 5974        .unwrap(),
 5975        HashMap::from_iter([
 5976            (path!("dir/one.rs").to_string(), vec![8..12]),
 5977            (path!("dir/two.rs").to_string(), vec![8..12]),
 5978        ]),
 5979        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
 5980    );
 5981
 5982    assert!(
 5983        search(
 5984            &project,
 5985            SearchQuery::text(
 5986                search_query,
 5987                false,
 5988                true,
 5989                false,
 5990                Default::default(),
 5991                PathMatcher::new(
 5992                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
 5993                    PathStyle::local(),
 5994                )
 5995                .unwrap(),
 5996                false,
 5997                None,
 5998            )
 5999            .unwrap(),
 6000            cx
 6001        )
 6002        .await
 6003        .unwrap()
 6004        .is_empty(),
 6005        "Rust and TypeScript exclusion should give no files, even if other exclusions don't match anything"
 6006    );
 6007}
 6008
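     // Same exclusion scenarios as the previous test, but with an additional
     // in-memory buffer (created via create_local_buffer) open in the project;
     // the expected results are unchanged.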
 6009#[gpui::test]
 6010async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
 6011    init_test(cx);
 6012
 6013    let search_query = "file";
 6014
 6015    let fs = FakeFs::new(cx.executor());
 6016    fs.insert_tree(
 6017        path!("/dir"),
 6018        json!({
 6019            "one.rs": r#"// Rust file one"#,
 6020            "one.ts": r#"// TypeScript file one"#,
 6021            "two.rs": r#"// Rust file two"#,
 6022            "two.ts": r#"// TypeScript file two"#,
 6023        }),
 6024    )
 6025    .await;
 6026
 6027    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 6028    let path_style = PathStyle::local();
 6029    let _buffer = project.update(cx, |project, cx| {
 6030        project.create_local_buffer("file", None, false, cx)
 6031    });
 6032
 6033    assert_eq!(
 6034        search(
 6035            &project,
 6036            SearchQuery::text(
 6037                search_query,
 6038                false,
 6039                true,
 6040                false,
 6041                Default::default(),
 6042                PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
 6043                false,
 6044                None,
 6045            )
 6046            .unwrap(),
 6047            cx
 6048        )
 6049        .await
 6050        .unwrap(),
 6051        HashMap::from_iter([
 6052            (path!("dir/one.rs").to_string(), vec![8..12]),
 6053            (path!("dir/one.ts").to_string(), vec![14..18]),
 6054            (path!("dir/two.rs").to_string(), vec![8..12]),
 6055            (path!("dir/two.ts").to_string(), vec![14..18]),
 6056        ]),
 6057        "If no exclusions match, all files should be returned"
 6058    );
 6059
 6060    assert_eq!(
 6061        search(
 6062            &project,
 6063            SearchQuery::text(
 6064                search_query,
 6065                false,
 6066                true,
 6067                false,
 6068                Default::default(),
 6069                PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
 6070                false,
 6071                None,
 6072            )
 6073            .unwrap(),
 6074            cx
 6075        )
 6076        .await
 6077        .unwrap(),
 6078        HashMap::from_iter([
 6079            (path!("dir/one.ts").to_string(), vec![14..18]),
 6080            (path!("dir/two.ts").to_string(), vec![14..18]),
 6081        ]),
 6082        "Rust exclusion search should give only TypeScript files"
 6083    );
 6084
 6085    assert_eq!(
 6086        search(
 6087            &project,
 6088            SearchQuery::text(
 6089                search_query,
 6090                false,
 6091                true,
 6092                false,
 6093                Default::default(),
 6094                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
 6095                false,
 6096                None,
 6097            )
 6098            .unwrap(),
 6099            cx
 6100        )
 6101        .await
 6102        .unwrap(),
 6103        HashMap::from_iter([
 6104            (path!("dir/one.rs").to_string(), vec![8..12]),
 6105            (path!("dir/two.rs").to_string(), vec![8..12]),
 6106        ]),
 6107        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
 6108    );
 6109
 6110    assert!(
 6111        search(
 6112            &project,
 6113            SearchQuery::text(
 6114                search_query,
 6115                false,
 6116                true,
 6117                false,
 6118                Default::default(),
 6119                PathMatcher::new(
 6120                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
 6121                    PathStyle::local(),
 6122                )
 6123                .unwrap(),
 6124                false,
 6125                None,
 6126            )
 6127            .unwrap(),
 6128            cx
 6129        )
 6130        .await
 6131        .unwrap()
 6132        .is_empty(),
 6133        "Rust and TypeScript exclusion should give no files, even if other exclusions don't match anything"
 6134    );
 6135}
 6136
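     // When inclusion and exclusion globs are combined, exclusions take
     // precedence over overlapping inclusions, while non-overlapping inclusions
     // still return their matches.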
 6137#[gpui::test]
 6138async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
 6139    init_test(cx);
 6140
 6141    let search_query = "file";
 6142
 6143    let fs = FakeFs::new(cx.executor());
 6144    fs.insert_tree(
 6145        path!("/dir"),
 6146        json!({
 6147            "one.rs": r#"// Rust file one"#,
 6148            "one.ts": r#"// TypeScript file one"#,
 6149            "two.rs": r#"// Rust file two"#,
 6150            "two.ts": r#"// TypeScript file two"#,
 6151        }),
 6152    )
 6153    .await;
 6154    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 6155    assert!(
 6156        search(
 6157            &project,
 6158            SearchQuery::text(
 6159                search_query,
 6160                false,
 6161                true,
 6162                false,
 6163                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
 6164                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
 6165                false,
 6166                None,
 6167            )
 6168            .unwrap(),
 6169            cx
 6170        )
 6171        .await
 6172        .unwrap()
 6173        .is_empty(),
 6174        "If neither the inclusions nor the exclusions match any files, the search should return nothing"
 6175    );
 6176
 6177    assert!(
 6178        search(
 6179            &project,
 6180            SearchQuery::text(
 6181                search_query,
 6182                false,
 6183                true,
 6184                false,
 6185                PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
 6186                PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
 6187                false,
 6188                None,
 6189            )
 6190            .unwrap(),
 6191            cx
 6192        )
 6193        .await
 6194        .unwrap()
 6195        .is_empty(),
 6196        "If both TypeScript inclusions and exclusions match, exclusions should win and no files should be returned."
 6197    );
 6198
 6199    assert!(
 6200        search(
 6201            &project,
 6202            SearchQuery::text(
 6203                search_query,
 6204                false,
 6205                true,
 6206                false,
 6207                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
 6208                    .unwrap(),
 6209                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
 6210                    .unwrap(),
 6211                false,
 6212                None,
 6213            )
 6214            .unwrap(),
 6215            cx
 6216        )
 6217        .await
 6218        .unwrap()
 6219        .is_empty(),
 6220        "Adding non-matching patterns to the inclusions and exclusions should not change that."
 6221    );
 6222
 6223    assert_eq!(
 6224        search(
 6225            &project,
 6226            SearchQuery::text(
 6227                search_query,
 6228                false,
 6229                true,
 6230                false,
 6231                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
 6232                    .unwrap(),
 6233                PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
 6234                    .unwrap(),
 6235                false,
 6236                None,
 6237            )
 6238            .unwrap(),
 6239            cx
 6240        )
 6241        .await
 6242        .unwrap(),
 6243        HashMap::from_iter([
 6244            (path!("dir/one.ts").to_string(), vec![14..18]),
 6245            (path!("dir/two.ts").to_string(), vec![14..18]),
 6246        ]),
 6247        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
 6248    );
 6249}
 6250
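     // Inclusion globs can be scoped to a single worktree by prefixing them with
     // the worktree's root name (e.g. "worktree-a/*.rs"); unprefixed globs such
     // as "*.ts" match in every worktree.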
 6251#[gpui::test]
 6252async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
 6253    init_test(cx);
 6254
 6255    let fs = FakeFs::new(cx.executor());
 6256    fs.insert_tree(
 6257        path!("/worktree-a"),
 6258        json!({
 6259            "haystack.rs": r#"// NEEDLE"#,
 6260            "haystack.ts": r#"// NEEDLE"#,
 6261        }),
 6262    )
 6263    .await;
 6264    fs.insert_tree(
 6265        path!("/worktree-b"),
 6266        json!({
 6267            "haystack.rs": r#"// NEEDLE"#,
 6268            "haystack.ts": r#"// NEEDLE"#,
 6269        }),
 6270    )
 6271    .await;
 6272
 6273    let path_style = PathStyle::local();
 6274    let project = Project::test(
 6275        fs.clone(),
 6276        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
 6277        cx,
 6278    )
 6279    .await;
 6280
 6281    assert_eq!(
 6282        search(
 6283            &project,
 6284            SearchQuery::text(
 6285                "NEEDLE",
 6286                false,
 6287                true,
 6288                false,
 6289                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
 6290                Default::default(),
 6291                true,
 6292                None,
 6293            )
 6294            .unwrap(),
 6295            cx
 6296        )
 6297        .await
 6298        .unwrap(),
 6299        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
 6300        "should only return results from included worktree"
 6301    );
 6302    assert_eq!(
 6303        search(
 6304            &project,
 6305            SearchQuery::text(
 6306                "NEEDLE",
 6307                false,
 6308                true,
 6309                false,
 6310                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
 6311                Default::default(),
 6312                true,
 6313                None,
 6314            )
 6315            .unwrap(),
 6316            cx
 6317        )
 6318        .await
 6319        .unwrap(),
 6320        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
 6321        "should only return results from included worktree"
 6322    );
 6323
 6324    assert_eq!(
 6325        search(
 6326            &project,
 6327            SearchQuery::text(
 6328                "NEEDLE",
 6329                false,
 6330                true,
 6331                false,
 6332                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
 6333                Default::default(),
 6334                false,
 6335                None,
 6336            )
 6337            .unwrap(),
 6338            cx
 6339        )
 6340        .await
 6341        .unwrap(),
 6342        HashMap::from_iter([
 6343            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
 6344            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
 6345        ]),
 6346        "should return results from both worktrees"
 6347    );
 6348}
 6349
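     // Gitignored paths ("target" and "node_modules") are skipped by default and
     // only searched when the query is built with its include-ignored flag set
     // (the boolean that changes between the first two queries below); inclusion
     // and exclusion globs are still honored for ignored files.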
 6350#[gpui::test]
 6351async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
 6352    init_test(cx);
 6353
 6354    let fs = FakeFs::new(cx.background_executor.clone());
 6355    fs.insert_tree(
 6356        path!("/dir"),
 6357        json!({
 6358            ".git": {},
 6359            ".gitignore": "**/target\n/node_modules\n",
 6360            "target": {
 6361                "index.txt": "index_key:index_value"
 6362            },
 6363            "node_modules": {
 6364                "eslint": {
 6365                    "index.ts": "const eslint_key = 'eslint value'",
 6366                    "package.json": r#"{ "some_key": "some value" }"#,
 6367                },
 6368                "prettier": {
 6369                    "index.ts": "const prettier_key = 'prettier value'",
 6370                    "package.json": r#"{ "other_key": "other value" }"#,
 6371                },
 6372            },
 6373            "package.json": r#"{ "main_key": "main value" }"#,
 6374        }),
 6375    )
 6376    .await;
 6377    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 6378
 6379    let query = "key";
 6380    assert_eq!(
 6381        search(
 6382            &project,
 6383            SearchQuery::text(
 6384                query,
 6385                false,
 6386                false,
 6387                false,
 6388                Default::default(),
 6389                Default::default(),
 6390                false,
 6391                None,
 6392            )
 6393            .unwrap(),
 6394            cx
 6395        )
 6396        .await
 6397        .unwrap(),
 6398        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
 6399        "Only one non-ignored file should contain the query"
 6400    );
 6401
 6402    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 6403    let path_style = PathStyle::local();
 6404    assert_eq!(
 6405        search(
 6406            &project,
 6407            SearchQuery::text(
 6408                query,
 6409                false,
 6410                false,
 6411                true,
 6412                Default::default(),
 6413                Default::default(),
 6414                false,
 6415                None,
 6416            )
 6417            .unwrap(),
 6418            cx
 6419        )
 6420        .await
 6421        .unwrap(),
 6422        HashMap::from_iter([
 6423            (path!("dir/package.json").to_string(), vec![8..11]),
 6424            (path!("dir/target/index.txt").to_string(), vec![6..9]),
 6425            (
 6426                path!("dir/node_modules/prettier/package.json").to_string(),
 6427                vec![9..12]
 6428            ),
 6429            (
 6430                path!("dir/node_modules/prettier/index.ts").to_string(),
 6431                vec![15..18]
 6432            ),
 6433            (
 6434                path!("dir/node_modules/eslint/index.ts").to_string(),
 6435                vec![13..16]
 6436            ),
 6437            (
 6438                path!("dir/node_modules/eslint/package.json").to_string(),
 6439                vec![8..11]
 6440            ),
 6441        ]),
 6442        "Unrestricted search including ignored directories should find every file containing the query"
 6443    );
 6444
 6445    let files_to_include =
 6446        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
 6447    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
 6448    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 6449    assert_eq!(
 6450        search(
 6451            &project,
 6452            SearchQuery::text(
 6453                query,
 6454                false,
 6455                false,
 6456                true,
 6457                files_to_include,
 6458                files_to_exclude,
 6459                false,
 6460                None,
 6461            )
 6462            .unwrap(),
 6463            cx
 6464        )
 6465        .await
 6466        .unwrap(),
 6467        HashMap::from_iter([(
 6468            path!("dir/node_modules/prettier/package.json").to_string(),
 6469            vec![9..12]
 6470        )]),
 6471        "With the search including the ignored prettier directory and excluding TS files, only one file should be found"
 6472    );
 6473}
 6474
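     // Match ranges are byte offsets, so multi-byte Cyrillic text must produce
     // correct ranges for both case-sensitive and case-insensitive queries; the
     // case-insensitive query is expected to compile down to a regex variant.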
 6475#[gpui::test]
 6476async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
 6477    init_test(cx);
 6478
 6479    let fs = FakeFs::new(cx.executor());
 6480    fs.insert_tree(
 6481        path!("/dir"),
 6482        json!({
 6483            "one.rs": "// ПРИВЕТ? привет!",
 6484            "two.rs": "// ПРИВЕТ.",
 6485            "three.rs": "// привет",
 6486        }),
 6487    )
 6488    .await;
 6489    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 6490    let unicode_case_sensitive_query = SearchQuery::text(
 6491        "привет",
 6492        false,
 6493        true,
 6494        false,
 6495        Default::default(),
 6496        Default::default(),
 6497        false,
 6498        None,
 6499    );
 6500    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
 6501    assert_eq!(
 6502        search(&project, unicode_case_sensitive_query.unwrap(), cx)
 6503            .await
 6504            .unwrap(),
 6505        HashMap::from_iter([
 6506            (path!("dir/one.rs").to_string(), vec![17..29]),
 6507            (path!("dir/three.rs").to_string(), vec![3..15]),
 6508        ])
 6509    );
 6510
 6511    let unicode_case_insensitive_query = SearchQuery::text(
 6512        "привет",
 6513        false,
 6514        false,
 6515        false,
 6516        Default::default(),
 6517        Default::default(),
 6518        false,
 6519        None,
 6520    );
 6521    assert_matches!(
 6522        unicode_case_insensitive_query,
 6523        Ok(SearchQuery::Regex { .. })
 6524    );
 6525    assert_eq!(
 6526        search(&project, unicode_case_insensitive_query.unwrap(), cx)
 6527            .await
 6528            .unwrap(),
 6529        HashMap::from_iter([
 6530            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
 6531            (path!("dir/two.rs").to_string(), vec![3..15]),
 6532            (path!("dir/three.rs").to_string(), vec![3..15]),
 6533        ])
 6534    );
 6535
 6536    assert_eq!(
 6537        search(
 6538            &project,
 6539            SearchQuery::text(
 6540                "привет.",
 6541                false,
 6542                false,
 6543                false,
 6544                Default::default(),
 6545                Default::default(),
 6546                false,
 6547                None,
 6548            )
 6549            .unwrap(),
 6550            cx
 6551        )
 6552        .await
 6553        .unwrap(),
 6554        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
 6555    );
 6556}
 6557
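     // Creating an entry with the unusual name "b.." should succeed and place it
     // directly inside the worktree root, as the final fs.paths() assertion
     // confirms.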
 6558#[gpui::test]
 6559async fn test_create_entry(cx: &mut gpui::TestAppContext) {
 6560    init_test(cx);
 6561
 6562    let fs = FakeFs::new(cx.executor());
 6563    fs.insert_tree(
 6564        "/one/two",
 6565        json!({
 6566            "three": {
 6567                "a.txt": "",
 6568                "four": {}
 6569            },
 6570            "c.rs": ""
 6571        }),
 6572    )
 6573    .await;
 6574
 6575    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
 6576    project
 6577        .update(cx, |project, cx| {
 6578            let id = project.worktrees(cx).next().unwrap().read(cx).id();
 6579            project.create_entry((id, rel_path("b..")), true, cx)
 6580        })
 6581        .await
 6582        .unwrap()
 6583        .into_included()
 6584        .unwrap();
 6585
 6586    assert_eq!(
 6587        fs.paths(true),
 6588        vec![
 6589            PathBuf::from(path!("/")),
 6590            PathBuf::from(path!("/one")),
 6591            PathBuf::from(path!("/one/two")),
 6592            PathBuf::from(path!("/one/two/c.rs")),
 6593            PathBuf::from(path!("/one/two/three")),
 6594            PathBuf::from(path!("/one/two/three/a.txt")),
 6595            PathBuf::from(path!("/one/two/three/b..")),
 6596            PathBuf::from(path!("/one/two/three/four")),
 6597        ]
 6598    );
 6599}
 6600
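     // With four fake servers attached to the same buffer, hover requests go to
     // every server that advertises hover support, and the final result merges
     // the non-empty responses (TailwindServer and TypeScriptServer here).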
 6601#[gpui::test]
 6602async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
 6603    init_test(cx);
 6604
 6605    let fs = FakeFs::new(cx.executor());
 6606    fs.insert_tree(
 6607        path!("/dir"),
 6608        json!({
 6609            "a.tsx": "a",
 6610        }),
 6611    )
 6612    .await;
 6613
 6614    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 6615
 6616    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 6617    language_registry.add(tsx_lang());
 6618    let language_server_names = [
 6619        "TypeScriptServer",
 6620        "TailwindServer",
 6621        "ESLintServer",
 6622        "NoHoverCapabilitiesServer",
 6623    ];
 6624    let mut language_servers = [
 6625        language_registry.register_fake_lsp(
 6626            "tsx",
 6627            FakeLspAdapter {
 6628                name: language_server_names[0],
 6629                capabilities: lsp::ServerCapabilities {
 6630                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
 6631                    ..lsp::ServerCapabilities::default()
 6632                },
 6633                ..FakeLspAdapter::default()
 6634            },
 6635        ),
 6636        language_registry.register_fake_lsp(
 6637            "tsx",
 6638            FakeLspAdapter {
 6639                name: language_server_names[1],
 6640                capabilities: lsp::ServerCapabilities {
 6641                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
 6642                    ..lsp::ServerCapabilities::default()
 6643                },
 6644                ..FakeLspAdapter::default()
 6645            },
 6646        ),
 6647        language_registry.register_fake_lsp(
 6648            "tsx",
 6649            FakeLspAdapter {
 6650                name: language_server_names[2],
 6651                capabilities: lsp::ServerCapabilities {
 6652                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
 6653                    ..lsp::ServerCapabilities::default()
 6654                },
 6655                ..FakeLspAdapter::default()
 6656            },
 6657        ),
 6658        language_registry.register_fake_lsp(
 6659            "tsx",
 6660            FakeLspAdapter {
 6661                name: language_server_names[3],
 6662                capabilities: lsp::ServerCapabilities {
 6663                    hover_provider: None,
 6664                    ..lsp::ServerCapabilities::default()
 6665                },
 6666                ..FakeLspAdapter::default()
 6667            },
 6668        ),
 6669    ];
 6670
 6671    let (buffer, _handle) = project
 6672        .update(cx, |p, cx| {
 6673            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
 6674        })
 6675        .await
 6676        .unwrap();
 6677    cx.executor().run_until_parked();
 6678
 6679    let mut servers_with_hover_requests = HashMap::default();
 6680    for i in 0..language_server_names.len() {
 6681        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
 6682            panic!(
 6683                "Failed to get language server #{i} with name {}",
 6684                &language_server_names[i]
 6685            )
 6686        });
 6687        let new_server_name = new_server.server.name();
 6688        assert!(
 6689            !servers_with_hover_requests.contains_key(&new_server_name),
 6690            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
 6691        );
 6692        match new_server_name.as_ref() {
 6693            "TailwindServer" | "TypeScriptServer" => {
 6694                servers_with_hover_requests.insert(
 6695                    new_server_name.clone(),
 6696                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
 6697                        move |_, _| {
 6698                            let name = new_server_name.clone();
 6699                            async move {
 6700                                Ok(Some(lsp::Hover {
 6701                                    contents: lsp::HoverContents::Scalar(
 6702                                        lsp::MarkedString::String(format!("{name} hover")),
 6703                                    ),
 6704                                    range: None,
 6705                                }))
 6706                            }
 6707                        },
 6708                    ),
 6709                );
 6710            }
 6711            "ESLintServer" => {
 6712                servers_with_hover_requests.insert(
 6713                    new_server_name,
 6714                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
 6715                        |_, _| async move { Ok(None) },
 6716                    ),
 6717                );
 6718            }
 6719            "NoHoverCapabilitiesServer" => {
 6720                let _never_handled = new_server
 6721                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
 6722                        panic!(
 6723                            "Should not call for hovers server with no corresponding capabilities"
 6724                        )
 6725                    });
 6726            }
 6727            unexpected => panic!("Unexpected server name: {unexpected}"),
 6728        }
 6729    }
 6730
 6731    let hover_task = project.update(cx, |project, cx| {
 6732        project.hover(&buffer, Point::new(0, 0), cx)
 6733    });
 6734    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
 6735        |mut hover_request| async move {
 6736            hover_request
 6737                .next()
 6738                .await
 6739                .expect("All hover requests should have been triggered")
 6740        },
 6741    ))
 6742    .await;
 6743    assert_eq!(
 6744        vec!["TailwindServer hover", "TypeScriptServer hover"],
 6745        hover_task
 6746            .await
 6747            .into_iter()
 6748            .flatten()
 6749            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
 6750            .sorted()
 6751            .collect::<Vec<_>>(),
 6752        "Should receive hover responses from all related servers with hover capabilities"
 6753    );
 6754}
 6755
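     // Hover contents consisting only of empty or whitespace-only strings should
     // be dropped entirely instead of producing a blank hover.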
 6756#[gpui::test]
 6757async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
 6758    init_test(cx);
 6759
 6760    let fs = FakeFs::new(cx.executor());
 6761    fs.insert_tree(
 6762        path!("/dir"),
 6763        json!({
 6764            "a.ts": "a",
 6765        }),
 6766    )
 6767    .await;
 6768
 6769    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 6770
 6771    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 6772    language_registry.add(typescript_lang());
 6773    let mut fake_language_servers = language_registry.register_fake_lsp(
 6774        "TypeScript",
 6775        FakeLspAdapter {
 6776            capabilities: lsp::ServerCapabilities {
 6777                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
 6778                ..lsp::ServerCapabilities::default()
 6779            },
 6780            ..FakeLspAdapter::default()
 6781        },
 6782    );
 6783
 6784    let (buffer, _handle) = project
 6785        .update(cx, |p, cx| {
 6786            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
 6787        })
 6788        .await
 6789        .unwrap();
 6790    cx.executor().run_until_parked();
 6791
 6792    let fake_server = fake_language_servers
 6793        .next()
 6794        .await
 6795        .expect("failed to get the language server");
 6796
 6797    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
 6798        move |_, _| async move {
 6799            Ok(Some(lsp::Hover {
 6800                contents: lsp::HoverContents::Array(vec![
 6801                    lsp::MarkedString::String("".to_string()),
 6802                    lsp::MarkedString::String("      ".to_string()),
 6803                    lsp::MarkedString::String("\n\n\n".to_string()),
 6804                ]),
 6805                range: None,
 6806            }))
 6807        },
 6808    );
 6809
 6810    let hover_task = project.update(cx, |project, cx| {
 6811        project.hover(&buffer, Point::new(0, 0), cx)
 6812    });
 6813    let () = request_handled
 6814        .next()
 6815        .await
 6816        .expect("All hover requests should have been triggered");
 6817    assert_eq!(
 6818        Vec::<String>::new(),
 6819        hover_task
 6820            .await
 6821            .into_iter()
 6822            .flatten()
 6823            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
 6824            .sorted()
 6825            .collect::<Vec<_>>(),
 6826        "Empty hover parts should be ignored"
 6827    );
 6828}
 6829
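     // Requesting code actions with an explicit kind filter
     // (SOURCE_ORGANIZE_IMPORTS) should return only actions of that kind, even
     // though the server also offers a SOURCE_FIX_ALL action.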
 6830#[gpui::test]
 6831async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
 6832    init_test(cx);
 6833
 6834    let fs = FakeFs::new(cx.executor());
 6835    fs.insert_tree(
 6836        path!("/dir"),
 6837        json!({
 6838            "a.ts": "a",
 6839        }),
 6840    )
 6841    .await;
 6842
 6843    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 6844
 6845    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 6846    language_registry.add(typescript_lang());
 6847    let mut fake_language_servers = language_registry.register_fake_lsp(
 6848        "TypeScript",
 6849        FakeLspAdapter {
 6850            capabilities: lsp::ServerCapabilities {
 6851                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
 6852                ..lsp::ServerCapabilities::default()
 6853            },
 6854            ..FakeLspAdapter::default()
 6855        },
 6856    );
 6857
 6858    let (buffer, _handle) = project
 6859        .update(cx, |p, cx| {
 6860            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
 6861        })
 6862        .await
 6863        .unwrap();
 6864    cx.executor().run_until_parked();
 6865
 6866    let fake_server = fake_language_servers
 6867        .next()
 6868        .await
 6869        .expect("failed to get the language server");
 6870
 6871    let mut request_handled = fake_server
 6872        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
 6873            Ok(Some(vec![
 6874                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
 6875                    title: "organize imports".to_string(),
 6876                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
 6877                    ..lsp::CodeAction::default()
 6878                }),
 6879                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
 6880                    title: "fix code".to_string(),
 6881                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
 6882                    ..lsp::CodeAction::default()
 6883                }),
 6884            ]))
 6885        });
 6886
 6887    let code_actions_task = project.update(cx, |project, cx| {
 6888        project.code_actions(
 6889            &buffer,
 6890            0..buffer.read(cx).len(),
 6891            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
 6892            cx,
 6893        )
 6894    });
 6895
 6896    let () = request_handled
 6897        .next()
 6898        .await
 6899        .expect("The code action request should have been triggered");
 6900
 6901    let code_actions = code_actions_task.await.unwrap().unwrap();
 6902    assert_eq!(code_actions.len(), 1);
 6903    assert_eq!(
 6904        code_actions[0].lsp_action.action_kind(),
 6905        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
 6906    );
 6907}
 6908
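     // Mirrors the multi-server hover test above for code actions: requests are
     // fanned out to every server with code action support, and the non-empty
     // responses are merged into a single list.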
 6909#[gpui::test]
 6910async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
 6911    init_test(cx);
 6912
 6913    let fs = FakeFs::new(cx.executor());
 6914    fs.insert_tree(
 6915        path!("/dir"),
 6916        json!({
 6917            "a.tsx": "a",
 6918        }),
 6919    )
 6920    .await;
 6921
 6922    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
 6923
 6924    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 6925    language_registry.add(tsx_lang());
 6926    let language_server_names = [
 6927        "TypeScriptServer",
 6928        "TailwindServer",
 6929        "ESLintServer",
 6930        "NoActionsCapabilitiesServer",
 6931    ];
 6932
 6933    let mut language_server_rxs = [
 6934        language_registry.register_fake_lsp(
 6935            "tsx",
 6936            FakeLspAdapter {
 6937                name: language_server_names[0],
 6938                capabilities: lsp::ServerCapabilities {
 6939                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
 6940                    ..lsp::ServerCapabilities::default()
 6941                },
 6942                ..FakeLspAdapter::default()
 6943            },
 6944        ),
 6945        language_registry.register_fake_lsp(
 6946            "tsx",
 6947            FakeLspAdapter {
 6948                name: language_server_names[1],
 6949                capabilities: lsp::ServerCapabilities {
 6950                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
 6951                    ..lsp::ServerCapabilities::default()
 6952                },
 6953                ..FakeLspAdapter::default()
 6954            },
 6955        ),
 6956        language_registry.register_fake_lsp(
 6957            "tsx",
 6958            FakeLspAdapter {
 6959                name: language_server_names[2],
 6960                capabilities: lsp::ServerCapabilities {
 6961                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
 6962                    ..lsp::ServerCapabilities::default()
 6963                },
 6964                ..FakeLspAdapter::default()
 6965            },
 6966        ),
 6967        language_registry.register_fake_lsp(
 6968            "tsx",
 6969            FakeLspAdapter {
 6970                name: language_server_names[3],
 6971                capabilities: lsp::ServerCapabilities {
 6972                    code_action_provider: None,
 6973                    ..lsp::ServerCapabilities::default()
 6974                },
 6975                ..FakeLspAdapter::default()
 6976            },
 6977        ),
 6978    ];
 6979
 6980    let (buffer, _handle) = project
 6981        .update(cx, |p, cx| {
 6982            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
 6983        })
 6984        .await
 6985        .unwrap();
 6986    cx.executor().run_until_parked();
 6987
 6988    let mut servers_with_actions_requests = HashMap::default();
 6989    for i in 0..language_server_names.len() {
 6990        let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
 6991            panic!(
 6992                "Failed to get language server #{i} with name {}",
 6993                &language_server_names[i]
 6994            )
 6995        });
 6996        let new_server_name = new_server.server.name();
 6997
 6998        assert!(
 6999            !servers_with_actions_requests.contains_key(&new_server_name),
 7000            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
 7001        );
 7002        match new_server_name.0.as_ref() {
 7003            "TailwindServer" | "TypeScriptServer" => {
 7004                servers_with_actions_requests.insert(
 7005                    new_server_name.clone(),
 7006                    new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
 7007                        move |_, _| {
 7008                            let name = new_server_name.clone();
 7009                            async move {
 7010                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
 7011                                    lsp::CodeAction {
 7012                                        title: format!("{name} code action"),
 7013                                        ..lsp::CodeAction::default()
 7014                                    },
 7015                                )]))
 7016                            }
 7017                        },
 7018                    ),
 7019                );
 7020            }
 7021            "ESLintServer" => {
 7022                servers_with_actions_requests.insert(
 7023                    new_server_name,
 7024                    new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
 7025                        |_, _| async move { Ok(None) },
 7026                    ),
 7027                );
 7028            }
 7029            "NoActionsCapabilitiesServer" => {
 7030                let _never_handled = new_server
 7031                    .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
 7032                        panic!(
 7033                            "Should not call for code actions server with no corresponding capabilities"
 7034                        )
 7035                    });
 7036            }
 7037            unexpected => panic!("Unexpected server name: {unexpected}"),
 7038        }
 7039    }
 7040
 7041    let code_actions_task = project.update(cx, |project, cx| {
 7042        project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
 7043    });
 7044
 7046    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
 7047        |mut code_actions_request| async move {
 7048            code_actions_request
 7049                .next()
 7050                .await
 7051                .expect("All code action requests should have been triggered")
 7052        },
 7053    ))
 7054    .await;
 7055    assert_eq!(
 7056        vec!["TailwindServer code action", "TypeScriptServer code action"],
 7057        code_actions_task
 7058            .await
 7059            .unwrap()
 7060            .unwrap()
 7061            .into_iter()
 7062            .map(|code_action| code_action.lsp_action.title().to_owned())
 7063            .sorted()
 7064            .collect::<Vec<_>>(),
 7065        "Should receive code action responses from all related servers with code action capabilities"
 7066    );
 7067}
 7068
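     // Exercises Project::move_worktree: each block below moves one worktree
     // relative to another and then asserts the resulting order of the visible
     // worktrees' absolute paths.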
 7069#[gpui::test]
 7070async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
 7071    init_test(cx);
 7072
 7073    let fs = FakeFs::new(cx.executor());
 7074    fs.insert_tree(
 7075        "/dir",
 7076        json!({
 7077            "a.rs": "let a = 1;",
 7078            "b.rs": "let b = 2;",
 7079            "c.rs": "let c = 2;",
 7080        }),
 7081    )
 7082    .await;
 7083
 7084    let project = Project::test(
 7085        fs,
 7086        [
 7087            "/dir/a.rs".as_ref(),
 7088            "/dir/b.rs".as_ref(),
 7089            "/dir/c.rs".as_ref(),
 7090        ],
 7091        cx,
 7092    )
 7093    .await;
 7094
 7095    // check the initial state and get the worktrees
 7096    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
 7097        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
 7098        assert_eq!(worktrees.len(), 3);
 7099
 7100        let worktree_a = worktrees[0].read(cx);
 7101        let worktree_b = worktrees[1].read(cx);
 7102        let worktree_c = worktrees[2].read(cx);
 7103
 7104        // check they start in the right order
 7105        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
 7106        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
 7107        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
 7108
 7109        (
 7110            worktrees[0].clone(),
 7111            worktrees[1].clone(),
 7112            worktrees[2].clone(),
 7113        )
 7114    });
 7115
 7116    // move first worktree to after the second
 7117    // [a, b, c] -> [b, a, c]
 7118    project
 7119        .update(cx, |project, cx| {
 7120            let first = worktree_a.read(cx);
 7121            let second = worktree_b.read(cx);
 7122            project.move_worktree(first.id(), second.id(), cx)
 7123        })
 7124        .expect("moving first after second");
 7125
 7126    // check the state after moving
 7127    project.update(cx, |project, cx| {
 7128        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
 7129        assert_eq!(worktrees.len(), 3);
 7130
 7131        let first = worktrees[0].read(cx);
 7132        let second = worktrees[1].read(cx);
 7133        let third = worktrees[2].read(cx);
 7134
 7135        // check they are now in the right order
 7136        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
 7137        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
 7138        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
 7139    });
 7140
 7141    // move the second worktree to before the first
 7142    // [b, a, c] -> [a, b, c]
 7143    project
 7144        .update(cx, |project, cx| {
 7145            let second = worktree_a.read(cx);
 7146            let first = worktree_b.read(cx);
 7147            project.move_worktree(first.id(), second.id(), cx)
 7148        })
 7149        .expect("moving second before first");
 7150
 7151    // check the state after moving
 7152    project.update(cx, |project, cx| {
 7153        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
 7154        assert_eq!(worktrees.len(), 3);
 7155
 7156        let first = worktrees[0].read(cx);
 7157        let second = worktrees[1].read(cx);
 7158        let third = worktrees[2].read(cx);
 7159
 7160        // check they are now in the right order
 7161        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
 7162        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
 7163        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
 7164    });
 7165
 7166    // move the second worktree to after the third
 7167    // [a, b, c] -> [a, c, b]
 7168    project
 7169        .update(cx, |project, cx| {
 7170            let second = worktree_b.read(cx);
 7171            let third = worktree_c.read(cx);
 7172            project.move_worktree(second.id(), third.id(), cx)
 7173        })
 7174        .expect("moving second after third");
 7175
 7176    // check the state after moving
 7177    project.update(cx, |project, cx| {
 7178        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
 7179        assert_eq!(worktrees.len(), 3);
 7180
 7181        let first = worktrees[0].read(cx);
 7182        let second = worktrees[1].read(cx);
 7183        let third = worktrees[2].read(cx);
 7184
 7185        // check they are now in the right order
 7186        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
 7187        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
 7188        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
 7189    });
 7190
 7191    // move the third worktree to before the second
 7192    // [a, c, b] -> [a, b, c]
 7193    project
 7194        .update(cx, |project, cx| {
 7195            let third = worktree_c.read(cx);
 7196            let second = worktree_b.read(cx);
 7197            project.move_worktree(third.id(), second.id(), cx)
 7198        })
 7199        .expect("moving third before second");
 7200
 7201    // check the state after moving
 7202    project.update(cx, |project, cx| {
 7203        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
 7204        assert_eq!(worktrees.len(), 3);
 7205
 7206        let first = worktrees[0].read(cx);
 7207        let second = worktrees[1].read(cx);
 7208        let third = worktrees[2].read(cx);
 7209
 7210        // check they are now in the right order
 7211        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
 7212        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
 7213        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
 7214    });
 7215
 7216    // move the first worktree to after the third
 7217    // [a, b, c] -> [b, c, a]
 7218    project
 7219        .update(cx, |project, cx| {
 7220            let first = worktree_a.read(cx);
 7221            let third = worktree_c.read(cx);
 7222            project.move_worktree(first.id(), third.id(), cx)
 7223        })
 7224        .expect("moving first after third");
 7225
 7226    // check the state after moving
 7227    project.update(cx, |project, cx| {
 7228        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
 7229        assert_eq!(worktrees.len(), 3);
 7230
 7231        let first = worktrees[0].read(cx);
 7232        let second = worktrees[1].read(cx);
 7233        let third = worktrees[2].read(cx);
 7234
 7235        // check they are now in the right order
 7236        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
 7237        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
 7238        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
 7239    });
 7240
 7241    // move the third worktree to before the first
 7242    // [b, c, a] -> [a, b, c]
 7243    project
 7244        .update(cx, |project, cx| {
 7245            let third = worktree_a.read(cx);
 7246            let first = worktree_b.read(cx);
 7247            project.move_worktree(third.id(), first.id(), cx)
 7248        })
 7249        .expect("moving third before first");
 7250
 7251    // check the state after moving
 7252    project.update(cx, |project, cx| {
 7253        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
 7254        assert_eq!(worktrees.len(), 3);
 7255
 7256        let first = worktrees[0].read(cx);
 7257        let second = worktrees[1].read(cx);
 7258        let third = worktrees[2].read(cx);
 7259
 7260        // check they are now in the right order
 7261        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
 7262        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
 7263        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
 7264    });
 7265}
 7266
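     // An unstaged diff compares the open buffer against the index contents set
     // via set_index_for_repo; changing the index afterwards should cause the
     // hunks to be recomputed against the new base text.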
 7267#[gpui::test]
 7268async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
 7269    init_test(cx);
 7270
 7271    let staged_contents = r#"
 7272        fn main() {
 7273            println!("hello world");
 7274        }
 7275    "#
 7276    .unindent();
 7277    let file_contents = r#"
 7278        // print goodbye
 7279        fn main() {
 7280            println!("goodbye world");
 7281        }
 7282    "#
 7283    .unindent();
 7284
 7285    let fs = FakeFs::new(cx.background_executor.clone());
 7286    fs.insert_tree(
 7287        "/dir",
 7288        json!({
 7289            ".git": {},
 7290           "src": {
 7291               "main.rs": file_contents,
 7292           }
 7293        }),
 7294    )
 7295    .await;
 7296
 7297    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
 7298
 7299    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
 7300
 7301    let buffer = project
 7302        .update(cx, |project, cx| {
 7303            project.open_local_buffer("/dir/src/main.rs", cx)
 7304        })
 7305        .await
 7306        .unwrap();
 7307    let unstaged_diff = project
 7308        .update(cx, |project, cx| {
 7309            project.open_unstaged_diff(buffer.clone(), cx)
 7310        })
 7311        .await
 7312        .unwrap();
 7313
 7314    cx.run_until_parked();
 7315    unstaged_diff.update(cx, |unstaged_diff, cx| {
 7316        let snapshot = buffer.read(cx).snapshot();
 7317        assert_hunks(
 7318            unstaged_diff.snapshot(cx).hunks(&snapshot),
 7319            &snapshot,
 7320            &unstaged_diff.base_text_string(cx).unwrap(),
 7321            &[
 7322                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
 7323                (
 7324                    2..3,
 7325                    "    println!(\"hello world\");\n",
 7326                    "    println!(\"goodbye world\");\n",
 7327                    DiffHunkStatus::modified_none(),
 7328                ),
 7329            ],
 7330        );
 7331    });
 7332
 7333    let staged_contents = r#"
 7334        // print goodbye
 7335        fn main() {
 7336        }
 7337    "#
 7338    .unindent();
 7339
 7340    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
 7341
 7342    cx.run_until_parked();
 7343    unstaged_diff.update(cx, |unstaged_diff, cx| {
 7344        let snapshot = buffer.read(cx).snapshot();
 7345        assert_hunks(
 7346            unstaged_diff
 7347                .snapshot(cx)
 7348                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
 7349            &snapshot,
 7350            &unstaged_diff.base_text(cx).text(),
 7351            &[(
 7352                2..3,
 7353                "",
 7354                "    println!(\"goodbye world\");\n",
 7355                DiffHunkStatus::added_none(),
 7356            )],
 7357        );
 7358    });
 7359}
 7360
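     // An uncommitted diff compares the buffer against HEAD; each hunk's
     // secondary status reflects whether the change is also still unstaged
     // relative to the index. A deleted file (src/deletion.rs) is covered too.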
 7361#[gpui::test]
 7362async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
 7363    init_test(cx);
 7364
 7365    let committed_contents = r#"
 7366        fn main() {
 7367            println!("hello world");
 7368        }
 7369    "#
 7370    .unindent();
 7371    let staged_contents = r#"
 7372        fn main() {
 7373            println!("goodbye world");
 7374        }
 7375    "#
 7376    .unindent();
 7377    let file_contents = r#"
 7378        // print goodbye
 7379        fn main() {
 7380            println!("goodbye world");
 7381        }
 7382    "#
 7383    .unindent();
 7384
 7385    let fs = FakeFs::new(cx.background_executor.clone());
 7386    fs.insert_tree(
 7387        "/dir",
 7388        json!({
 7389            ".git": {},
 7390           "src": {
 7391               "modification.rs": file_contents,
 7392           }
 7393        }),
 7394    )
 7395    .await;
 7396
 7397    fs.set_head_for_repo(
 7398        Path::new("/dir/.git"),
 7399        &[
 7400            ("src/modification.rs", committed_contents),
 7401            ("src/deletion.rs", "// the-deleted-contents\n".into()),
 7402        ],
 7403        "deadbeef",
 7404    );
 7405    fs.set_index_for_repo(
 7406        Path::new("/dir/.git"),
 7407        &[
 7408            ("src/modification.rs", staged_contents),
 7409            ("src/deletion.rs", "// the-deleted-contents\n".into()),
 7410        ],
 7411    );
 7412
 7413    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
 7414    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
 7415    let language = rust_lang();
 7416    language_registry.add(language.clone());
 7417
 7418    let buffer_1 = project
 7419        .update(cx, |project, cx| {
 7420            project.open_local_buffer("/dir/src/modification.rs", cx)
 7421        })
 7422        .await
 7423        .unwrap();
 7424    let diff_1 = project
 7425        .update(cx, |project, cx| {
 7426            project.open_uncommitted_diff(buffer_1.clone(), cx)
 7427        })
 7428        .await
 7429        .unwrap();
 7430    diff_1.read_with(cx, |diff, cx| {
 7431        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
 7432    });
 7433    cx.run_until_parked();
 7434    diff_1.update(cx, |diff, cx| {
 7435        let snapshot = buffer_1.read(cx).snapshot();
 7436        assert_hunks(
 7437            diff.snapshot(cx)
 7438                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
 7439            &snapshot,
 7440            &diff.base_text_string(cx).unwrap(),
 7441            &[
 7442                (
 7443                    0..1,
 7444                    "",
 7445                    "// print goodbye\n",
 7446                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
 7447                ),
 7448                (
 7449                    2..3,
 7450                    "    println!(\"hello world\");\n",
 7451                    "    println!(\"goodbye world\");\n",
 7452                    DiffHunkStatus::modified_none(),
 7453                ),
 7454            ],
 7455        );
 7456    });
 7457
 7458    // Reset HEAD to a version that differs from both the buffer and the index.
 7459    let committed_contents = r#"
 7460        // print goodbye
 7461        fn main() {
 7462        }
 7463    "#
 7464    .unindent();
 7465    fs.set_head_for_repo(
 7466        Path::new("/dir/.git"),
 7467        &[
 7468            ("src/modification.rs", committed_contents.clone()),
 7469            ("src/deletion.rs", "// the-deleted-contents\n".into()),
 7470        ],
 7471        "deadbeef",
 7472    );
 7473
 7474    // Buffer now has an unstaged hunk.
 7475    cx.run_until_parked();
 7476    diff_1.update(cx, |diff, cx| {
 7477        let snapshot = buffer_1.read(cx).snapshot();
 7478        assert_hunks(
 7479            diff.snapshot(cx)
 7480                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
 7481            &snapshot,
 7482            &diff.base_text(cx).text(),
 7483            &[(
 7484                2..3,
 7485                "",
 7486                "    println!(\"goodbye world\");\n",
 7487                DiffHunkStatus::added_none(),
 7488            )],
 7489        );
 7490    });
 7491
 7492    // Open a buffer for a file that's been deleted.
 7493    let buffer_2 = project
 7494        .update(cx, |project, cx| {
 7495            project.open_local_buffer("/dir/src/deletion.rs", cx)
 7496        })
 7497        .await
 7498        .unwrap();
 7499    let diff_2 = project
 7500        .update(cx, |project, cx| {
 7501            project.open_uncommitted_diff(buffer_2.clone(), cx)
 7502        })
 7503        .await
 7504        .unwrap();
 7505    cx.run_until_parked();
 7506    diff_2.update(cx, |diff, cx| {
 7507        let snapshot = buffer_2.read(cx).snapshot();
 7508        assert_hunks(
 7509            diff.snapshot(cx)
 7510                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
 7511            &snapshot,
 7512            &diff.base_text_string(cx).unwrap(),
 7513            &[(
 7514                0..0,
 7515                "// the-deleted-contents\n",
 7516                "",
 7517                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
 7518            )],
 7519        );
 7520    });
 7521
 7522    // Stage the deletion of this file by removing it from the index.
 7523    fs.set_index_for_repo(
 7524        Path::new("/dir/.git"),
 7525        &[("src/modification.rs", committed_contents.clone())],
 7526    );
 7527    cx.run_until_parked();
 7528    diff_2.update(cx, |diff, cx| {
 7529        let snapshot = buffer_2.read(cx).snapshot();
 7530        assert_hunks(
 7531            diff.snapshot(cx)
 7532                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
 7533            &snapshot,
 7534            &diff.base_text_string(cx).unwrap(),
 7535            &[(
 7536                0..0,
 7537                "// the-deleted-contents\n",
 7538                "",
 7539                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
 7540            )],
 7541        );
 7542    });
 7543}
 7544
 7545#[gpui::test]
 7546async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
 7547    use DiffHunkSecondaryStatus::*;
 7548    init_test(cx);
 7549
 7550    let committed_contents = r#"
 7551        zero
 7552        one
 7553        two
 7554        three
 7555        four
 7556        five
 7557    "#
 7558    .unindent();
 7559    let file_contents = r#"
 7560        one
 7561        TWO
 7562        three
 7563        FOUR
 7564        five
 7565    "#
 7566    .unindent();
 7567
 7568    let fs = FakeFs::new(cx.background_executor.clone());
 7569    fs.insert_tree(
 7570        "/dir",
 7571        json!({
 7572            ".git": {},
 7573            "file.txt": file_contents.clone()
 7574        }),
 7575    )
 7576    .await;
 7577
 7578    fs.set_head_and_index_for_repo(
 7579        path!("/dir/.git").as_ref(),
 7580        &[("file.txt", committed_contents.clone())],
 7581    );
 7582
 7583    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
 7584
 7585    let buffer = project
 7586        .update(cx, |project, cx| {
 7587            project.open_local_buffer("/dir/file.txt", cx)
 7588        })
 7589        .await
 7590        .unwrap();
 7591    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
 7592    let uncommitted_diff = project
 7593        .update(cx, |project, cx| {
 7594            project.open_uncommitted_diff(buffer.clone(), cx)
 7595        })
 7596        .await
 7597        .unwrap();
 7598    let mut diff_events = cx.events(&uncommitted_diff);
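         // Subscribe to the diff's events so the test can assert on the notifications emitted while staging.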
 7599
 7600    // The hunks are initially unstaged.
 7601    uncommitted_diff.read_with(cx, |diff, cx| {
 7602        assert_hunks(
 7603            diff.snapshot(cx).hunks(&snapshot),
 7604            &snapshot,
 7605            &diff.base_text_string(cx).unwrap(),
 7606            &[
 7607                (
 7608                    0..0,
 7609                    "zero\n",
 7610                    "",
 7611                    DiffHunkStatus::deleted(HasSecondaryHunk),
 7612                ),
 7613                (
 7614                    1..2,
 7615                    "two\n",
 7616                    "TWO\n",
 7617                    DiffHunkStatus::modified(HasSecondaryHunk),
 7618                ),
 7619                (
 7620                    3..4,
 7621                    "four\n",
 7622                    "FOUR\n",
 7623                    DiffHunkStatus::modified(HasSecondaryHunk),
 7624                ),
 7625            ],
 7626        );
 7627    });
 7628
 7629    // Stage a hunk. It is optimistically shown as staged while the index write is pending.
 7630    uncommitted_diff.update(cx, |diff, cx| {
 7631        let range =
 7632            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
 7633        let hunks = diff
 7634            .snapshot(cx)
 7635            .hunks_intersecting_range(range, &snapshot)
 7636            .collect::<Vec<_>>();
 7637        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
 7638
 7639        assert_hunks(
 7640            diff.snapshot(cx).hunks(&snapshot),
 7641            &snapshot,
 7642            &diff.base_text_string(cx).unwrap(),
 7643            &[
 7644                (
 7645                    0..0,
 7646                    "zero\n",
 7647                    "",
 7648                    DiffHunkStatus::deleted(HasSecondaryHunk),
 7649                ),
 7650                (
 7651                    1..2,
 7652                    "two\n",
 7653                    "TWO\n",
 7654                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
 7655                ),
 7656                (
 7657                    3..4,
 7658                    "four\n",
 7659                    "FOUR\n",
 7660                    DiffHunkStatus::modified(HasSecondaryHunk),
 7661                ),
 7662            ],
 7663        );
 7664    });
 7665
 7666    // The diff emits a change event for the range of the staged hunk.
 7667    assert!(matches!(
 7668        diff_events.next().await.unwrap(),
 7669        BufferDiffEvent::HunksStagedOrUnstaged(_)
 7670    ));
 7671    let event = diff_events.next().await.unwrap();
 7672    if let BufferDiffEvent::DiffChanged {
 7673        changed_range: Some(changed_range),
 7674        base_text_changed_range: _,
 7675    } = event
 7676    {
 7677        let changed_range = changed_range.to_point(&snapshot);
 7678        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
 7679    } else {
 7680        panic!("Unexpected event {event:?}");
 7681    }
 7682
 7683    // When the write to the index completes, it appears as staged.
 7684    cx.run_until_parked();
 7685    uncommitted_diff.update(cx, |diff, cx| {
 7686        assert_hunks(
 7687            diff.snapshot(cx).hunks(&snapshot),
 7688            &snapshot,
 7689            &diff.base_text_string(cx).unwrap(),
 7690            &[
 7691                (
 7692                    0..0,
 7693                    "zero\n",
 7694                    "",
 7695                    DiffHunkStatus::deleted(HasSecondaryHunk),
 7696                ),
 7697                (
 7698                    1..2,
 7699                    "two\n",
 7700                    "TWO\n",
 7701                    DiffHunkStatus::modified(NoSecondaryHunk),
 7702                ),
 7703                (
 7704                    3..4,
 7705                    "four\n",
 7706                    "FOUR\n",
 7707                    DiffHunkStatus::modified(HasSecondaryHunk),
 7708                ),
 7709            ],
 7710        );
 7711    });
 7712
 7713    // The diff emits a change event for the changed index text.
 7714    let event = diff_events.next().await.unwrap();
 7715    if let BufferDiffEvent::DiffChanged {
 7716        changed_range: Some(changed_range),
 7717        base_text_changed_range: _,
 7718    } = event
 7719    {
 7720        let changed_range = changed_range.to_point(&snapshot);
 7721        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
 7722    } else {
 7723        panic!("Unexpected event {event:?}");
 7724    }
 7725
 7726    // Simulate a problem writing to the git index.
 7727    fs.set_error_message_for_index_write(
 7728        "/dir/.git".as_ref(),
 7729        Some("failed to write git index".into()),
 7730    );
 7731
 7732    // Stage another hunk.
 7733    uncommitted_diff.update(cx, |diff, cx| {
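                             // Keep whichever side has a settled (non-Running) status; equal settled statuses are left as-is.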
 7734        let range =
 7735            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
 7736        let hunks = diff
 7737            .snapshot(cx)
 7738            .hunks_intersecting_range(range, &snapshot)
 7739            .collect::<Vec<_>>();
 7740        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
 7741
 7742        assert_hunks(
 7743            diff.snapshot(cx).hunks(&snapshot),
 7744            &snapshot,
 7745            &diff.base_text_string(cx).unwrap(),
 7746            &[
 7747                (
 7748                    0..0,
 7749                    "zero\n",
 7750                    "",
 7751                    DiffHunkStatus::deleted(HasSecondaryHunk),
 7752                ),
 7753                (
 7754                    1..2,
 7755                    "two\n",
 7756                    "TWO\n",
 7757                    DiffHunkStatus::modified(NoSecondaryHunk),
 7758                ),
 7759                (
 7760                    3..4,
 7761                    "four\n",
 7762                    "FOUR\n",
 7763                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
 7764                ),
 7765            ],
 7766        );
 7767    });
 7768    assert!(matches!(
 7769        diff_events.next().await.unwrap(),
 7770        BufferDiffEvent::HunksStagedOrUnstaged(_)
 7771    ));
 7772    let event = diff_events.next().await.unwrap();
 7773    if let BufferDiffEvent::DiffChanged {
 7774        changed_range: Some(changed_range),
 7775        base_text_changed_range: _,
 7776    } = event
 7777    {
 7778        let changed_range = changed_range.to_point(&snapshot);
 7779        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
 7780    } else {
 7781        panic!("Unexpected event {event:?}");
 7782    }
 7783
 7784    // When the write fails, the hunk returns to being unstaged.
 7785    cx.run_until_parked();
 7786    uncommitted_diff.update(cx, |diff, cx| {
 7787        assert_hunks(
 7788            diff.snapshot(cx).hunks(&snapshot),
 7789            &snapshot,
 7790            &diff.base_text_string(cx).unwrap(),
 7791            &[
 7792                (
 7793                    0..0,
 7794                    "zero\n",
 7795                    "",
 7796                    DiffHunkStatus::deleted(HasSecondaryHunk),
 7797                ),
 7798                (
 7799                    1..2,
 7800                    "two\n",
 7801                    "TWO\n",
 7802                    DiffHunkStatus::modified(NoSecondaryHunk),
 7803                ),
 7804                (
 7805                    3..4,
 7806                    "four\n",
 7807                    "FOUR\n",
 7808                    DiffHunkStatus::modified(HasSecondaryHunk),
 7809                ),
 7810            ],
 7811        );
 7812    });
 7813
 7814    let event = diff_events.next().await.unwrap();
 7815    if let BufferDiffEvent::DiffChanged {
 7816        changed_range: Some(changed_range),
 7817        base_text_changed_range: _,
 7818    } = event
 7819    {
 7820        let changed_range = changed_range.to_point(&snapshot);
 7821        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
 7822    } else {
 7823        panic!("Unexpected event {event:?}");
 7824    }
 7825
 7826    // Allow writing to the git index to succeed again.
 7827    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
 7828
 7829    // Stage two hunks with separate operations.
 7830    uncommitted_diff.update(cx, |diff, cx| {
 7831        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
 7832        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
 7833        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
 7834    });
 7835
 7836    // Both staged hunks appear as pending.
 7837    uncommitted_diff.update(cx, |diff, cx| {
 7838        assert_hunks(
 7839            diff.snapshot(cx).hunks(&snapshot),
 7840            &snapshot,
 7841            &diff.base_text_string(cx).unwrap(),
 7842            &[
 7843                (
 7844                    0..0,
 7845                    "zero\n",
 7846                    "",
 7847                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
 7848                ),
 7849                (
 7850                    1..2,
 7851                    "two\n",
 7852                    "TWO\n",
 7853                    DiffHunkStatus::modified(NoSecondaryHunk),
 7854                ),
 7855                (
 7856                    3..4,
 7857                    "four\n",
 7858                    "FOUR\n",
 7859                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
 7860                ),
 7861            ],
 7862        );
 7863    });
 7864
 7865    // Both staging operations take effect.
 7866    cx.run_until_parked();
 7867    uncommitted_diff.update(cx, |diff, cx| {
 7868        assert_hunks(
 7869            diff.snapshot(cx).hunks(&snapshot),
 7870            &snapshot,
 7871            &diff.base_text_string(cx).unwrap(),
 7872            &[
 7873                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
 7874                (
 7875                    1..2,
 7876                    "two\n",
 7877                    "TWO\n",
 7878                    DiffHunkStatus::modified(NoSecondaryHunk),
 7879                ),
 7880                (
 7881                    3..4,
 7882                    "four\n",
 7883                    "FOUR\n",
 7884                    DiffHunkStatus::modified(NoSecondaryHunk),
 7885                ),
 7886            ],
 7887        );
 7888    });
 7889}
 7890
 7891#[gpui::test(seeds(340, 472))]
 7892async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
 7893    use DiffHunkSecondaryStatus::*;
 7894    init_test(cx);
 7895
 7896    let committed_contents = r#"
 7897        zero
 7898        one
 7899        two
 7900        three
 7901        four
 7902        five
 7903    "#
 7904    .unindent();
 7905    let file_contents = r#"
 7906        one
 7907        TWO
 7908        three
 7909        FOUR
 7910        five
 7911    "#
 7912    .unindent();
 7913
 7914    let fs = FakeFs::new(cx.background_executor.clone());
 7915    fs.insert_tree(
 7916        "/dir",
 7917        json!({
 7918            ".git": {},
 7919            "file.txt": file_contents.clone()
 7920        }),
 7921    )
 7922    .await;
 7923
 7924    fs.set_head_for_repo(
 7925        "/dir/.git".as_ref(),
 7926        &[("file.txt", committed_contents.clone())],
 7927        "deadbeef",
 7928    );
 7929    fs.set_index_for_repo(
 7930        "/dir/.git".as_ref(),
 7931        &[("file.txt", committed_contents.clone())],
 7932    );
 7933
 7934    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
 7935
 7936    let buffer = project
 7937        .update(cx, |project, cx| {
 7938            project.open_local_buffer("/dir/file.txt", cx)
 7939        })
 7940        .await
 7941        .unwrap();
 7942    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
 7943    let uncommitted_diff = project
 7944        .update(cx, |project, cx| {
 7945            project.open_uncommitted_diff(buffer.clone(), cx)
 7946        })
 7947        .await
 7948        .unwrap();
 7949
 7950    // The hunks are initially unstaged.
 7951    uncommitted_diff.read_with(cx, |diff, cx| {
 7952        assert_hunks(
 7953            diff.snapshot(cx).hunks(&snapshot),
 7954            &snapshot,
 7955            &diff.base_text_string(cx).unwrap(),
 7956            &[
 7957                (
 7958                    0..0,
 7959                    "zero\n",
 7960                    "",
 7961                    DiffHunkStatus::deleted(HasSecondaryHunk),
 7962                ),
 7963                (
 7964                    1..2,
 7965                    "two\n",
 7966                    "TWO\n",
 7967                    DiffHunkStatus::modified(HasSecondaryHunk),
 7968                ),
 7969                (
 7970                    3..4,
 7971                    "four\n",
 7972                    "FOUR\n",
 7973                    DiffHunkStatus::modified(HasSecondaryHunk),
 7974                ),
 7975            ],
 7976        );
 7977    });
 7978
 7979    // Pause FS events so that index writes are not observed immediately.
 7980    fs.pause_events();
 7981
 7982    // Stage the first hunk.
 7983    uncommitted_diff.update(cx, |diff, cx| {
 7984        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
 7985        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
 7986        assert_hunks(
 7987            diff.snapshot(cx).hunks(&snapshot),
 7988            &snapshot,
 7989            &diff.base_text_string(cx).unwrap(),
 7990            &[
 7991                (
 7992                    0..0,
 7993                    "zero\n",
 7994                    "",
 7995                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
 7996                ),
 7997                (
 7998                    1..2,
 7999                    "two\n",
 8000                    "TWO\n",
 8001                    DiffHunkStatus::modified(HasSecondaryHunk),
 8002                ),
 8003                (
 8004                    3..4,
 8005                    "four\n",
 8006                    "FOUR\n",
 8007                    DiffHunkStatus::modified(HasSecondaryHunk),
 8008                ),
 8009            ],
 8010        );
 8011    });
 8012
 8013    // Stage the second hunk *before* receiving the FS event for the first hunk.
 8014    cx.run_until_parked();
 8015    uncommitted_diff.update(cx, |diff, cx| {
 8016        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
 8017        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
 8018        assert_hunks(
 8019            diff.snapshot(cx).hunks(&snapshot),
 8020            &snapshot,
 8021            &diff.base_text_string(cx).unwrap(),
 8022            &[
 8023                (
 8024                    0..0,
 8025                    "zero\n",
 8026                    "",
 8027                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
 8028                ),
 8029                (
 8030                    1..2,
 8031                    "two\n",
 8032                    "TWO\n",
 8033                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
 8034                ),
 8035                (
 8036                    3..4,
 8037                    "four\n",
 8038                    "FOUR\n",
 8039                    DiffHunkStatus::modified(HasSecondaryHunk),
 8040                ),
 8041            ],
 8042        );
 8043    });
 8044
 8045    // Process the FS event for staging the first hunk (second event is still pending).
 8046    fs.flush_events(1);
 8047    cx.run_until_parked();
 8048
 8049    // Stage the third hunk before receiving the second FS event.
 8050    uncommitted_diff.update(cx, |diff, cx| {
 8051        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
 8052        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
 8053    });
 8054
 8055    // Wait for all remaining IO.
 8056    cx.run_until_parked();
 8057    fs.flush_events(fs.buffered_event_count());
 8058
 8059    // Now all hunks are staged.
 8060    cx.run_until_parked();
 8061    uncommitted_diff.update(cx, |diff, cx| {
 8062        assert_hunks(
 8063            diff.snapshot(cx).hunks(&snapshot),
 8064            &snapshot,
 8065            &diff.base_text_string(cx).unwrap(),
 8066            &[
 8067                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
 8068                (
 8069                    1..2,
 8070                    "two\n",
 8071                    "TWO\n",
 8072                    DiffHunkStatus::modified(NoSecondaryHunk),
 8073                ),
 8074                (
 8075                    3..4,
 8076                    "four\n",
 8077                    "FOUR\n",
 8078                    DiffHunkStatus::modified(NoSecondaryHunk),
 8079                ),
 8080            ],
 8081        );
 8082    });
 8083}
 8084
 8085#[gpui::test(iterations = 25)]
 8086async fn test_staging_random_hunks(
 8087    mut rng: StdRng,
 8088    executor: BackgroundExecutor,
 8089    cx: &mut gpui::TestAppContext,
 8090) {
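         // `OPERATIONS` controls how many random stage/unstage operations the test performs (default 20).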
 8091    let operations = env::var("OPERATIONS")
 8092        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
 8093        .unwrap_or(20);
 8094
 8095    // Try to induce races between diff recalculation and index writes.
 8096    if rng.random_bool(0.5) {
 8097        executor.deprioritize(*CALCULATE_DIFF_TASK);
 8098    }
 8099
 8100    use DiffHunkSecondaryStatus::*;
 8101    init_test(cx);
 8102
 8103    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
 8104    let index_text = committed_text.clone();
 8105    let buffer_text = (0..30)
 8106        .map(|i| match i % 5 {
 8107            0 => format!("line {i} (modified)\n"),
 8108            _ => format!("line {i}\n"),
 8109        })
 8110        .collect::<String>();
 8111
 8112    let fs = FakeFs::new(cx.background_executor.clone());
 8113    fs.insert_tree(
 8114        path!("/dir"),
 8115        json!({
 8116            ".git": {},
 8117            "file.txt": buffer_text.clone()
 8118        }),
 8119    )
 8120    .await;
 8121    fs.set_head_for_repo(
 8122        path!("/dir/.git").as_ref(),
 8123        &[("file.txt", committed_text.clone())],
 8124        "deadbeef",
 8125    );
 8126    fs.set_index_for_repo(
 8127        path!("/dir/.git").as_ref(),
 8128        &[("file.txt", index_text.clone())],
 8129    );
 8130    let repo = fs
 8131        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
 8132        .unwrap();
 8133
 8134    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
 8135    let buffer = project
 8136        .update(cx, |project, cx| {
 8137            project.open_local_buffer(path!("/dir/file.txt"), cx)
 8138        })
 8139        .await
 8140        .unwrap();
 8141    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
 8142    let uncommitted_diff = project
 8143        .update(cx, |project, cx| {
 8144            project.open_uncommitted_diff(buffer.clone(), cx)
 8145        })
 8146        .await
 8147        .unwrap();
 8148
 8149    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
 8150        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
 8151    });
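         // Every fifth line (0, 5, ..., 25) was modified, so the diff should contain six hunks.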
 8152    assert_eq!(hunks.len(), 6);
 8153
 8154    for _i in 0..operations {
 8155        let hunk_ix = rng.random_range(0..hunks.len());
 8156        let hunk = &mut hunks[hunk_ix];
 8157        let row = hunk.range.start.row;
 8158
 8159        if hunk.status().has_secondary_hunk() {
 8160            log::info!("staging hunk at {row}");
 8161            uncommitted_diff.update(cx, |diff, cx| {
 8162                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
 8163            });
 8164            hunk.secondary_status = SecondaryHunkRemovalPending;
 8165        } else {
 8166            log::info!("unstaging hunk at {row}");
 8167            uncommitted_diff.update(cx, |diff, cx| {
 8168                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
 8169            });
 8170            hunk.secondary_status = SecondaryHunkAdditionPending;
 8171        }
 8172
 8173        for _ in 0..rng.random_range(0..10) {
 8174            log::info!("yielding");
 8175            cx.executor().simulate_random_delay().await;
 8176        }
 8177    }
 8178
 8179    cx.executor().run_until_parked();
 8180
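         // Once all index writes settle, pending statuses should resolve: removals to
         // `NoSecondaryHunk` and additions to `HasSecondaryHunk`.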
 8181    for hunk in &mut hunks {
 8182        if hunk.secondary_status == SecondaryHunkRemovalPending {
 8183            hunk.secondary_status = NoSecondaryHunk;
 8184        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
 8185            hunk.secondary_status = HasSecondaryHunk;
 8186        }
 8187    }
 8188
 8189    log::info!(
 8190        "index text:\n{}",
 8191        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
 8192            .await
 8193            .unwrap()
 8194    );
 8195
 8196    uncommitted_diff.update(cx, |diff, cx| {
 8197        let expected_hunks = hunks
 8198            .iter()
 8199            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
 8200            .collect::<Vec<_>>();
 8201        let actual_hunks = diff
 8202            .snapshot(cx)
 8203            .hunks(&snapshot)
 8204            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
 8205            .collect::<Vec<_>>();
 8206        assert_eq!(actual_hunks, expected_hunks);
 8207    });
 8208}
 8209
 8210#[gpui::test]
 8211async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
 8212    init_test(cx);
 8213
 8214    let committed_contents = r#"
 8215        fn main() {
 8216            println!("hello from HEAD");
 8217        }
 8218    "#
 8219    .unindent();
 8220    let file_contents = r#"
 8221        fn main() {
 8222            println!("hello from the working copy");
 8223        }
 8224    "#
 8225    .unindent();
 8226
 8227    let fs = FakeFs::new(cx.background_executor.clone());
 8228    fs.insert_tree(
 8229        "/dir",
 8230        json!({
 8231            ".git": {},
 8232           "src": {
 8233               "main.rs": file_contents,
 8234           }
 8235        }),
 8236    )
 8237    .await;
 8238
 8239    fs.set_head_for_repo(
 8240        Path::new("/dir/.git"),
 8241        &[("src/main.rs", committed_contents.clone())],
 8242        "deadbeef",
 8243    );
 8244    fs.set_index_for_repo(
 8245        Path::new("/dir/.git"),
 8246        &[("src/main.rs", committed_contents.clone())],
 8247    );
 8248
 8249    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
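         // The project's only worktree is a single file; the repository above it should still
         // be discovered so that an uncommitted diff can be computed.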
 8250
 8251    let buffer = project
 8252        .update(cx, |project, cx| {
 8253            project.open_local_buffer("/dir/src/main.rs", cx)
 8254        })
 8255        .await
 8256        .unwrap();
 8257    let uncommitted_diff = project
 8258        .update(cx, |project, cx| {
 8259            project.open_uncommitted_diff(buffer.clone(), cx)
 8260        })
 8261        .await
 8262        .unwrap();
 8263
 8264    cx.run_until_parked();
 8265    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
 8266        let snapshot = buffer.read(cx).snapshot();
 8267        assert_hunks(
 8268            uncommitted_diff.snapshot(cx).hunks(&snapshot),
 8269            &snapshot,
 8270            &uncommitted_diff.base_text_string(cx).unwrap(),
 8271            &[(
 8272                1..2,
 8273                "    println!(\"hello from HEAD\");\n",
 8274                "    println!(\"hello from the working copy\");\n",
 8275                DiffHunkStatus {
 8276                    kind: DiffHunkStatusKind::Modified,
 8277                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
 8278                },
 8279            )],
 8280        );
 8281    });
 8282}
 8283
 8284// TODO: Should we test this on Windows also?
 8285#[gpui::test]
 8286#[cfg(not(windows))]
 8287async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
 8288    use std::os::unix::fs::PermissionsExt;
 8289    init_test(cx);
 8290    cx.executor().allow_parking();
 8291    let committed_contents = "bar\n";
 8292    let file_contents = "baz\n";
 8293    let root = TempTree::new(json!({
 8294        "project": {
 8295            "foo": committed_contents
 8296        },
 8297    }));
 8298
 8299    let work_dir = root.path().join("project");
 8300    let file_path = work_dir.join("foo");
 8301    let repo = git_init(work_dir.as_path());
 8302    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
 8303    perms.set_mode(0o755);
 8304    std::fs::set_permissions(&file_path, perms).unwrap();
 8305    git_add("foo", &repo);
 8306    git_commit("Initial commit", &repo);
 8307    std::fs::write(&file_path, file_contents).unwrap();
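         // `foo` was committed with mode 755 and then modified; staging the hunk through the
         // project must not reset the executable bit in the index.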
 8308
 8309    let project = Project::test(
 8310        Arc::new(RealFs::new(None, cx.executor())),
 8311        [root.path()],
 8312        cx,
 8313    )
 8314    .await;
 8315
 8316    let buffer = project
 8317        .update(cx, |project, cx| {
 8318            project.open_local_buffer(file_path.as_path(), cx)
 8319        })
 8320        .await
 8321        .unwrap();
 8322
 8323    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
 8324
 8325    let uncommitted_diff = project
 8326        .update(cx, |project, cx| {
 8327            project.open_uncommitted_diff(buffer.clone(), cx)
 8328        })
 8329        .await
 8330        .unwrap();
 8331
 8332    uncommitted_diff.update(cx, |diff, cx| {
 8333        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
 8334        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
 8335    });
 8336
 8337    cx.run_until_parked();
 8338
 8339    let output = smol::process::Command::new("git")
 8340        .current_dir(&work_dir)
 8341        .args(["diff", "--staged"])
 8342        .output()
 8343        .await
 8344        .unwrap();
 8345
 8346    let staged_diff = String::from_utf8_lossy(&output.stdout);
 8347
 8348    assert!(
 8349        !staged_diff.contains("new mode 100644"),
 8350        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
 8351        staged_diff
 8352    );
 8353
 8354    let output = smol::process::Command::new("git")
 8355        .current_dir(&work_dir)
 8356        .args(["ls-files", "-s"])
 8357        .output()
 8358        .await
 8359        .unwrap();
 8360    let index_contents = String::from_utf8_lossy(&output.stdout);
 8361
 8362    assert!(
 8363        index_contents.contains("100755"),
 8364        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
 8365        index_contents
 8366    );
 8367}
 8368
 8369#[gpui::test]
 8370async fn test_repository_and_path_for_project_path(
 8371    background_executor: BackgroundExecutor,
 8372    cx: &mut gpui::TestAppContext,
 8373) {
 8374    init_test(cx);
 8375    let fs = FakeFs::new(background_executor);
 8376    fs.insert_tree(
 8377        path!("/root"),
 8378        json!({
 8379            "c.txt": "",
 8380            "dir1": {
 8381                ".git": {},
 8382                "deps": {
 8383                    "dep1": {
 8384                        ".git": {},
 8385                        "src": {
 8386                            "a.txt": ""
 8387                        }
 8388                    }
 8389                },
 8390                "src": {
 8391                    "b.txt": ""
 8392                }
 8393            },
 8394        }),
 8395    )
 8396    .await;
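         // `dir1` is a repository that contains a nested repository at `dir1/deps/dep1`;
         // project paths should resolve to the innermost repository that contains them.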
 8397
 8398    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
 8399    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 8400    let tree_id = tree.read_with(cx, |tree, _| tree.id());
 8401    project
 8402        .update(cx, |project, cx| project.git_scans_complete(cx))
 8403        .await;
 8404    cx.run_until_parked();
 8405
 8406    project.read_with(cx, |project, cx| {
 8407        let git_store = project.git_store().read(cx);
 8408        let pairs = [
 8409            ("c.txt", None),
 8410            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
 8411            (
 8412                "dir1/deps/dep1/src/a.txt",
 8413                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
 8414            ),
 8415        ];
 8416        let expected = pairs
 8417            .iter()
 8418            .map(|(path, result)| {
 8419                (
 8420                    path,
 8421                    result.map(|(repo, repo_path)| {
 8422                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
 8423                    }),
 8424                )
 8425            })
 8426            .collect::<Vec<_>>();
 8427        let actual = pairs
 8428            .iter()
 8429            .map(|(path, _)| {
 8430                let project_path = (tree_id, rel_path(path)).into();
 8431                let result = maybe!({
 8432                    let (repo, repo_path) =
 8433                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
 8434                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
 8435                });
 8436                (path, result)
 8437            })
 8438            .collect::<Vec<_>>();
 8439        pretty_assertions::assert_eq!(expected, actual);
 8440    });
 8441
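         // Removing dir1's .git directory should cause paths under dir1 (outside the nested
         // dep1 repository) to stop resolving to any repository.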
 8442    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
 8443        .await
 8444        .unwrap();
 8445    cx.run_until_parked();
 8446
 8447    project.read_with(cx, |project, cx| {
 8448        let git_store = project.git_store().read(cx);
 8449        assert_eq!(
 8450            git_store.repository_and_path_for_project_path(
 8451                &(tree_id, rel_path("dir1/src/b.txt")).into(),
 8452                cx
 8453            ),
 8454            None
 8455        );
 8456    });
 8457}
 8458
 8459#[gpui::test]
 8460async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
 8461    init_test(cx);
 8462    let fs = FakeFs::new(cx.background_executor.clone());
 8463    let home = paths::home_dir();
 8464    fs.insert_tree(
 8465        home,
 8466        json!({
 8467            ".git": {},
 8468            "project": {
 8469                "a.txt": "A"
 8470            },
 8471        }),
 8472    )
 8473    .await;
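         // The home directory itself is a git repository here. Opening only the `project`
         // subfolder should not treat the home directory as that project's repository, but
         // opening the home directory directly should.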
 8474
 8475    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
 8476    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 8477    let tree_id = tree.read_with(cx, |tree, _| tree.id());
 8478
 8479    project
 8480        .update(cx, |project, cx| project.git_scans_complete(cx))
 8481        .await;
 8482    tree.flush_fs_events(cx).await;
 8483
 8484    project.read_with(cx, |project, cx| {
 8485        let containing = project
 8486            .git_store()
 8487            .read(cx)
 8488            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
 8489        assert!(containing.is_none());
 8490    });
 8491
 8492    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
 8493    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 8494    let tree_id = tree.read_with(cx, |tree, _| tree.id());
 8495    project
 8496        .update(cx, |project, cx| project.git_scans_complete(cx))
 8497        .await;
 8498    tree.flush_fs_events(cx).await;
 8499
 8500    project.read_with(cx, |project, cx| {
 8501        let containing = project
 8502            .git_store()
 8503            .read(cx)
 8504            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
 8505        assert_eq!(
 8506            containing
 8507                .unwrap()
 8508                .0
 8509                .read(cx)
 8510                .work_directory_abs_path
 8511                .as_ref(),
 8512            home,
 8513        );
 8514    });
 8515}
 8516
 8517#[gpui::test]
 8518async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
 8519    init_test(cx);
 8520    cx.executor().allow_parking();
 8521
 8522    let root = TempTree::new(json!({
 8523        "project": {
 8524            "a.txt": "a",    // Modified
 8525            "b.txt": "bb",   // Added
 8526            "c.txt": "ccc",  // Unchanged
 8527            "d.txt": "dddd", // Deleted
 8528        },
 8529    }));
 8530
 8531    // Set up git repository before creating the project.
 8532    let work_dir = root.path().join("project");
 8533    let repo = git_init(work_dir.as_path());
 8534    git_add("a.txt", &repo);
 8535    git_add("c.txt", &repo);
 8536    git_add("d.txt", &repo);
 8537    git_commit("Initial commit", &repo);
 8538    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
 8539    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();
 8540
 8541    let project = Project::test(
 8542        Arc::new(RealFs::new(None, cx.executor())),
 8543        [root.path()],
 8544        cx,
 8545    )
 8546    .await;
 8547
 8548    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 8549    tree.flush_fs_events(cx).await;
 8550    project
 8551        .update(cx, |project, cx| project.git_scans_complete(cx))
 8552        .await;
 8553    cx.executor().run_until_parked();
 8554
 8555    let repository = project.read_with(cx, |project, cx| {
 8556        project.repositories(cx).values().next().unwrap().clone()
 8557    });
 8558
 8559    // Check that the correct git status is observed on startup.
 8560    repository.read_with(cx, |repository, _| {
 8561        let entries = repository.cached_status().collect::<Vec<_>>();
 8562        assert_eq!(
 8563            entries,
 8564            [
 8565                StatusEntry {
 8566                    repo_path: repo_path("a.txt"),
 8567                    status: StatusCode::Modified.worktree(),
 8568                },
 8569                StatusEntry {
 8570                    repo_path: repo_path("b.txt"),
 8571                    status: FileStatus::Untracked,
 8572                },
 8573                StatusEntry {
 8574                    repo_path: repo_path("d.txt"),
 8575                    status: StatusCode::Deleted.worktree(),
 8576                },
 8577            ]
 8578        );
 8579    });
 8580
 8581    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();
 8582
 8583    tree.flush_fs_events(cx).await;
 8584    project
 8585        .update(cx, |project, cx| project.git_scans_complete(cx))
 8586        .await;
 8587    cx.executor().run_until_parked();
 8588
 8589    repository.read_with(cx, |repository, _| {
 8590        let entries = repository.cached_status().collect::<Vec<_>>();
 8591        assert_eq!(
 8592            entries,
 8593            [
 8594                StatusEntry {
 8595                    repo_path: repo_path("a.txt"),
 8596                    status: StatusCode::Modified.worktree(),
 8597                },
 8598                StatusEntry {
 8599                    repo_path: repo_path("b.txt"),
 8600                    status: FileStatus::Untracked,
 8601                },
 8602                StatusEntry {
 8603                    repo_path: repo_path("c.txt"),
 8604                    status: StatusCode::Modified.worktree(),
 8605                },
 8606                StatusEntry {
 8607                    repo_path: repo_path("d.txt"),
 8608                    status: StatusCode::Deleted.worktree(),
 8609                },
 8610            ]
 8611        );
 8612    });
 8613
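         // Stage and commit the outstanding changes, then delete one tracked file (a.txt)
         // and one untracked file (b.txt).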
 8614    git_add("a.txt", &repo);
 8615    git_add("c.txt", &repo);
 8616    git_remove_index(Path::new("d.txt"), &repo);
 8617    git_commit("Another commit", &repo);
 8618    tree.flush_fs_events(cx).await;
 8619    project
 8620        .update(cx, |project, cx| project.git_scans_complete(cx))
 8621        .await;
 8622    cx.executor().run_until_parked();
 8623
 8624    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
 8625    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
 8626    tree.flush_fs_events(cx).await;
 8627    project
 8628        .update(cx, |project, cx| project.git_scans_complete(cx))
 8629        .await;
 8630    cx.executor().run_until_parked();
 8631
 8632    repository.read_with(cx, |repository, _cx| {
 8633        let entries = repository.cached_status().collect::<Vec<_>>();
 8634
 8635        // Deleting the untracked entry b.txt should leave no status entry,
 8636        // while a.txt was tracked, so its deletion still has a status.
 8637        assert_eq!(
 8638            entries,
 8639            [StatusEntry {
 8640                repo_path: repo_path("a.txt"),
 8641                status: StatusCode::Deleted.worktree(),
 8642            }]
 8643        );
 8644    });
 8645}
 8646
 8647#[gpui::test]
 8648#[ignore]
 8649async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
 8650    init_test(cx);
 8651    cx.executor().allow_parking();
 8652
 8653    let root = TempTree::new(json!({
 8654        "project": {
 8655            "sub": {},
 8656            "a.txt": "",
 8657        },
 8658    }));
 8659
 8660    let work_dir = root.path().join("project");
 8661    let repo = git_init(work_dir.as_path());
 8662    // a.txt exists in HEAD and the working copy but is deleted in the index.
 8663    git_add("a.txt", &repo);
 8664    git_commit("Initial commit", &repo);
 8665    git_remove_index("a.txt".as_ref(), &repo);
 8666    // `sub` is a nested git repository.
 8667    let _sub = git_init(&work_dir.join("sub"));
 8668
 8669    let project = Project::test(
 8670        Arc::new(RealFs::new(None, cx.executor())),
 8671        [root.path()],
 8672        cx,
 8673    )
 8674    .await;
 8675
 8676    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 8677    tree.flush_fs_events(cx).await;
 8678    project
 8679        .update(cx, |project, cx| project.git_scans_complete(cx))
 8680        .await;
 8681    cx.executor().run_until_parked();
 8682
 8683    let repository = project.read_with(cx, |project, cx| {
 8684        project
 8685            .repositories(cx)
 8686            .values()
 8687            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
 8688            .unwrap()
 8689            .clone()
 8690    });
 8691
 8692    repository.read_with(cx, |repository, _cx| {
 8693        let entries = repository.cached_status().collect::<Vec<_>>();
 8694
 8695        // `sub` doesn't appear in our computed statuses.
 8696        // a.txt appears with a combined `DA` status.
 8697        assert_eq!(
 8698            entries,
 8699            [StatusEntry {
 8700                repo_path: repo_path("a.txt"),
 8701                status: TrackedStatus {
 8702                    index_status: StatusCode::Deleted,
 8703                    worktree_status: StatusCode::Added
 8704                }
 8705                .into(),
 8706            }]
 8707        )
 8708    });
 8709}
 8710
 8711#[track_caller]
 8712/// Merges the pending ops in `source` into `target`, matching entries by repo path and op id.
 8713fn merge_pending_ops_snapshots(
 8714    source: Vec<pending_op::PendingOps>,
 8715    mut target: Vec<pending_op::PendingOps>,
 8716) -> Vec<pending_op::PendingOps> {
 8717    for s_ops in source {
 8718        if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
 8719            if ops.repo_path == s_ops.repo_path {
 8720                Some(idx)
 8721            } else {
 8722                None
 8723            }
 8724        }) {
 8725            let t_ops = &mut target[idx];
 8726            for s_op in s_ops.ops {
 8727                if let Some(op_idx) = t_ops
 8728                    .ops
 8729                    .iter()
 8730                    .zip(0..)
 8731                    .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
 8732                {
 8733                    let t_op = &mut t_ops.ops[op_idx];
 8734                    match (s_op.job_status, t_op.job_status) {
 8735                        (pending_op::JobStatus::Running, _) => {}
 8736                        (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
 8737                        (s_st, t_st) if s_st == t_st => {}
 8738                        _ => unreachable!(),
 8739                    }
 8740                } else {
 8741                    t_ops.ops.push(s_op);
 8742                }
 8743            }
 8744            t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
 8745        } else {
 8746            target.push(s_ops);
 8747        }
 8748    }
 8749    target
 8750}
 8751
 8752#[gpui::test]
 8753async fn test_repository_pending_ops_staging(
 8754    executor: gpui::BackgroundExecutor,
 8755    cx: &mut gpui::TestAppContext,
 8756) {
 8757    init_test(cx);
 8758
 8759    let fs = FakeFs::new(executor);
 8760    fs.insert_tree(
 8761        path!("/root"),
 8762        json!({
 8763            "my-repo": {
 8764                ".git": {},
 8765                "a.txt": "a",
 8766            }
 8767
 8768        }),
 8769    )
 8770    .await;
 8771
 8772    fs.set_status_for_repo(
 8773        path!("/root/my-repo/.git").as_ref(),
 8774        &[("a.txt", FileStatus::Untracked)],
 8775    );
 8776
 8777    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
 8778    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
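         // Accumulate every `PendingOpsChanged` snapshot into one tree so the full op history
         // can be asserted on at the end of the test.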
 8779    project.update(cx, |project, cx| {
 8780        let pending_ops_all = pending_ops_all.clone();
 8781        cx.subscribe(project.git_store(), move |_, _, e, _| {
 8782            if let GitStoreEvent::RepositoryUpdated(
 8783                _,
 8784                RepositoryEvent::PendingOpsChanged { pending_ops },
 8785                _,
 8786            ) = e
 8787            {
 8788                let merged = merge_pending_ops_snapshots(
 8789                    pending_ops.items(()),
 8790                    pending_ops_all.lock().items(()),
 8791                );
 8792                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
 8793            }
 8794        })
 8795        .detach();
 8796    });
 8797    project
 8798        .update(cx, |project, cx| project.git_scans_complete(cx))
 8799        .await;
 8800
 8801    let repo = project.read_with(cx, |project, cx| {
 8802        project.repositories(cx).values().next().unwrap().clone()
 8803    });
 8804
 8805    // Ensure there are no pending ops for the untracked file yet.
 8806    repo.read_with(cx, |repo, _cx| {
 8807        assert!(repo.pending_ops().next().is_none());
 8808    });
 8809
 8810    let mut id = 1u16;
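         // Pending op ids are expected to be assigned sequentially, starting at 1.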
 8811
 8812    let mut assert_stage = async |path: RepoPath, stage| {
 8813        let git_status = if stage {
 8814            pending_op::GitStatus::Staged
 8815        } else {
 8816            pending_op::GitStatus::Unstaged
 8817        };
 8818        repo.update(cx, |repo, cx| {
 8819            let task = if stage {
 8820                repo.stage_entries(vec![path.clone()], cx)
 8821            } else {
 8822                repo.unstage_entries(vec![path.clone()], cx)
 8823            };
 8824            let ops = repo.pending_ops_for_path(&path).unwrap();
 8825            assert_eq!(
 8826                ops.ops.last(),
 8827                Some(&pending_op::PendingOp {
 8828                    id: id.into(),
 8829                    git_status,
 8830                    job_status: pending_op::JobStatus::Running
 8831                })
 8832            );
 8833            task
 8834        })
 8835        .await
 8836        .unwrap();
 8837
 8838        repo.read_with(cx, |repo, _cx| {
 8839            let ops = repo.pending_ops_for_path(&path).unwrap();
 8840            assert_eq!(
 8841                ops.ops.last(),
 8842                Some(&pending_op::PendingOp {
 8843                    id: id.into(),
 8844                    git_status,
 8845                    job_status: pending_op::JobStatus::Finished
 8846                })
 8847            );
 8848        });
 8849
 8850        id += 1;
 8851    };
 8852
 8853    assert_stage(repo_path("a.txt"), true).await;
 8854    assert_stage(repo_path("a.txt"), false).await;
 8855    assert_stage(repo_path("a.txt"), true).await;
 8856    assert_stage(repo_path("a.txt"), false).await;
 8857    assert_stage(repo_path("a.txt"), true).await;
 8858
 8859    cx.run_until_parked();
 8860
 8861    assert_eq!(
 8862        pending_ops_all
 8863            .lock()
 8864            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
 8865            .unwrap()
 8866            .ops,
 8867        vec![
 8868            pending_op::PendingOp {
 8869                id: 1u16.into(),
 8870                git_status: pending_op::GitStatus::Staged,
 8871                job_status: pending_op::JobStatus::Finished
 8872            },
 8873            pending_op::PendingOp {
 8874                id: 2u16.into(),
 8875                git_status: pending_op::GitStatus::Unstaged,
 8876                job_status: pending_op::JobStatus::Finished
 8877            },
 8878            pending_op::PendingOp {
 8879                id: 3u16.into(),
 8880                git_status: pending_op::GitStatus::Staged,
 8881                job_status: pending_op::JobStatus::Finished
 8882            },
 8883            pending_op::PendingOp {
 8884                id: 4u16.into(),
 8885                git_status: pending_op::GitStatus::Unstaged,
 8886                job_status: pending_op::JobStatus::Finished
 8887            },
 8888            pending_op::PendingOp {
 8889                id: 5u16.into(),
 8890                git_status: pending_op::GitStatus::Staged,
 8891                job_status: pending_op::JobStatus::Finished
 8892            }
 8893        ],
 8894    );
 8895
 8896    repo.update(cx, |repo, _cx| {
 8897        let git_statuses = repo.cached_status().collect::<Vec<_>>();
 8898
 8899        assert_eq!(
 8900            git_statuses,
 8901            [StatusEntry {
 8902                repo_path: repo_path("a.txt"),
 8903                status: TrackedStatus {
 8904                    index_status: StatusCode::Added,
 8905                    worktree_status: StatusCode::Unmodified
 8906                }
 8907                .into(),
 8908            }]
 8909        );
 8910    });
 8911}
 8912
 8913#[gpui::test]
 8914async fn test_repository_pending_ops_long_running_staging(
 8915    executor: gpui::BackgroundExecutor,
 8916    cx: &mut gpui::TestAppContext,
 8917) {
 8918    init_test(cx);
 8919
 8920    let fs = FakeFs::new(executor);
 8921    fs.insert_tree(
 8922        path!("/root"),
 8923        json!({
 8924            "my-repo": {
 8925                ".git": {},
 8926                "a.txt": "a",
 8927            }
 8928
 8929        }),
 8930    )
 8931    .await;
 8932
 8933    fs.set_status_for_repo(
 8934        path!("/root/my-repo/.git").as_ref(),
 8935        &[("a.txt", FileStatus::Untracked)],
 8936    );
 8937
 8938    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
 8939    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
 8940    project.update(cx, |project, cx| {
 8941        let pending_ops_all = pending_ops_all.clone();
 8942        cx.subscribe(project.git_store(), move |_, _, e, _| {
 8943            if let GitStoreEvent::RepositoryUpdated(
 8944                _,
 8945                RepositoryEvent::PendingOpsChanged { pending_ops },
 8946                _,
 8947            ) = e
 8948            {
 8949                let merged = merge_pending_ops_snapshots(
 8950                    pending_ops.items(()),
 8951                    pending_ops_all.lock().items(()),
 8952                );
 8953                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
 8954            }
 8955        })
 8956        .detach();
 8957    });
 8958
 8959    project
 8960        .update(cx, |project, cx| project.git_scans_complete(cx))
 8961        .await;
 8962
 8963    let repo = project.read_with(cx, |project, cx| {
 8964        project.repositories(cx).values().next().unwrap().clone()
 8965    });
 8966
 8967    repo.update(cx, |repo, cx| {
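         // Issue two stage requests for the same path: the first is detached rather than awaited,
         // and the second should supersede it, so the first op ends up `Skipped`.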
 8968        repo.stage_entries(vec![repo_path("a.txt")], cx)
 8969    })
 8970    .detach();
 8971
 8972    repo.update(cx, |repo, cx| {
 8973        repo.stage_entries(vec![repo_path("a.txt")], cx)
 8974    })
 8975    .unwrap()
 8976    .with_timeout(Duration::from_secs(1), &cx.executor())
 8977    .await
 8978    .unwrap();
 8979
 8980    cx.run_until_parked();
 8981
 8982    assert_eq!(
 8983        pending_ops_all
 8984            .lock()
 8985            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
 8986            .unwrap()
 8987            .ops,
 8988        vec![
 8989            pending_op::PendingOp {
 8990                id: 1u16.into(),
 8991                git_status: pending_op::GitStatus::Staged,
 8992                job_status: pending_op::JobStatus::Skipped
 8993            },
 8994            pending_op::PendingOp {
 8995                id: 2u16.into(),
 8996                git_status: pending_op::GitStatus::Staged,
 8997                job_status: pending_op::JobStatus::Finished
 8998            }
 8999        ],
 9000    );
 9001
 9002    repo.update(cx, |repo, _cx| {
 9003        let git_statuses = repo.cached_status().collect::<Vec<_>>();
 9004
 9005        assert_eq!(
 9006            git_statuses,
 9007            [StatusEntry {
 9008                repo_path: repo_path("a.txt"),
 9009                status: TrackedStatus {
 9010                    index_status: StatusCode::Added,
 9011                    worktree_status: StatusCode::Unmodified
 9012                }
 9013                .into(),
 9014            }]
 9015        );
 9016    });
 9017}
 9018
 9019#[gpui::test]
 9020async fn test_repository_pending_ops_stage_all(
 9021    executor: gpui::BackgroundExecutor,
 9022    cx: &mut gpui::TestAppContext,
 9023) {
 9024    init_test(cx);
 9025
 9026    let fs = FakeFs::new(executor);
 9027    fs.insert_tree(
 9028        path!("/root"),
 9029        json!({
 9030            "my-repo": {
 9031                ".git": {},
 9032                "a.txt": "a",
 9033                "b.txt": "b"
 9034            }
 9035
 9036        }),
 9037    )
 9038    .await;
 9039
 9040    fs.set_status_for_repo(
 9041        path!("/root/my-repo/.git").as_ref(),
 9042        &[
 9043            ("a.txt", FileStatus::Untracked),
 9044            ("b.txt", FileStatus::Untracked),
 9045        ],
 9046    );
 9047
 9048    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
 9049    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
 9050    project.update(cx, |project, cx| {
 9051        let pending_ops_all = pending_ops_all.clone();
 9052        cx.subscribe(project.git_store(), move |_, _, e, _| {
 9053            if let GitStoreEvent::RepositoryUpdated(
 9054                _,
 9055                RepositoryEvent::PendingOpsChanged { pending_ops },
 9056                _,
 9057            ) = e
 9058            {
 9059                let merged = merge_pending_ops_snapshots(
 9060                    pending_ops.items(()),
 9061                    pending_ops_all.lock().items(()),
 9062                );
 9063                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
 9064            }
 9065        })
 9066        .detach();
 9067    });
 9068    project
 9069        .update(cx, |project, cx| project.git_scans_complete(cx))
 9070        .await;
 9071
 9072    let repo = project.read_with(cx, |project, cx| {
 9073        project.repositories(cx).values().next().unwrap().clone()
 9074    });
 9075
 9076    repo.update(cx, |repo, cx| {
 9077        repo.stage_entries(vec![repo_path("a.txt")], cx)
 9078    })
 9079    .await
 9080    .unwrap();
 9081    repo.update(cx, |repo, cx| repo.stage_all(cx))
 9082        .await
 9083        .unwrap();
 9084    repo.update(cx, |repo, cx| repo.unstage_all(cx))
 9085        .await
 9086        .unwrap();
 9087
 9088    cx.run_until_parked();
 9089
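         // Each path keeps its own op history: a.txt should record its explicit stage and the
         // later unstage_all, while b.txt should record the stage_all and the unstage_all.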
 9090    assert_eq!(
 9091        pending_ops_all
 9092            .lock()
 9093            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
 9094            .unwrap()
 9095            .ops,
 9096        vec![
 9097            pending_op::PendingOp {
 9098                id: 1u16.into(),
 9099                git_status: pending_op::GitStatus::Staged,
 9100                job_status: pending_op::JobStatus::Finished
 9101            },
 9102            pending_op::PendingOp {
 9103                id: 2u16.into(),
 9104                git_status: pending_op::GitStatus::Unstaged,
 9105                job_status: pending_op::JobStatus::Finished
 9106            },
 9107        ],
 9108    );
 9109    assert_eq!(
 9110        pending_ops_all
 9111            .lock()
 9112            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
 9113            .unwrap()
 9114            .ops,
 9115        vec![
 9116            pending_op::PendingOp {
 9117                id: 1u16.into(),
 9118                git_status: pending_op::GitStatus::Staged,
 9119                job_status: pending_op::JobStatus::Finished
 9120            },
 9121            pending_op::PendingOp {
 9122                id: 2u16.into(),
 9123                git_status: pending_op::GitStatus::Unstaged,
 9124                job_status: pending_op::JobStatus::Finished
 9125            },
 9126        ],
 9127    );
 9128
 9129    repo.update(cx, |repo, _cx| {
 9130        let git_statuses = repo.cached_status().collect::<Vec<_>>();
 9131
 9132        assert_eq!(
 9133            git_statuses,
 9134            [
 9135                StatusEntry {
 9136                    repo_path: repo_path("a.txt"),
 9137                    status: FileStatus::Untracked,
 9138                },
 9139                StatusEntry {
 9140                    repo_path: repo_path("b.txt"),
 9141                    status: FileStatus::Untracked,
 9142                },
 9143            ]
 9144        );
 9145    });
 9146}
 9147
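      // Verifies that when the worktree root is a subfolder of a repository, the repository's work
      // directory still resolves to the repo root and statuses are reported relative to it.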
 9148#[gpui::test]
 9149async fn test_repository_subfolder_git_status(
 9150    executor: gpui::BackgroundExecutor,
 9151    cx: &mut gpui::TestAppContext,
 9152) {
 9153    init_test(cx);
 9154
 9155    let fs = FakeFs::new(executor);
 9156    fs.insert_tree(
 9157        path!("/root"),
 9158        json!({
 9159            "my-repo": {
 9160                ".git": {},
 9161                "a.txt": "a",
 9162                "sub-folder-1": {
 9163                    "sub-folder-2": {
 9164                        "c.txt": "cc",
 9165                        "d": {
 9166                            "e.txt": "eee"
 9167                        }
 9168                    },
 9169                }
 9170            },
 9171        }),
 9172    )
 9173    .await;
 9174
 9175    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
 9176    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
 9177
 9178    fs.set_status_for_repo(
 9179        path!("/root/my-repo/.git").as_ref(),
 9180        &[(E_TXT, FileStatus::Untracked)],
 9181    );
 9182
 9183    let project = Project::test(
 9184        fs.clone(),
 9185        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
 9186        cx,
 9187    )
 9188    .await;
 9189
 9190    project
 9191        .update(cx, |project, cx| project.git_scans_complete(cx))
 9192        .await;
 9193    cx.run_until_parked();
 9194
 9195    let repository = project.read_with(cx, |project, cx| {
 9196        project.repositories(cx).values().next().unwrap().clone()
 9197    });
 9198
 9199    // Ensure that the git status is loaded correctly
 9200    repository.read_with(cx, |repository, _cx| {
 9201        assert_eq!(
 9202            repository.work_directory_abs_path,
 9203            Path::new(path!("/root/my-repo")).into()
 9204        );
 9205
 9206        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
 9207        assert_eq!(
 9208            repository
 9209                .status_for_path(&repo_path(E_TXT))
 9210                .unwrap()
 9211                .status,
 9212            FileStatus::Untracked
 9213        );
 9214    });
 9215
 9216    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
 9217    project
 9218        .update(cx, |project, cx| project.git_scans_complete(cx))
 9219        .await;
 9220    cx.run_until_parked();
 9221
 9222    repository.read_with(cx, |repository, _cx| {
 9223        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
 9224        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
 9225    });
 9226}
 9227
  9228// TODO: this test is flaky (especially on Windows, but at least occasionally on all platforms).
 9229#[cfg(any())]
 9230#[gpui::test]
 9231async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
 9232    init_test(cx);
 9233    cx.executor().allow_parking();
 9234
 9235    let root = TempTree::new(json!({
 9236        "project": {
 9237            "a.txt": "a",
 9238        },
 9239    }));
 9240    let root_path = root.path();
 9241
 9242    let repo = git_init(&root_path.join("project"));
 9243    git_add("a.txt", &repo);
 9244    git_commit("init", &repo);
 9245
 9246    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
 9247
 9248    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 9249    tree.flush_fs_events(cx).await;
 9250    project
 9251        .update(cx, |project, cx| project.git_scans_complete(cx))
 9252        .await;
 9253    cx.executor().run_until_parked();
 9254
 9255    let repository = project.read_with(cx, |project, cx| {
 9256        project.repositories(cx).values().next().unwrap().clone()
 9257    });
 9258
 9259    git_branch("other-branch", &repo);
 9260    git_checkout("refs/heads/other-branch", &repo);
 9261    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
 9262    git_add("a.txt", &repo);
 9263    git_commit("capitalize", &repo);
 9264    let commit = repo
 9265        .head()
 9266        .expect("Failed to get HEAD")
 9267        .peel_to_commit()
 9268        .expect("HEAD is not a commit");
 9269    git_checkout("refs/heads/main", &repo);
 9270    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
 9271    git_add("a.txt", &repo);
 9272    git_commit("improve letter", &repo);
 9273    git_cherry_pick(&commit, &repo);
 9274    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
 9275        .expect("No CHERRY_PICK_HEAD");
 9276    pretty_assertions::assert_eq!(
 9277        git_status(&repo),
 9278        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
 9279    );
 9280    tree.flush_fs_events(cx).await;
 9281    project
 9282        .update(cx, |project, cx| project.git_scans_complete(cx))
 9283        .await;
 9284    cx.executor().run_until_parked();
 9285    let conflicts = repository.update(cx, |repository, _| {
 9286        repository
 9287            .merge_conflicts
 9288            .iter()
 9289            .cloned()
 9290            .collect::<Vec<_>>()
 9291    });
 9292    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);
 9293
 9294    git_add("a.txt", &repo);
 9295    // Attempt to manually simulate what `git cherry-pick --continue` would do.
 9296    git_commit("whatevs", &repo);
 9297    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
 9298        .expect("Failed to remove CHERRY_PICK_HEAD");
 9299    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
 9300    tree.flush_fs_events(cx).await;
 9301    let conflicts = repository.update(cx, |repository, _| {
 9302        repository
 9303            .merge_conflicts
 9304            .iter()
 9305            .cloned()
 9306            .collect::<Vec<_>>()
 9307    });
 9308    pretty_assertions::assert_eq!(conflicts, []);
 9309}
 9310
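      // Verifies that editing .gitignore updates both the ignore flags and the git statuses of affected entries.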
 9311#[gpui::test]
 9312async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
 9313    init_test(cx);
 9314    let fs = FakeFs::new(cx.background_executor.clone());
 9315    fs.insert_tree(
 9316        path!("/root"),
 9317        json!({
 9318            ".git": {},
 9319            ".gitignore": "*.txt\n",
 9320            "a.xml": "<a></a>",
 9321            "b.txt": "Some text"
 9322        }),
 9323    )
 9324    .await;
 9325
 9326    fs.set_head_and_index_for_repo(
 9327        path!("/root/.git").as_ref(),
 9328        &[
 9329            (".gitignore", "*.txt\n".into()),
 9330            ("a.xml", "<a></a>".into()),
 9331        ],
 9332    );
 9333
 9334    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
 9335
 9336    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 9337    tree.flush_fs_events(cx).await;
 9338    project
 9339        .update(cx, |project, cx| project.git_scans_complete(cx))
 9340        .await;
 9341    cx.executor().run_until_parked();
 9342
 9343    let repository = project.read_with(cx, |project, cx| {
 9344        project.repositories(cx).values().next().unwrap().clone()
 9345    });
 9346
 9347    // One file is unmodified, the other is ignored.
 9348    cx.read(|cx| {
 9349        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
 9350        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
 9351    });
 9352
 9353    // Change the gitignore, and stage the newly non-ignored file.
 9354    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
 9355        .await
 9356        .unwrap();
 9357    fs.set_index_for_repo(
 9358        Path::new(path!("/root/.git")),
 9359        &[
 9360            (".gitignore", "*.txt\n".into()),
 9361            ("a.xml", "<a></a>".into()),
 9362            ("b.txt", "Some text".into()),
 9363        ],
 9364    );
 9365
 9366    cx.executor().run_until_parked();
 9367    cx.read(|cx| {
 9368        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
 9369        assert_entry_git_state(
 9370            tree.read(cx),
 9371            repository.read(cx),
 9372            "b.txt",
 9373            Some(StatusCode::Added),
 9374            false,
 9375        );
 9376    });
 9377}
 9378
 9379// NOTE:
 9380// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
  9381// a directory that some program already has open.
  9382// This is a limitation of Windows.
 9383// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
 9384// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
 9385#[gpui::test]
 9386#[cfg_attr(target_os = "windows", ignore)]
 9387async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
 9388    init_test(cx);
 9389    cx.executor().allow_parking();
 9390    let root = TempTree::new(json!({
 9391        "projects": {
 9392            "project1": {
 9393                "a": "",
 9394                "b": "",
 9395            }
 9396        },
 9398    }));
 9399    let root_path = root.path();
 9400
 9401    let repo = git_init(&root_path.join("projects/project1"));
 9402    git_add("a", &repo);
 9403    git_commit("init", &repo);
 9404    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();
 9405
 9406    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
 9407
 9408    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 9409    tree.flush_fs_events(cx).await;
 9410    project
 9411        .update(cx, |project, cx| project.git_scans_complete(cx))
 9412        .await;
 9413    cx.executor().run_until_parked();
 9414
 9415    let repository = project.read_with(cx, |project, cx| {
 9416        project.repositories(cx).values().next().unwrap().clone()
 9417    });
 9418
 9419    repository.read_with(cx, |repository, _| {
 9420        assert_eq!(
 9421            repository.work_directory_abs_path.as_ref(),
 9422            root_path.join("projects/project1").as_path()
 9423        );
 9424        assert_eq!(
 9425            repository
 9426                .status_for_path(&repo_path("a"))
 9427                .map(|entry| entry.status),
 9428            Some(StatusCode::Modified.worktree()),
 9429        );
 9430        assert_eq!(
 9431            repository
 9432                .status_for_path(&repo_path("b"))
 9433                .map(|entry| entry.status),
 9434            Some(FileStatus::Untracked),
 9435        );
 9436    });
 9437
 9438    std::fs::rename(
 9439        root_path.join("projects/project1"),
 9440        root_path.join("projects/project2"),
 9441    )
 9442    .unwrap();
 9443    tree.flush_fs_events(cx).await;
 9444
 9445    repository.read_with(cx, |repository, _| {
 9446        assert_eq!(
 9447            repository.work_directory_abs_path.as_ref(),
 9448            root_path.join("projects/project2").as_path()
 9449        );
 9450        assert_eq!(
 9451            repository.status_for_path(&repo_path("a")).unwrap().status,
 9452            StatusCode::Modified.worktree(),
 9453        );
 9454        assert_eq!(
 9455            repository.status_for_path(&repo_path("b")).unwrap().status,
 9456            FileStatus::Untracked,
 9457        );
 9458    });
 9459}
 9460
 9461// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
  9462// you can't rename a directory that some program already has open. This is a
  9463// limitation of Windows.
 9464// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
 9465// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
 9466#[gpui::test]
 9467#[cfg_attr(target_os = "windows", ignore)]
 9468async fn test_file_status(cx: &mut gpui::TestAppContext) {
 9469    init_test(cx);
 9470    cx.executor().allow_parking();
 9471    const IGNORE_RULE: &str = "**/target";
 9472
 9473    let root = TempTree::new(json!({
 9474        "project": {
 9475            "a.txt": "a",
 9476            "b.txt": "bb",
 9477            "c": {
 9478                "d": {
 9479                    "e.txt": "eee"
 9480                }
 9481            },
 9482            "f.txt": "ffff",
 9483            "target": {
 9484                "build_file": "???"
 9485            },
 9486            ".gitignore": IGNORE_RULE
 9487        },
 9489    }));
 9490    let root_path = root.path();
 9491
 9492    const A_TXT: &str = "a.txt";
 9493    const B_TXT: &str = "b.txt";
 9494    const E_TXT: &str = "c/d/e.txt";
 9495    const F_TXT: &str = "f.txt";
 9496    const DOTGITIGNORE: &str = ".gitignore";
 9497    const BUILD_FILE: &str = "target/build_file";
 9498
 9499    // Set up git repository before creating the worktree.
 9500    let work_dir = root.path().join("project");
 9501    let mut repo = git_init(work_dir.as_path());
 9502    repo.add_ignore_rule(IGNORE_RULE).unwrap();
 9503    git_add(A_TXT, &repo);
 9504    git_add(E_TXT, &repo);
 9505    git_add(DOTGITIGNORE, &repo);
 9506    git_commit("Initial commit", &repo);
 9507
 9508    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
 9509
 9510    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 9511    tree.flush_fs_events(cx).await;
 9512    project
 9513        .update(cx, |project, cx| project.git_scans_complete(cx))
 9514        .await;
 9515    cx.executor().run_until_parked();
 9516
 9517    let repository = project.read_with(cx, |project, cx| {
 9518        project.repositories(cx).values().next().unwrap().clone()
 9519    });
 9520
 9521    // Check that the right git state is observed on startup
 9522    repository.read_with(cx, |repository, _cx| {
 9523        assert_eq!(
 9524            repository.work_directory_abs_path.as_ref(),
 9525            root_path.join("project").as_path()
 9526        );
 9527
 9528        assert_eq!(
 9529            repository
 9530                .status_for_path(&repo_path(B_TXT))
 9531                .unwrap()
 9532                .status,
 9533            FileStatus::Untracked,
 9534        );
 9535        assert_eq!(
 9536            repository
 9537                .status_for_path(&repo_path(F_TXT))
 9538                .unwrap()
 9539                .status,
 9540            FileStatus::Untracked,
 9541        );
 9542    });
 9543
 9544    // Modify a file in the working copy.
 9545    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
 9546    tree.flush_fs_events(cx).await;
 9547    project
 9548        .update(cx, |project, cx| project.git_scans_complete(cx))
 9549        .await;
 9550    cx.executor().run_until_parked();
 9551
 9552    // The worktree detects that the file's git status has changed.
 9553    repository.read_with(cx, |repository, _| {
 9554        assert_eq!(
 9555            repository
 9556                .status_for_path(&repo_path(A_TXT))
 9557                .unwrap()
 9558                .status,
 9559            StatusCode::Modified.worktree(),
 9560        );
 9561    });
 9562
 9563    // Create a commit in the git repository.
 9564    git_add(A_TXT, &repo);
 9565    git_add(B_TXT, &repo);
 9566    git_commit("Committing modified and added", &repo);
 9567    tree.flush_fs_events(cx).await;
 9568    project
 9569        .update(cx, |project, cx| project.git_scans_complete(cx))
 9570        .await;
 9571    cx.executor().run_until_parked();
 9572
  9573    // The worktree detects that the files' git statuses have changed.
 9574    repository.read_with(cx, |repository, _cx| {
 9575        assert_eq!(
 9576            repository
 9577                .status_for_path(&repo_path(F_TXT))
 9578                .unwrap()
 9579                .status,
 9580            FileStatus::Untracked,
 9581        );
 9582        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
 9583        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
 9584    });
 9585
 9586    // Modify files in the working copy and perform git operations on other files.
 9587    git_reset(0, &repo);
 9588    git_remove_index(Path::new(B_TXT), &repo);
 9589    git_stash(&mut repo);
 9590    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
 9591    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
 9592    tree.flush_fs_events(cx).await;
 9593    project
 9594        .update(cx, |project, cx| project.git_scans_complete(cx))
 9595        .await;
 9596    cx.executor().run_until_parked();
 9597
 9598    // Check that more complex repo changes are tracked
 9599    repository.read_with(cx, |repository, _cx| {
 9600        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
 9601        assert_eq!(
 9602            repository
 9603                .status_for_path(&repo_path(B_TXT))
 9604                .unwrap()
 9605                .status,
 9606            FileStatus::Untracked,
 9607        );
 9608        assert_eq!(
 9609            repository
 9610                .status_for_path(&repo_path(E_TXT))
 9611                .unwrap()
 9612                .status,
 9613            StatusCode::Modified.worktree(),
 9614        );
 9615    });
 9616
 9617    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
 9618    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
 9619    std::fs::write(
 9620        work_dir.join(DOTGITIGNORE),
 9621        [IGNORE_RULE, "f.txt"].join("\n"),
 9622    )
 9623    .unwrap();
 9624
 9625    git_add(Path::new(DOTGITIGNORE), &repo);
 9626    git_commit("Committing modified git ignore", &repo);
 9627
 9628    tree.flush_fs_events(cx).await;
 9629    cx.executor().run_until_parked();
 9630
 9631    let mut renamed_dir_name = "first_directory/second_directory";
 9632    const RENAMED_FILE: &str = "rf.txt";
 9633
 9634    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
 9635    std::fs::write(
 9636        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
 9637        "new-contents",
 9638    )
 9639    .unwrap();
 9640
 9641    tree.flush_fs_events(cx).await;
 9642    project
 9643        .update(cx, |project, cx| project.git_scans_complete(cx))
 9644        .await;
 9645    cx.executor().run_until_parked();
 9646
 9647    repository.read_with(cx, |repository, _cx| {
 9648        assert_eq!(
 9649            repository
 9650                .status_for_path(&RepoPath::from_rel_path(
 9651                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
 9652                ))
 9653                .unwrap()
 9654                .status,
 9655            FileStatus::Untracked,
 9656        );
 9657    });
 9658
 9659    renamed_dir_name = "new_first_directory/second_directory";
 9660
 9661    std::fs::rename(
 9662        work_dir.join("first_directory"),
 9663        work_dir.join("new_first_directory"),
 9664    )
 9665    .unwrap();
 9666
 9667    tree.flush_fs_events(cx).await;
 9668    project
 9669        .update(cx, |project, cx| project.git_scans_complete(cx))
 9670        .await;
 9671    cx.executor().run_until_parked();
 9672
 9673    repository.read_with(cx, |repository, _cx| {
 9674        assert_eq!(
 9675            repository
 9676                .status_for_path(&RepoPath::from_rel_path(
 9677                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
 9678                ))
 9679                .unwrap()
 9680                .status,
 9681            FileStatus::Untracked,
 9682        );
 9683    });
 9684}
 9685
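      // Verifies that FS changes confined to ignored directories produce no repository update events
      // and only minimal worktree entry updates.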
 9686#[gpui::test]
 9687#[ignore]
 9688async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
 9689    init_test(cx);
 9690    cx.executor().allow_parking();
 9691
 9692    const IGNORE_RULE: &str = "**/target";
 9693
 9694    let root = TempTree::new(json!({
 9695        "project": {
 9696            "src": {
 9697                "main.rs": "fn main() {}"
 9698            },
 9699            "target": {
 9700                "debug": {
 9701                    "important_text.txt": "important text",
 9702                },
 9703            },
 9704            ".gitignore": IGNORE_RULE
 9705        },
 9707    }));
 9708    let root_path = root.path();
 9709
 9710    // Set up git repository before creating the worktree.
 9711    let work_dir = root.path().join("project");
 9712    let repo = git_init(work_dir.as_path());
 9713    repo.add_ignore_rule(IGNORE_RULE).unwrap();
 9714    git_add("src/main.rs", &repo);
 9715    git_add(".gitignore", &repo);
 9716    git_commit("Initial commit", &repo);
 9717
 9718    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
 9719    let repository_updates = Arc::new(Mutex::new(Vec::new()));
 9720    let project_events = Arc::new(Mutex::new(Vec::new()));
 9721    project.update(cx, |project, cx| {
 9722        let repo_events = repository_updates.clone();
 9723        cx.subscribe(project.git_store(), move |_, _, e, _| {
 9724            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
 9725                repo_events.lock().push(e.clone());
 9726            }
 9727        })
 9728        .detach();
 9729        let project_events = project_events.clone();
 9730        cx.subscribe_self(move |_, e, _| {
 9731            if let Event::WorktreeUpdatedEntries(_, updates) = e {
 9732                project_events.lock().extend(
 9733                    updates
 9734                        .iter()
 9735                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
 9736                        .filter(|(path, _)| path != "fs-event-sentinel"),
 9737                );
 9738            }
 9739        })
 9740        .detach();
 9741    });
 9742
 9743    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 9744    tree.flush_fs_events(cx).await;
 9745    tree.update(cx, |tree, cx| {
 9746        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
 9747    })
 9748    .await
 9749    .unwrap();
 9750    tree.update(cx, |tree, _| {
 9751        assert_eq!(
 9752            tree.entries(true, 0)
 9753                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
 9754                .collect::<Vec<_>>(),
 9755            vec![
 9756                (rel_path(""), false),
 9757                (rel_path("project/"), false),
 9758                (rel_path("project/.gitignore"), false),
 9759                (rel_path("project/src"), false),
 9760                (rel_path("project/src/main.rs"), false),
 9761                (rel_path("project/target"), true),
 9762                (rel_path("project/target/debug"), true),
 9763                (rel_path("project/target/debug/important_text.txt"), true),
 9764            ]
 9765        );
 9766    });
 9767
 9768    assert_eq!(
 9769        repository_updates.lock().drain(..).collect::<Vec<_>>(),
 9770        vec![
 9771            RepositoryEvent::StatusesChanged,
 9772            RepositoryEvent::MergeHeadsChanged,
 9773        ],
  9774        "Initial worktree scan should produce repo update events"
 9775    );
 9776    assert_eq!(
 9777        project_events.lock().drain(..).collect::<Vec<_>>(),
 9778        vec![
 9779            ("project/target".to_string(), PathChange::Loaded),
 9780            ("project/target/debug".to_string(), PathChange::Loaded),
 9781            (
 9782                "project/target/debug/important_text.txt".to_string(),
 9783                PathChange::Loaded
 9784            ),
 9785        ],
  9786        "Initial project changes should show that all non-ignored and all opened files are loaded"
 9787    );
 9788
 9789    let deps_dir = work_dir.join("target").join("debug").join("deps");
 9790    std::fs::create_dir_all(&deps_dir).unwrap();
 9791    tree.flush_fs_events(cx).await;
 9792    project
 9793        .update(cx, |project, cx| project.git_scans_complete(cx))
 9794        .await;
 9795    cx.executor().run_until_parked();
 9796    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
 9797    tree.flush_fs_events(cx).await;
 9798    project
 9799        .update(cx, |project, cx| project.git_scans_complete(cx))
 9800        .await;
 9801    cx.executor().run_until_parked();
 9802    std::fs::remove_dir_all(&deps_dir).unwrap();
 9803    tree.flush_fs_events(cx).await;
 9804    project
 9805        .update(cx, |project, cx| project.git_scans_complete(cx))
 9806        .await;
 9807    cx.executor().run_until_parked();
 9808
 9809    tree.update(cx, |tree, _| {
 9810        assert_eq!(
 9811            tree.entries(true, 0)
 9812                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
 9813                .collect::<Vec<_>>(),
 9814            vec![
 9815                (rel_path(""), false),
 9816                (rel_path("project/"), false),
 9817                (rel_path("project/.gitignore"), false),
 9818                (rel_path("project/src"), false),
 9819                (rel_path("project/src/main.rs"), false),
 9820                (rel_path("project/target"), true),
 9821                (rel_path("project/target/debug"), true),
 9822                (rel_path("project/target/debug/important_text.txt"), true),
 9823            ],
 9824            "No stray temp files should be left after the flycheck changes"
 9825        );
 9826    });
 9827
 9828    assert_eq!(
 9829        repository_updates
 9830            .lock()
 9831            .iter()
 9832            .cloned()
 9833            .collect::<Vec<_>>(),
 9834        Vec::new(),
  9835        "No further RepositoryUpdated events should happen, as only ignored dirs' contents were changed",
 9836    );
 9837    assert_eq!(
 9838        project_events.lock().as_slice(),
 9839        vec![
 9840            ("project/target/debug/deps".to_string(), PathChange::Added),
 9841            ("project/target/debug/deps".to_string(), PathChange::Removed),
 9842        ],
  9843        "Because the `debug` directory is tracked, it should get updates for entries inside it.
  9844        No updates should happen for more deeply nested directories, as those are ignored",
 9845    );
 9846}
 9847
 9848// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
 9849// to different timings/ordering of events.
 9850#[ignore]
 9851#[gpui::test]
 9852async fn test_odd_events_for_ignored_dirs(
 9853    executor: BackgroundExecutor,
 9854    cx: &mut gpui::TestAppContext,
 9855) {
 9856    init_test(cx);
 9857    let fs = FakeFs::new(executor);
 9858    fs.insert_tree(
 9859        path!("/root"),
 9860        json!({
 9861            ".git": {},
 9862            ".gitignore": "**/target/",
 9863            "src": {
 9864                "main.rs": "fn main() {}",
 9865            },
 9866            "target": {
 9867                "debug": {
 9868                    "foo.txt": "foo",
 9869                    "deps": {}
 9870                }
 9871            }
 9872        }),
 9873    )
 9874    .await;
 9875    fs.set_head_and_index_for_repo(
 9876        path!("/root/.git").as_ref(),
 9877        &[
 9878            (".gitignore", "**/target/".into()),
 9879            ("src/main.rs", "fn main() {}".into()),
 9880        ],
 9881    );
 9882
 9883    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
 9884    let repository_updates = Arc::new(Mutex::new(Vec::new()));
 9885    let project_events = Arc::new(Mutex::new(Vec::new()));
 9886    project.update(cx, |project, cx| {
 9887        let repository_updates = repository_updates.clone();
 9888        cx.subscribe(project.git_store(), move |_, _, e, _| {
 9889            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
 9890                repository_updates.lock().push(e.clone());
 9891            }
 9892        })
 9893        .detach();
 9894        let project_events = project_events.clone();
 9895        cx.subscribe_self(move |_, e, _| {
 9896            if let Event::WorktreeUpdatedEntries(_, updates) = e {
 9897                project_events.lock().extend(
 9898                    updates
 9899                        .iter()
 9900                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
 9901                        .filter(|(path, _)| path != "fs-event-sentinel"),
 9902                );
 9903            }
 9904        })
 9905        .detach();
 9906    });
 9907
 9908    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
 9909    tree.update(cx, |tree, cx| {
 9910        tree.load_file(rel_path("target/debug/foo.txt"), cx)
 9911    })
 9912    .await
 9913    .unwrap();
 9914    tree.flush_fs_events(cx).await;
 9915    project
 9916        .update(cx, |project, cx| project.git_scans_complete(cx))
 9917        .await;
 9918    cx.run_until_parked();
 9919    tree.update(cx, |tree, _| {
 9920        assert_eq!(
 9921            tree.entries(true, 0)
 9922                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
 9923                .collect::<Vec<_>>(),
 9924            vec![
 9925                (rel_path(""), false),
 9926                (rel_path(".gitignore"), false),
 9927                (rel_path("src"), false),
 9928                (rel_path("src/main.rs"), false),
 9929                (rel_path("target"), true),
 9930                (rel_path("target/debug"), true),
 9931                (rel_path("target/debug/deps"), true),
 9932                (rel_path("target/debug/foo.txt"), true),
 9933            ]
 9934        );
 9935    });
 9936
 9937    assert_eq!(
 9938        repository_updates.lock().drain(..).collect::<Vec<_>>(),
 9939        vec![
 9940            RepositoryEvent::MergeHeadsChanged,
 9941            RepositoryEvent::BranchChanged,
 9942            RepositoryEvent::StatusesChanged,
 9943            RepositoryEvent::StatusesChanged,
 9944        ],
  9945        "Initial worktree scan should produce repo update events"
 9946    );
 9947    assert_eq!(
 9948        project_events.lock().drain(..).collect::<Vec<_>>(),
 9949        vec![
 9950            ("target".to_string(), PathChange::Loaded),
 9951            ("target/debug".to_string(), PathChange::Loaded),
 9952            ("target/debug/deps".to_string(), PathChange::Loaded),
 9953            ("target/debug/foo.txt".to_string(), PathChange::Loaded),
 9954        ],
  9955        "All non-ignored entries and all opened files should get a project event",
 9956    );
 9957
  9958    // Emulate a flycheck spawn: it emits an `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
 9959    // This may happen multiple times during a single flycheck, but once is enough for testing.
 9960    fs.emit_fs_event("/root/target/debug/deps", None);
 9961    tree.flush_fs_events(cx).await;
 9962    project
 9963        .update(cx, |project, cx| project.git_scans_complete(cx))
 9964        .await;
 9965    cx.executor().run_until_parked();
 9966
 9967    assert_eq!(
 9968        repository_updates
 9969            .lock()
 9970            .iter()
 9971            .cloned()
 9972            .collect::<Vec<_>>(),
 9973        Vec::new(),
 9974        "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
 9975    );
 9976    assert_eq!(
 9977        project_events.lock().as_slice(),
 9978        Vec::new(),
 9979        "No further project events should happen, as only ignored dirs received FS events",
 9980    );
 9981}
 9982
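      // Verifies that adding an invisible worktree does not surface any additional repositories in the project.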
 9983#[gpui::test]
 9984async fn test_repos_in_invisible_worktrees(
 9985    executor: BackgroundExecutor,
 9986    cx: &mut gpui::TestAppContext,
 9987) {
 9988    init_test(cx);
 9989    let fs = FakeFs::new(executor);
 9990    fs.insert_tree(
 9991        path!("/root"),
 9992        json!({
 9993            "dir1": {
 9994                ".git": {},
 9995                "dep1": {
 9996                    ".git": {},
 9997                    "src": {
 9998                        "a.txt": "",
 9999                    },
10000                },
10001                "b.txt": "",
10002            },
10003        }),
10004    )
10005    .await;
10006
10007    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
10008    let _visible_worktree =
10009        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10010    project
10011        .update(cx, |project, cx| project.git_scans_complete(cx))
10012        .await;
10013
10014    let repos = project.read_with(cx, |project, cx| {
10015        project
10016            .repositories(cx)
10017            .values()
10018            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
10019            .collect::<Vec<_>>()
10020    });
10021    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
10022
10023    let (_invisible_worktree, _) = project
10024        .update(cx, |project, cx| {
10025            project.worktree_store.update(cx, |worktree_store, cx| {
10026                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
10027            })
10028        })
10029        .await
10030        .expect("failed to create worktree");
10031    project
10032        .update(cx, |project, cx| project.git_scans_complete(cx))
10033        .await;
10034
10035    let repos = project.read_with(cx, |project, cx| {
10036        project
10037            .repositories(cx)
10038            .values()
10039            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
10040            .collect::<Vec<_>>()
10041    });
10042    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
10043}
10044
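      // Verifies that git statuses and ignore flags remain correct after manually refreshing ignored
      // directories and creating new tracked and ignored files.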
10045#[gpui::test(iterations = 10)]
10046async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
10047    init_test(cx);
10048    cx.update(|cx| {
10049        cx.update_global::<SettingsStore, _>(|store, cx| {
10050            store.update_user_settings(cx, |settings| {
10051                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
10052            });
10053        });
10054    });
10055    let fs = FakeFs::new(cx.background_executor.clone());
10056    fs.insert_tree(
10057        path!("/root"),
10058        json!({
10059            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
10060            "tree": {
10061                ".git": {},
10062                ".gitignore": "ignored-dir\n",
10063                "tracked-dir": {
10064                    "tracked-file1": "",
10065                    "ancestor-ignored-file1": "",
10066                },
10067                "ignored-dir": {
10068                    "ignored-file1": ""
10069                }
10070            }
10071        }),
10072    )
10073    .await;
10074    fs.set_head_and_index_for_repo(
10075        path!("/root/tree/.git").as_ref(),
10076        &[
10077            (".gitignore", "ignored-dir\n".into()),
10078            ("tracked-dir/tracked-file1", "".into()),
10079        ],
10080    );
10081
10082    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;
10083
10084    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10085    tree.flush_fs_events(cx).await;
10086    project
10087        .update(cx, |project, cx| project.git_scans_complete(cx))
10088        .await;
10089    cx.executor().run_until_parked();
10090
10091    let repository = project.read_with(cx, |project, cx| {
10092        project.repositories(cx).values().next().unwrap().clone()
10093    });
10094
10095    tree.read_with(cx, |tree, _| {
10096        tree.as_local()
10097            .unwrap()
10098            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
10099    })
10100    .recv()
10101    .await;
10102
10103    cx.read(|cx| {
10104        assert_entry_git_state(
10105            tree.read(cx),
10106            repository.read(cx),
10107            "tracked-dir/tracked-file1",
10108            None,
10109            false,
10110        );
10111        assert_entry_git_state(
10112            tree.read(cx),
10113            repository.read(cx),
10114            "tracked-dir/ancestor-ignored-file1",
10115            None,
10116            false,
10117        );
10118        assert_entry_git_state(
10119            tree.read(cx),
10120            repository.read(cx),
10121            "ignored-dir/ignored-file1",
10122            None,
10123            true,
10124        );
10125    });
10126
10127    fs.create_file(
10128        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
10129        Default::default(),
10130    )
10131    .await
10132    .unwrap();
10133    fs.set_index_for_repo(
10134        path!("/root/tree/.git").as_ref(),
10135        &[
10136            (".gitignore", "ignored-dir\n".into()),
10137            ("tracked-dir/tracked-file1", "".into()),
10138            ("tracked-dir/tracked-file2", "".into()),
10139        ],
10140    );
10141    fs.create_file(
10142        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
10143        Default::default(),
10144    )
10145    .await
10146    .unwrap();
10147    fs.create_file(
10148        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
10149        Default::default(),
10150    )
10151    .await
10152    .unwrap();
10153
10154    cx.executor().run_until_parked();
10155    cx.read(|cx| {
10156        assert_entry_git_state(
10157            tree.read(cx),
10158            repository.read(cx),
10159            "tracked-dir/tracked-file2",
10160            Some(StatusCode::Added),
10161            false,
10162        );
10163        assert_entry_git_state(
10164            tree.read(cx),
10165            repository.read(cx),
10166            "tracked-dir/ancestor-ignored-file2",
10167            None,
10168            false,
10169        );
10170        assert_entry_git_state(
10171            tree.read(cx),
10172            repository.read(cx),
10173            "ignored-dir/ignored-file2",
10174            None,
10175            true,
10176        );
10177        assert!(
10178            tree.read(cx)
10179                .entry_for_path(&rel_path(".git"))
10180                .unwrap()
10181                .is_ignored
10182        );
10183    });
10184}
10185
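      // Verifies that linked git worktrees and submodules are detected as separate repositories and
      // that their statuses refresh when their git state changes.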
10186#[gpui::test]
10187async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
10188    init_test(cx);
10189
10190    let fs = FakeFs::new(cx.executor());
10191    fs.insert_tree(
10192        path!("/project"),
10193        json!({
10194            ".git": {
10195                "worktrees": {
10196                    "some-worktree": {
10197                        "commondir": "../..\n",
10198                        // For is_git_dir
10199                        "HEAD": "",
10200                        "config": ""
10201                    }
10202                },
10203                "modules": {
10204                    "subdir": {
10205                        "some-submodule": {
10206                            // For is_git_dir
10207                            "HEAD": "",
10208                            "config": "",
10209                        }
10210                    }
10211                }
10212            },
10213            "src": {
10214                "a.txt": "A",
10215            },
10216            "some-worktree": {
10217                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
10218                "src": {
10219                    "b.txt": "B",
10220                }
10221            },
10222            "subdir": {
10223                "some-submodule": {
10224                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
10225                    "c.txt": "C",
10226                }
10227            }
10228        }),
10229    )
10230    .await;
10231
10232    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
10233    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
10234    scan_complete.await;
10235
10236    let mut repositories = project.update(cx, |project, cx| {
10237        project
10238            .repositories(cx)
10239            .values()
10240            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
10241            .collect::<Vec<_>>()
10242    });
10243    repositories.sort();
10244    pretty_assertions::assert_eq!(
10245        repositories,
10246        [
10247            Path::new(path!("/project")).into(),
10248            Path::new(path!("/project/some-worktree")).into(),
10249            Path::new(path!("/project/subdir/some-submodule")).into(),
10250        ]
10251    );
10252
10253    // Generate a git-related event for the worktree and check that it's refreshed.
10254    fs.with_git_state(
10255        path!("/project/some-worktree/.git").as_ref(),
10256        true,
10257        |state| {
10258            state
10259                .head_contents
10260                .insert(repo_path("src/b.txt"), "b".to_owned());
10261            state
10262                .index_contents
10263                .insert(repo_path("src/b.txt"), "b".to_owned());
10264        },
10265    )
10266    .unwrap();
10267    cx.run_until_parked();
10268
10269    let buffer = project
10270        .update(cx, |project, cx| {
10271            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
10272        })
10273        .await
10274        .unwrap();
10275    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
10276        let (repo, _) = project
10277            .git_store()
10278            .read(cx)
10279            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
10280            .unwrap();
10281        pretty_assertions::assert_eq!(
10282            repo.read(cx).work_directory_abs_path,
10283            Path::new(path!("/project/some-worktree")).into(),
10284        );
10285        let barrier = repo.update(cx, |repo, _| repo.barrier());
10286        (repo.clone(), barrier)
10287    });
10288    barrier.await.unwrap();
10289    worktree_repo.update(cx, |repo, _| {
10290        pretty_assertions::assert_eq!(
10291            repo.status_for_path(&repo_path("src/b.txt"))
10292                .unwrap()
10293                .status,
10294            StatusCode::Modified.worktree(),
10295        );
10296    });
10297
10298    // The same for the submodule.
10299    fs.with_git_state(
10300        path!("/project/subdir/some-submodule/.git").as_ref(),
10301        true,
10302        |state| {
10303            state
10304                .head_contents
10305                .insert(repo_path("c.txt"), "c".to_owned());
10306            state
10307                .index_contents
10308                .insert(repo_path("c.txt"), "c".to_owned());
10309        },
10310    )
10311    .unwrap();
10312    cx.run_until_parked();
10313
10314    let buffer = project
10315        .update(cx, |project, cx| {
10316            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
10317        })
10318        .await
10319        .unwrap();
10320    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
10321        let (repo, _) = project
10322            .git_store()
10323            .read(cx)
10324            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
10325            .unwrap();
10326        pretty_assertions::assert_eq!(
10327            repo.read(cx).work_directory_abs_path,
10328            Path::new(path!("/project/subdir/some-submodule")).into(),
10329        );
10330        let barrier = repo.update(cx, |repo, _| repo.barrier());
10331        (repo.clone(), barrier)
10332    });
10333    barrier.await.unwrap();
10334    submodule_repo.update(cx, |repo, _| {
10335        pretty_assertions::assert_eq!(
10336            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
10337            StatusCode::Modified.worktree(),
10338        );
10339    });
10340}
10341
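      // Verifies that two project worktrees sharing the same containing repository produce a single repository entry.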
10342#[gpui::test]
10343async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
10344    init_test(cx);
10345    let fs = FakeFs::new(cx.background_executor.clone());
10346    fs.insert_tree(
10347        path!("/root"),
10348        json!({
10349            "project": {
10350                ".git": {},
10351                "child1": {
10352                    "a.txt": "A",
10353                },
10354                "child2": {
10355                    "b.txt": "B",
10356                }
10357            }
10358        }),
10359    )
10360    .await;
10361
10362    let project = Project::test(
10363        fs.clone(),
10364        [
10365            path!("/root/project/child1").as_ref(),
10366            path!("/root/project/child2").as_ref(),
10367        ],
10368        cx,
10369    )
10370    .await;
10371
10372    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10373    tree.flush_fs_events(cx).await;
10374    project
10375        .update(cx, |project, cx| project.git_scans_complete(cx))
10376        .await;
10377    cx.executor().run_until_parked();
10378
10379    let repos = project.read_with(cx, |project, cx| {
10380        project
10381            .repositories(cx)
10382            .values()
10383            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
10384            .collect::<Vec<_>>()
10385    });
10386    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
10387}
10388
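      // Verifies that saving a buffer under a new path updates its unstaged and uncommitted diff bases
      // to the new path's index and HEAD contents.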
10389#[gpui::test]
10390async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
10391    init_test(cx);
10392
10393    let file_1_committed = String::from(r#"file_1_committed"#);
10394    let file_1_staged = String::from(r#"file_1_staged"#);
10395    let file_2_committed = String::from(r#"file_2_committed"#);
10396    let file_2_staged = String::from(r#"file_2_staged"#);
10397    let buffer_contents = String::from(r#"buffer"#);
10398
10399    let fs = FakeFs::new(cx.background_executor.clone());
10400    fs.insert_tree(
10401        path!("/dir"),
10402        json!({
10403            ".git": {},
10404           "src": {
10405               "file_1.rs": file_1_committed.clone(),
10406               "file_2.rs": file_2_committed.clone(),
10407           }
10408        }),
10409    )
10410    .await;
10411
10412    fs.set_head_for_repo(
10413        path!("/dir/.git").as_ref(),
10414        &[
10415            ("src/file_1.rs", file_1_committed.clone()),
10416            ("src/file_2.rs", file_2_committed.clone()),
10417        ],
10418        "deadbeef",
10419    );
10420    fs.set_index_for_repo(
10421        path!("/dir/.git").as_ref(),
10422        &[
10423            ("src/file_1.rs", file_1_staged.clone()),
10424            ("src/file_2.rs", file_2_staged.clone()),
10425        ],
10426    );
10427
10428    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
10429
10430    let buffer = project
10431        .update(cx, |project, cx| {
10432            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
10433        })
10434        .await
10435        .unwrap();
10436
10437    buffer.update(cx, |buffer, cx| {
10438        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
10439    });
10440
10441    let unstaged_diff = project
10442        .update(cx, |project, cx| {
10443            project.open_unstaged_diff(buffer.clone(), cx)
10444        })
10445        .await
10446        .unwrap();
10447
10448    cx.run_until_parked();
10449
10450    unstaged_diff.update(cx, |unstaged_diff, cx| {
10451        let base_text = unstaged_diff.base_text_string(cx).unwrap();
10452        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
10453    });
10454
10455    // Save the buffer as `file_2.rs`, which should trigger the
10456    // `BufferChangedFilePath` event.
10457    project
10458        .update(cx, |project, cx| {
10459            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
10460            let path = ProjectPath {
10461                worktree_id,
10462                path: rel_path("src/file_2.rs").into(),
10463            };
10464            project.save_buffer_as(buffer.clone(), path, cx)
10465        })
10466        .await
10467        .unwrap();
10468
10469    cx.run_until_parked();
10470
10471    // Verify that the diff bases have been updated to file_2's contents due to
10472    // the `BufferChangedFilePath` event being handled.
10473    unstaged_diff.update(cx, |unstaged_diff, cx| {
10474        let snapshot = buffer.read(cx).snapshot();
10475        let base_text = unstaged_diff.base_text_string(cx).unwrap();
10476        assert_eq!(
10477            base_text, file_2_staged,
10478            "Diff bases should be automatically updated to file_2 staged content"
10479        );
10480
10481        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
10482        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
10483    });
10484
10485    let uncommitted_diff = project
10486        .update(cx, |project, cx| {
10487            project.open_uncommitted_diff(buffer.clone(), cx)
10488        })
10489        .await
10490        .unwrap();
10491
10492    cx.run_until_parked();
10493
10494    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
10495        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
10496        assert_eq!(
10497            base_text, file_2_committed,
10498            "Uncommitted diff should compare against file_2 committed content"
10499        );
10500    });
10501}
10502
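      // Runs a project-wide search and collects the results into a map from full file path to match offset ranges.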
10503async fn search(
10504    project: &Entity<Project>,
10505    query: SearchQuery,
10506    cx: &mut gpui::TestAppContext,
10507) -> Result<HashMap<String, Vec<Range<usize>>>> {
10508    let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10509    let mut results = HashMap::default();
10510    while let Ok(search_result) = search_rx.rx.recv().await {
10511        match search_result {
10512            SearchResult::Buffer { buffer, ranges } => {
10513                results.entry(buffer).or_insert(ranges);
10514            }
10515            SearchResult::LimitReached => {}
10516        }
10517    }
10518    Ok(results
10519        .into_iter()
10520        .map(|(buffer, ranges)| {
10521            buffer.update(cx, |buffer, cx| {
10522                let path = buffer
10523                    .file()
10524                    .unwrap()
10525                    .full_path(cx)
10526                    .to_string_lossy()
10527                    .to_string();
10528                let ranges = ranges
10529                    .into_iter()
10530                    .map(|range| range.to_offset(buffer))
10531                    .collect::<Vec<_>>();
10532                (path, ranges)
10533            })
10534        })
10535        .collect())
10536}
10537
10538pub fn init_test(cx: &mut gpui::TestAppContext) {
10539    zlog::init_test();
10540
10541    cx.update(|cx| {
10542        let settings_store = SettingsStore::test(cx);
10543        cx.set_global(settings_store);
10544        release_channel::init(semver::Version::new(0, 0, 0), cx);
10545    });
10546}
10547
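      // Minimal language definitions used by the tests above; most omit a grammar since parsing is not under test.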
10548fn json_lang() -> Arc<Language> {
10549    Arc::new(Language::new(
10550        LanguageConfig {
10551            name: "JSON".into(),
10552            matcher: LanguageMatcher {
10553                path_suffixes: vec!["json".to_string()],
10554                ..Default::default()
10555            },
10556            ..Default::default()
10557        },
10558        None,
10559    ))
10560}
10561
10562fn js_lang() -> Arc<Language> {
10563    Arc::new(Language::new(
10564        LanguageConfig {
10565            name: "JavaScript".into(),
10566            matcher: LanguageMatcher {
10567                path_suffixes: vec!["js".to_string()],
10568                ..Default::default()
10569            },
10570            ..Default::default()
10571        },
10572        None,
10573    ))
10574}
10575
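      // A Python language stub whose fake toolchain lister reports a virtual environment for every
      // .venv directory found in the ancestors of the queried path.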
10576fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
10577    struct PythonMootToolchainLister(Arc<FakeFs>);
10578    #[async_trait]
10579    impl ToolchainLister for PythonMootToolchainLister {
10580        async fn list(
10581            &self,
10582            worktree_root: PathBuf,
10583            subroot_relative_path: Arc<RelPath>,
10584            _: Option<HashMap<String, String>>,
10585            _: &dyn Fs,
10586        ) -> ToolchainList {
 10587            // This lister returns a toolchain for each .venv directory found within the ancestors of the queried path.
10588            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
10589            let mut toolchains = vec![];
10590            for ancestor in ancestors {
10591                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
10592                if self.0.is_dir(&venv_path).await {
10593                    toolchains.push(Toolchain {
10594                        name: SharedString::new("Python Venv"),
10595                        path: venv_path.to_string_lossy().into_owned().into(),
10596                        language_name: LanguageName(SharedString::new_static("Python")),
10597                        as_json: serde_json::Value::Null,
10598                    })
10599                }
10600            }
10601            ToolchainList {
10602                toolchains,
10603                ..Default::default()
10604            }
10605        }
10606        async fn resolve(
10607            &self,
10608            _: PathBuf,
10609            _: Option<HashMap<String, String>>,
10610            _: &dyn Fs,
10611        ) -> anyhow::Result<Toolchain> {
10612            Err(anyhow::anyhow!("Not implemented"))
10613        }
10614        fn meta(&self) -> ToolchainMetadata {
10615            ToolchainMetadata {
10616                term: SharedString::new_static("Virtual Environment"),
10617                new_toolchain_placeholder: SharedString::new_static(
 10618                    "A path to the python3 executable within a virtual environment, or a path to the virtual environment itself",
10619                ),
10620                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
10621            }
10622        }
10623        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
10624            vec![]
10625        }
10626    }
10627    Arc::new(
10628        Language::new(
10629            LanguageConfig {
10630                name: "Python".into(),
10631                matcher: LanguageMatcher {
10632                    path_suffixes: vec!["py".to_string()],
10633                    ..Default::default()
10634                },
10635                ..Default::default()
10636            },
10637            None, // We're not testing Python parsing with this language.
10638        )
10639        .with_manifest(Some(ManifestName::from(SharedString::new_static(
10640            "pyproject.toml",
10641        ))))
10642        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
10643    )
10644}
10645
10646fn typescript_lang() -> Arc<Language> {
10647    Arc::new(Language::new(
10648        LanguageConfig {
10649            name: "TypeScript".into(),
10650            matcher: LanguageMatcher {
10651                path_suffixes: vec!["ts".to_string()],
10652                ..Default::default()
10653            },
10654            ..Default::default()
10655        },
10656        Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
10657    ))
10658}
10659
10660fn tsx_lang() -> Arc<Language> {
10661    Arc::new(Language::new(
10662        LanguageConfig {
10663            name: "tsx".into(),
10664            matcher: LanguageMatcher {
10665                path_suffixes: vec!["tsx".to_string()],
10666                ..Default::default()
10667            },
10668            ..Default::default()
10669        },
10670        Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
10671    ))
10672}
10673
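      // Collects both previously used and currently available resolved tasks from the project's task inventory.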
10674fn get_all_tasks(
10675    project: &Entity<Project>,
10676    task_contexts: Arc<TaskContexts>,
10677    cx: &mut App,
10678) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
10679    let new_tasks = project.update(cx, |project, cx| {
10680        project.task_store.update(cx, |task_store, cx| {
10681            task_store.task_inventory().unwrap().update(cx, |this, cx| {
10682                this.used_and_current_resolved_tasks(task_contexts, cx)
10683            })
10684        })
10685    });
10686
10687    cx.background_spawn(async move {
10688        let (mut old, new) = new_tasks.await;
10689        old.extend(new);
10690        old
10691    })
10692}
10693
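     // Asserts that `path` has the expected git index status and ignore state in the
     // given worktree/repository pair.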
10694#[track_caller]
10695fn assert_entry_git_state(
10696    tree: &Worktree,
10697    repository: &Repository,
10698    path: &str,
10699    index_status: Option<StatusCode>,
10700    is_ignored: bool,
10701) {
10702    assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
10703    let entry = tree
10704        .entry_for_path(&rel_path(path))
10705        .unwrap_or_else(|| panic!("entry {path} not found"));
10706    let status = repository
10707        .status_for_path(&repo_path(path))
10708        .map(|entry| entry.status);
10709    let expected = index_status.map(|index_status| {
10710        TrackedStatus {
10711            index_status,
10712            worktree_status: StatusCode::Unmodified,
10713        }
10714        .into()
10715    });
10716    assert_eq!(
10717        status, expected,
10718        "expected {path} to have git status: {expected:?}"
10719    );
10720    assert_eq!(
10721        entry.is_ignored, is_ignored,
10722        "expected {path} to have is_ignored: {is_ignored}"
10723    );
10724}
10725
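     // Initializes a real git2 repository on disk with "main" as the initial branch.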
10726#[track_caller]
10727fn git_init(path: &Path) -> git2::Repository {
10728    let mut init_opts = RepositoryInitOptions::new();
10729    init_opts.initial_head("main");
10730    git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
10731}
10732
10733#[track_caller]
10734fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
10735    let path = path.as_ref();
10736    let mut index = repo.index().expect("Failed to get index");
10737    index.add_path(path).expect("Failed to add file");
10738    index.write().expect("Failed to write index");
10739}
10740
10741#[track_caller]
10742fn git_remove_index(path: &Path, repo: &git2::Repository) {
10743    let mut index = repo.index().expect("Failed to get index");
10744    index.remove_path(path).expect("Failed to remove file");
10745    index.write().expect("Failed to write index");
10746}
10747
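     // Commits the current index, using the existing HEAD as the parent commit when
     // the repository already has one.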
10748#[track_caller]
10749fn git_commit(msg: &'static str, repo: &git2::Repository) {
10750    use git2::Signature;
10751
10752    let signature = Signature::now("test", "test@zed.dev").unwrap();
10753    let oid = repo.index().unwrap().write_tree().unwrap();
10754    let tree = repo.find_tree(oid).unwrap();
10755    if let Ok(head) = repo.head() {
10756        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
10757
10758        let parent_commit = parent_obj.as_commit().unwrap();
10759
10760        repo.commit(
10761            Some("HEAD"),
10762            &signature,
10763            &signature,
10764            msg,
10765            &tree,
10766            &[parent_commit],
10767        )
10768        .expect("Failed to commit with parent");
10769    } else {
10770        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
10771            .expect("Failed to commit");
10772    }
10773}
10774
10775#[cfg(any())]
10776#[track_caller]
10777fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
10778    repo.cherrypick(commit, None).expect("Failed to cherrypick");
10779}
10780
10781#[track_caller]
10782fn git_stash(repo: &mut git2::Repository) {
10783    use git2::Signature;
10784
10785    let signature = Signature::now("test", "test@zed.dev").unwrap();
10786    repo.stash_save(&signature, "N/A", None)
10787        .expect("Failed to stash");
10788}
10789
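     // Soft-resets the repository to the `offset`-th parent of the current HEAD commit.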
10790#[track_caller]
10791fn git_reset(offset: usize, repo: &git2::Repository) {
10792    let head = repo.head().expect("Couldn't get repo head");
10793    let object = head.peel(git2::ObjectType::Commit).unwrap();
10794    let commit = object.as_commit().unwrap();
10795    let new_head = commit
10796        .parents()
10797        .inspect(|parent| {
10798            parent.message();
10799        })
10800        .nth(offset)
10801        .expect("Not enough history");
10802    repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
10803        .expect("Could not reset");
10804}
10805
10806#[cfg(any())]
10807#[track_caller]
10808fn git_branch(name: &str, repo: &git2::Repository) {
10809    let head = repo
10810        .head()
10811        .expect("Couldn't get repo head")
10812        .peel_to_commit()
10813        .expect("HEAD is not a commit");
10814    repo.branch(name, &head, false).expect("Failed to create branch");
10815}
10816
10817#[cfg(any())]
10818#[track_caller]
10819fn git_checkout(name: &str, repo: &git2::Repository) {
10820    repo.set_head(name).expect("Failed to set head");
10821    repo.checkout_head(None).expect("Failed to check out head");
10822}
10823
10824#[cfg(any())]
10825#[track_caller]
10826fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
10827    repo.statuses(None)
10828        .unwrap()
10829        .iter()
10830        .map(|status| (status.path().unwrap().to_string(), status.status()))
10831        .collect()
10832}
10833
10834#[gpui::test]
10835async fn test_find_project_path_abs(
10836    background_executor: BackgroundExecutor,
10837    cx: &mut gpui::TestAppContext,
10838) {
10839    // find_project_path should work with absolute paths
10840    init_test(cx);
10841
10842    let fs = FakeFs::new(background_executor);
10843    fs.insert_tree(
10844        path!("/root"),
10845        json!({
10846            "project1": {
10847                "file1.txt": "content1",
10848                "subdir": {
10849                    "file2.txt": "content2"
10850                }
10851            },
10852            "project2": {
10853                "file3.txt": "content3"
10854            }
10855        }),
10856    )
10857    .await;
10858
10859    let project = Project::test(
10860        fs.clone(),
10861        [
10862            path!("/root/project1").as_ref(),
10863            path!("/root/project2").as_ref(),
10864        ],
10865        cx,
10866    )
10867    .await;
10868
10869    // Make sure the worktrees are fully initialized
10870    project
10871        .update(cx, |project, cx| project.git_scans_complete(cx))
10872        .await;
10873    cx.run_until_parked();
10874
10875    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
10876        project.read_with(cx, |project, cx| {
10877            let worktrees: Vec<_> = project.worktrees(cx).collect();
10878            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
10879            let id1 = worktrees[0].read(cx).id();
10880            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
10881            let id2 = worktrees[1].read(cx).id();
10882            (abs_path1, id1, abs_path2, id2)
10883        });
10884
10885    project.update(cx, |project, cx| {
10886        let abs_path = project1_abs_path.join("file1.txt");
10887        let found_path = project.find_project_path(abs_path, cx).unwrap();
10888        assert_eq!(found_path.worktree_id, project1_id);
10889        assert_eq!(&*found_path.path, rel_path("file1.txt"));
10890
10891        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
10892        let found_path = project.find_project_path(abs_path, cx).unwrap();
10893        assert_eq!(found_path.worktree_id, project1_id);
10894        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));
10895
10896        let abs_path = project2_abs_path.join("file3.txt");
10897        let found_path = project.find_project_path(abs_path, cx).unwrap();
10898        assert_eq!(found_path.worktree_id, project2_id);
10899        assert_eq!(&*found_path.path, rel_path("file3.txt"));
10900
10901        let abs_path = project1_abs_path.join("nonexistent.txt");
10902        let found_path = project.find_project_path(abs_path, cx);
10903        assert!(
10904            found_path.is_some(),
10905            "Should find project path for nonexistent file in worktree"
10906        );
10907
10908        // Test with an absolute path outside any worktree
10909        let abs_path = Path::new("/some/other/path");
10910        let found_path = project.find_project_path(abs_path, cx);
10911        assert!(
10912            found_path.is_none(),
10913            "Should not find project path for path outside any worktree"
10914        );
10915    });
10916}
10917
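     // Removing a worktree should keep repositories that are still covered by other
     // worktrees and move the active repository to a remaining one (or clear it when
     // none are left).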
10918#[gpui::test]
10919async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
10920    init_test(cx);
10921
10922    let fs = FakeFs::new(cx.executor());
10923    fs.insert_tree(
10924        path!("/root"),
10925        json!({
10926            "a": {
10927                ".git": {},
10928                "src": {
10929                    "main.rs": "fn main() {}",
10930                }
10931            },
10932            "b": {
10933                ".git": {},
10934                "src": {
10935                    "main.rs": "fn main() {}",
10936                },
10937                "script": {
10938                    "run.sh": "#!/bin/bash"
10939                }
10940            }
10941        }),
10942    )
10943    .await;
10944
10945    let project = Project::test(
10946        fs.clone(),
10947        [
10948            path!("/root/a").as_ref(),
10949            path!("/root/b/script").as_ref(),
10950            path!("/root/b").as_ref(),
10951        ],
10952        cx,
10953    )
10954    .await;
10955    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
10956    scan_complete.await;
10957
10958    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
10959    assert_eq!(worktrees.len(), 3);
10960
10961    let worktree_id_by_abs_path = worktrees
10962        .into_iter()
10963        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
10964        .collect::<HashMap<_, _>>();
10965    let worktree_id = worktree_id_by_abs_path
10966        .get(Path::new(path!("/root/b/script")))
10967        .unwrap();
10968
10969    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
10970    assert_eq!(repos.len(), 2);
10971
10972    project.update(cx, |project, cx| {
10973        project.remove_worktree(*worktree_id, cx);
10974    });
10975    cx.run_until_parked();
10976
10977    let mut repo_paths = project
10978        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
10979        .values()
10980        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
10981        .collect::<Vec<_>>();
10982    repo_paths.sort();
10983
10984    pretty_assertions::assert_eq!(
10985        repo_paths,
10986        [
10987            Path::new(path!("/root/a")).into(),
10988            Path::new(path!("/root/b")).into(),
10989        ]
10990    );
10991
10992    let active_repo_path = project
10993        .read_with(cx, |p, cx| {
10994            p.active_repository(cx)
10995                .map(|r| r.read(cx).work_directory_abs_path.clone())
10996        })
10997        .unwrap();
10998    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));
10999
11000    let worktree_id = worktree_id_by_abs_path
11001        .get(Path::new(path!("/root/a")))
11002        .unwrap();
11003    project.update(cx, |project, cx| {
11004        project.remove_worktree(*worktree_id, cx);
11005    });
11006    cx.run_until_parked();
11007
11008    let active_repo_path = project
11009        .read_with(cx, |p, cx| {
11010            p.active_repository(cx)
11011                .map(|r| r.read(cx).work_directory_abs_path.clone())
11012        })
11013        .unwrap();
11014    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));
11015
11016    let worktree_id = worktree_id_by_abs_path
11017        .get(Path::new(path!("/root/b")))
11018        .unwrap();
11019    project.update(cx, |project, cx| {
11020        project.remove_worktree(*worktree_id, cx);
11021    });
11022    cx.run_until_parked();
11023
11024    let active_repo_path = project.read_with(cx, |p, cx| {
11025        p.active_repository(cx)
11026            .map(|r| r.read(cx).work_directory_abs_path.clone())
11027    });
11028    assert!(active_repo_path.is_none());
11029}
11030
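     // Staging a file should optimistically mark its hunks as pending before the git
     // operation completes, and settle to fully staged once it does.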
11031#[gpui::test]
11032async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
11033    use DiffHunkSecondaryStatus::*;
11034    init_test(cx);
11035
11036    let committed_contents = r#"
11037        one
11038        two
11039        three
11040    "#
11041    .unindent();
11042    let file_contents = r#"
11043        one
11044        TWO
11045        three
11046    "#
11047    .unindent();
11048
11049    let fs = FakeFs::new(cx.background_executor.clone());
11050    fs.insert_tree(
11051        path!("/dir"),
11052        json!({
11053            ".git": {},
11054            "file.txt": file_contents.clone()
11055        }),
11056    )
11057    .await;
11058
11059    fs.set_head_and_index_for_repo(
11060        path!("/dir/.git").as_ref(),
11061        &[("file.txt", committed_contents.clone())],
11062    );
11063
11064    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
11065
11066    let buffer = project
11067        .update(cx, |project, cx| {
11068            project.open_local_buffer(path!("/dir/file.txt"), cx)
11069        })
11070        .await
11071        .unwrap();
11072    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
11073    let uncommitted_diff = project
11074        .update(cx, |project, cx| {
11075            project.open_uncommitted_diff(buffer.clone(), cx)
11076        })
11077        .await
11078        .unwrap();
11079
11080    // The hunk is initially unstaged.
11081    uncommitted_diff.read_with(cx, |diff, cx| {
11082        assert_hunks(
11083            diff.snapshot(cx).hunks(&snapshot),
11084            &snapshot,
11085            &diff.base_text_string(cx).unwrap(),
11086            &[(
11087                1..2,
11088                "two\n",
11089                "TWO\n",
11090                DiffHunkStatus::modified(HasSecondaryHunk),
11091            )],
11092        );
11093    });
11094
11095    // Get the repository handle.
11096    let repo = project.read_with(cx, |project, cx| {
11097        project.repositories(cx).values().next().unwrap().clone()
11098    });
11099
11100    // Stage the file.
11101    let stage_task = repo.update(cx, |repo, cx| {
11102        repo.stage_entries(vec![repo_path("file.txt")], cx)
11103    });
11104
11105    // Run a few ticks to let the job start and mark hunks as pending,
11106    // but don't call run_until_parked, which would complete the entire operation.
11107    for _ in 0..10 {
11108        cx.executor().tick();
11109        let [hunk]: [_; 1] = uncommitted_diff
11110            .read_with(cx, |diff, cx| {
11111                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
11112            })
11113            .try_into()
11114            .unwrap();
11115        match hunk.secondary_status {
11116            HasSecondaryHunk => {}
11117            SecondaryHunkRemovalPending => break,
11118            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
11119            _ => panic!("unexpected hunk state"),
11120        }
11121    }
11122    uncommitted_diff.read_with(cx, |diff, cx| {
11123        assert_hunks(
11124            diff.snapshot(cx).hunks(&snapshot),
11125            &snapshot,
11126            &diff.base_text_string(cx).unwrap(),
11127            &[(
11128                1..2,
11129                "two\n",
11130                "TWO\n",
11131                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
11132            )],
11133        );
11134    });
11135
11136    // Let the staging complete.
11137    stage_task.await.unwrap();
11138    cx.run_until_parked();
11139
11140    // The hunk is now fully staged.
11141    uncommitted_diff.read_with(cx, |diff, cx| {
11142        assert_hunks(
11143            diff.snapshot(cx).hunks(&snapshot),
11144            &snapshot,
11145            &diff.base_text_string(cx).unwrap(),
11146            &[(
11147                1..2,
11148                "two\n",
11149                "TWO\n",
11150                DiffHunkStatus::modified(NoSecondaryHunk),
11151            )],
11152        );
11153    });
11154
11155    // Simulate a commit by updating HEAD to match the current file contents.
11156    // The FakeGitRepository's commit method is a no-op, so we need to manually
11157    // update HEAD to simulate the commit completing.
11158    fs.set_head_for_repo(
11159        path!("/dir/.git").as_ref(),
11160        &[("file.txt", file_contents.clone())],
11161        "newhead",
11162    );
11163    cx.run_until_parked();
11164
11165    // After committing, there are no more hunks.
11166    uncommitted_diff.read_with(cx, |diff, cx| {
11167        assert_hunks(
11168            diff.snapshot(cx).hunks(&snapshot),
11169            &snapshot,
11170            &diff.base_text_string(cx).unwrap(),
11171            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
11172        );
11173    });
11174}
11175
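     // Buffers whose paths match a `read_only_files` glob pattern should open as read-only.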
11176#[gpui::test]
11177async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
11178    init_test(cx);
11179
11180    // Configure read_only_files setting
11181    cx.update(|cx| {
11182        cx.update_global::<SettingsStore, _>(|store, cx| {
11183            store.update_user_settings(cx, |settings| {
11184                settings.project.worktree.read_only_files = Some(vec![
11185                    "**/generated/**".to_string(),
11186                    "**/*.gen.rs".to_string(),
11187                ]);
11188            });
11189        });
11190    });
11191
11192    let fs = FakeFs::new(cx.background_executor.clone());
11193    fs.insert_tree(
11194        path!("/root"),
11195        json!({
11196            "src": {
11197                "main.rs": "fn main() {}",
11198                "types.gen.rs": "// Generated file",
11199            },
11200            "generated": {
11201                "schema.rs": "// Auto-generated schema",
11202            }
11203        }),
11204    )
11205    .await;
11206
11207    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11208
11209    // Open a regular file - should be read-write
11210    let regular_buffer = project
11211        .update(cx, |project, cx| {
11212            project.open_local_buffer(path!("/root/src/main.rs"), cx)
11213        })
11214        .await
11215        .unwrap();
11216
11217    regular_buffer.read_with(cx, |buffer, _| {
11218        assert!(!buffer.read_only(), "Regular file should not be read-only");
11219    });
11220
11221    // Open a file matching *.gen.rs pattern - should be read-only
11222    let gen_buffer = project
11223        .update(cx, |project, cx| {
11224            project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
11225        })
11226        .await
11227        .unwrap();
11228
11229    gen_buffer.read_with(cx, |buffer, _| {
11230        assert!(
11231            buffer.read_only(),
11232            "File matching *.gen.rs pattern should be read-only"
11233        );
11234    });
11235
11236    // Open a file in generated directory - should be read-only
11237    let generated_buffer = project
11238        .update(cx, |project, cx| {
11239            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11240        })
11241        .await
11242        .unwrap();
11243
11244    generated_buffer.read_with(cx, |buffer, _| {
11245        assert!(
11246            buffer.read_only(),
11247            "File in generated directory should be read-only"
11248        );
11249    });
11250}
11251
11252#[gpui::test]
11253async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
11254    init_test(cx);
11255
11256    // Explicitly set read_only_files to empty (default behavior)
11257    cx.update(|cx| {
11258        cx.update_global::<SettingsStore, _>(|store, cx| {
11259            store.update_user_settings(cx, |settings| {
11260                settings.project.worktree.read_only_files = Some(vec![]);
11261            });
11262        });
11263    });
11264
11265    let fs = FakeFs::new(cx.background_executor.clone());
11266    fs.insert_tree(
11267        path!("/root"),
11268        json!({
11269            "src": {
11270                "main.rs": "fn main() {}",
11271            },
11272            "generated": {
11273                "schema.rs": "// Auto-generated schema",
11274            }
11275        }),
11276    )
11277    .await;
11278
11279    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11280
11281    // All files should be read-write when read_only_files is empty
11282    let main_buffer = project
11283        .update(cx, |project, cx| {
11284            project.open_local_buffer(path!("/root/src/main.rs"), cx)
11285        })
11286        .await
11287        .unwrap();
11288
11289    main_buffer.read_with(cx, |buffer, _| {
11290        assert!(
11291            !buffer.read_only(),
11292            "Files should not be read-only when read_only_files is empty"
11293        );
11294    });
11295
11296    let generated_buffer = project
11297        .update(cx, |project, cx| {
11298            project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11299        })
11300        .await
11301        .unwrap();
11302
11303    generated_buffer.read_with(cx, |buffer, _| {
11304        assert!(
11305            !buffer.read_only(),
11306            "Generated files should not be read-only when read_only_files is empty"
11307        );
11308    });
11309}
11310
11311#[gpui::test]
11312async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
11313    init_test(cx);
11314
11315    // Configure to make lock files read-only
11316    cx.update(|cx| {
11317        cx.update_global::<SettingsStore, _>(|store, cx| {
11318            store.update_user_settings(cx, |settings| {
11319                settings.project.worktree.read_only_files = Some(vec![
11320                    "**/*.lock".to_string(),
11321                    "**/package-lock.json".to_string(),
11322                ]);
11323            });
11324        });
11325    });
11326
11327    let fs = FakeFs::new(cx.background_executor.clone());
11328    fs.insert_tree(
11329        path!("/root"),
11330        json!({
11331            "Cargo.lock": "# Lock file",
11332            "Cargo.toml": "[package]",
11333            "package-lock.json": "{}",
11334            "package.json": "{}",
11335        }),
11336    )
11337    .await;
11338
11339    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11340
11341    // Cargo.lock should be read-only
11342    let cargo_lock = project
11343        .update(cx, |project, cx| {
11344            project.open_local_buffer(path!("/root/Cargo.lock"), cx)
11345        })
11346        .await
11347        .unwrap();
11348
11349    cargo_lock.read_with(cx, |buffer, _| {
11350        assert!(buffer.read_only(), "Cargo.lock should be read-only");
11351    });
11352
11353    // Cargo.toml should be read-write
11354    let cargo_toml = project
11355        .update(cx, |project, cx| {
11356            project.open_local_buffer(path!("/root/Cargo.toml"), cx)
11357        })
11358        .await
11359        .unwrap();
11360
11361    cargo_toml.read_with(cx, |buffer, _| {
11362        assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
11363    });
11364
11365    // package-lock.json should be read-only
11366    let package_lock = project
11367        .update(cx, |project, cx| {
11368            project.open_local_buffer(path!("/root/package-lock.json"), cx)
11369        })
11370        .await
11371        .unwrap();
11372
11373    package_lock.read_with(cx, |buffer, _| {
11374        assert!(buffer.read_only(), "package-lock.json should be read-only");
11375    });
11376
11377    // package.json should be read-write
11378    let package_json = project
11379        .update(cx, |project, cx| {
11380            project.open_local_buffer(path!("/root/package.json"), cx)
11381        })
11382        .await
11383        .unwrap();
11384
11385    package_json.read_with(cx, |buffer, _| {
11386        assert!(!buffer.read_only(), "package.json should not be read-only");
11387    });
11388}