1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks,
9};
10use fs::FakeFs;
11use futures::{StreamExt, future};
12use git::{
13 repository::RepoPath,
14 status::{StatusCode, TrackedStatus},
15};
16use git2::RepositoryInitOptions;
17use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
18use http_client::Url;
19use language::{
20 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
21 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
22 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
23 tree_sitter_rust, tree_sitter_typescript,
24};
25use lsp::{
26 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
27 WillRenameFiles, notification::DidRenameFiles,
28};
29use parking_lot::Mutex;
30use paths::{config_dir, tasks_file};
31use postage::stream::Stream as _;
32use pretty_assertions::{assert_eq, assert_matches};
33use serde_json::json;
34#[cfg(not(windows))]
35use std::os;
36use std::{mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
37use task::{ResolvedTask, TaskContext};
38use unindent::Unindent as _;
39use util::{
40 TryFutureExt as _, assert_set_eq, path,
41 paths::PathMatcher,
42 separator,
43 test::{TempTree, marked_text_offsets},
44 uri,
45};
46use worktree::WorktreeModelHandle as _;
47
48#[gpui::test]
49async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
50 cx.executor().allow_parking();
51
52 let (tx, mut rx) = futures::channel::mpsc::unbounded();
53 let _thread = std::thread::spawn(move || {
54 #[cfg(not(target_os = "windows"))]
55 std::fs::metadata("/tmp").unwrap();
56 #[cfg(target_os = "windows")]
57 std::fs::metadata("C:/Windows").unwrap();
58 std::thread::sleep(Duration::from_millis(1000));
59 tx.unbounded_send(1).unwrap();
60 });
61 rx.next().await.unwrap();
62}
63
64#[gpui::test]
65async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let io_task = smol::unblock(move || {
69 println!("sleeping on thread {:?}", std::thread::current().id());
70 std::thread::sleep(Duration::from_millis(10));
71 1
72 });
73
74 let task = cx.foreground_executor().spawn(async move {
75 io_task.await;
76 });
77
78 task.await;
79}
80
81#[cfg(not(windows))]
82#[gpui::test]
83async fn test_symlinks(cx: &mut gpui::TestAppContext) {
84 init_test(cx);
85 cx.executor().allow_parking();
86
87 let dir = TempTree::new(json!({
88 "root": {
89 "apple": "",
90 "banana": {
91 "carrot": {
92 "date": "",
93 "endive": "",
94 }
95 },
96 "fennel": {
97 "grape": "",
98 }
99 }
100 }));
101
102 let root_link_path = dir.path().join("root_link");
103 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
104 os::unix::fs::symlink(
105 dir.path().join("root/fennel"),
106 dir.path().join("root/finnochio"),
107 )
108 .unwrap();
109
110 let project = Project::test(
111 Arc::new(RealFs::new(None, cx.executor())),
112 [root_link_path.as_ref()],
113 cx,
114 )
115 .await;
116
117 project.update(cx, |project, cx| {
118 let tree = project.worktrees(cx).next().unwrap().read(cx);
119 assert_eq!(tree.file_count(), 5);
120 assert_eq!(
121 tree.inode_for_path("fennel/grape"),
122 tree.inode_for_path("finnochio/grape")
123 );
124 });
125}
126
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A worktree with a root `.editorconfig`, Zed project settings in `.zed/`,
    // and a nested `.editorconfig` under `b/` that partially overrides the
    // root one. The test checks precedence between all three sources.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk temp tree into the fake FS so the project sees it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path,
        // blocking on language detection for the file's extension.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it falls back to the .zed/settings tab_size of 8.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
216
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Two `.zed` directories: one at the worktree root and one nested under
    // `b/`, each carrying its own settings.json (tab_size) and tasks.json.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution requires an active worktree context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Source kind of the tasks defined in the root-level `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Files under each directory must pick up that directory's settings.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree-local tasks resolve; the nested one sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as recently scheduled, and add a global task
    // via the user's global tasks.json.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first, followed by the other
    // worktree task, then the newly added global task.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
422
423#[gpui::test]
424async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
425 init_test(cx);
426 TaskStore::init(None);
427
428 let fs = FakeFs::new(cx.executor());
429 fs.insert_tree(
430 path!("/dir"),
431 json!({
432 ".zed": {
433 "tasks.json": r#"[{
434 "label": "test worktree root",
435 "command": "echo $ZED_WORKTREE_ROOT"
436 }]"#,
437 },
438 "a": {
439 "a.rs": "fn a() {\n A\n}"
440 },
441 }),
442 )
443 .await;
444
445 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
446 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
447
448 cx.executor().run_until_parked();
449 let worktree_id = cx.update(|cx| {
450 project.update(cx, |project, cx| {
451 project.worktrees(cx).next().unwrap().read(cx).id()
452 })
453 });
454
455 let active_non_worktree_item_tasks = cx.update(|cx| {
456 get_all_tasks(
457 &project,
458 &TaskContexts {
459 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
460 active_worktree_context: None,
461 other_worktree_contexts: Vec::new(),
462 lsp_task_sources: HashMap::default(),
463 latest_selection: None,
464 },
465 cx,
466 )
467 });
468 assert!(
469 active_non_worktree_item_tasks.is_empty(),
470 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
471 );
472
473 let active_worktree_tasks = cx.update(|cx| {
474 get_all_tasks(
475 &project,
476 &TaskContexts {
477 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
478 active_worktree_context: Some((worktree_id, {
479 let mut worktree_context = TaskContext::default();
480 worktree_context
481 .task_variables
482 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
483 worktree_context
484 })),
485 other_worktree_contexts: Vec::new(),
486 lsp_task_sources: HashMap::default(),
487 latest_selection: None,
488 },
489 cx,
490 )
491 });
492 assert_eq!(
493 active_worktree_tasks
494 .into_iter()
495 .map(|(source_kind, task)| {
496 let resolved = task.resolved.unwrap();
497 (source_kind, resolved.command)
498 })
499 .collect::<Vec<_>>(),
500 vec![(
501 TaskSourceKind::Worktree {
502 id: worktree_id,
503 directory_in_worktree: PathBuf::from(separator!(".zed")),
504 id_base: if cfg!(windows) {
505 "local worktree tasks from directory \".zed\"".into()
506 } else {
507 "local worktree tasks from directory \".zed\"".into()
508 },
509 },
510 "echo /dir".to_string(),
511 )]
512 );
513}
514
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A worktree with Rust sources, a JSON file, and a TOML file that has no
    // associated language server. The test walks through server startup,
    // buffer registration, edits, saves, renames, and server restarts.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    // Only the rust buffer's edit reaches the rust server; the TOML edit is
    // not forwarded to any server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // A same-extension rename is a close of the old URI followed by an open of
    // the new one on the same server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed one diagnostic so we can check it is cleared when the buffer later
    // switches language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
916
917#[gpui::test]
918async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
919 init_test(cx);
920
921 let fs = FakeFs::new(cx.executor());
922 fs.insert_tree(
923 path!("/the-root"),
924 json!({
925 ".gitignore": "target\n",
926 "Cargo.lock": "",
927 "src": {
928 "a.rs": "",
929 "b.rs": "",
930 },
931 "target": {
932 "x": {
933 "out": {
934 "x.rs": ""
935 }
936 },
937 "y": {
938 "out": {
939 "y.rs": "",
940 }
941 },
942 "z": {
943 "out": {
944 "z.rs": ""
945 }
946 }
947 }
948 }),
949 )
950 .await;
951 fs.insert_tree(
952 path!("/the-registry"),
953 json!({
954 "dep1": {
955 "src": {
956 "dep1.rs": "",
957 }
958 },
959 "dep2": {
960 "src": {
961 "dep2.rs": "",
962 }
963 },
964 }),
965 )
966 .await;
967 fs.insert_tree(
968 path!("/the/stdlib"),
969 json!({
970 "LICENSE": "",
971 "src": {
972 "string.rs": "",
973 }
974 }),
975 )
976 .await;
977
978 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
979 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
980 (project.languages().clone(), project.lsp_store())
981 });
982 language_registry.add(rust_lang());
983 let mut fake_servers = language_registry.register_fake_lsp(
984 "Rust",
985 FakeLspAdapter {
986 name: "the-language-server",
987 ..Default::default()
988 },
989 );
990
991 cx.executor().run_until_parked();
992
993 // Start the language server by opening a buffer with a compatible file extension.
994 project
995 .update(cx, |project, cx| {
996 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
997 })
998 .await
999 .unwrap();
1000
1001 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1002 project.update(cx, |project, cx| {
1003 let worktree = project.worktrees(cx).next().unwrap();
1004 assert_eq!(
1005 worktree
1006 .read(cx)
1007 .snapshot()
1008 .entries(true, 0)
1009 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1010 .collect::<Vec<_>>(),
1011 &[
1012 (Path::new(""), false),
1013 (Path::new(".gitignore"), false),
1014 (Path::new("Cargo.lock"), false),
1015 (Path::new("src"), false),
1016 (Path::new("src/a.rs"), false),
1017 (Path::new("src/b.rs"), false),
1018 (Path::new("target"), true),
1019 ]
1020 );
1021 });
1022
1023 let prev_read_dir_count = fs.read_dir_call_count();
1024
1025 let fake_server = fake_servers.next().await.unwrap();
1026 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1027 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1028 (id, LanguageServerName::from(status.name.as_str()))
1029 });
1030
1031 // Simulate jumping to a definition in a dependency outside of the worktree.
1032 let _out_of_worktree_buffer = project
1033 .update(cx, |project, cx| {
1034 project.open_local_buffer_via_lsp(
1035 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1036 server_id,
1037 server_name.clone(),
1038 cx,
1039 )
1040 })
1041 .await
1042 .unwrap();
1043
1044 // Keep track of the FS events reported to the language server.
1045 let file_changes = Arc::new(Mutex::new(Vec::new()));
1046 fake_server
1047 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1048 registrations: vec![lsp::Registration {
1049 id: Default::default(),
1050 method: "workspace/didChangeWatchedFiles".to_string(),
1051 register_options: serde_json::to_value(
1052 lsp::DidChangeWatchedFilesRegistrationOptions {
1053 watchers: vec![
1054 lsp::FileSystemWatcher {
1055 glob_pattern: lsp::GlobPattern::String(
1056 path!("/the-root/Cargo.toml").to_string(),
1057 ),
1058 kind: None,
1059 },
1060 lsp::FileSystemWatcher {
1061 glob_pattern: lsp::GlobPattern::String(
1062 path!("/the-root/src/*.{rs,c}").to_string(),
1063 ),
1064 kind: None,
1065 },
1066 lsp::FileSystemWatcher {
1067 glob_pattern: lsp::GlobPattern::String(
1068 path!("/the-root/target/y/**/*.rs").to_string(),
1069 ),
1070 kind: None,
1071 },
1072 lsp::FileSystemWatcher {
1073 glob_pattern: lsp::GlobPattern::String(
1074 path!("/the/stdlib/src/**/*.rs").to_string(),
1075 ),
1076 kind: None,
1077 },
1078 lsp::FileSystemWatcher {
1079 glob_pattern: lsp::GlobPattern::String(
1080 path!("**/Cargo.lock").to_string(),
1081 ),
1082 kind: None,
1083 },
1084 ],
1085 },
1086 )
1087 .ok(),
1088 }],
1089 })
1090 .await
1091 .unwrap();
1092 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1093 let file_changes = file_changes.clone();
1094 move |params, _| {
1095 let mut file_changes = file_changes.lock();
1096 file_changes.extend(params.changes);
1097 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1098 }
1099 });
1100
1101 cx.executor().run_until_parked();
1102 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1103 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1104
1105 let mut new_watched_paths = fs.watched_paths();
1106 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1107 assert_eq!(
1108 &new_watched_paths,
1109 &[
1110 Path::new(path!("/the-root")),
1111 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1112 Path::new(path!("/the/stdlib/src"))
1113 ]
1114 );
1115
1116 // Now the language server has asked us to watch an ignored directory path,
1117 // so we recursively load it.
1118 project.update(cx, |project, cx| {
1119 let worktree = project.visible_worktrees(cx).next().unwrap();
1120 assert_eq!(
1121 worktree
1122 .read(cx)
1123 .snapshot()
1124 .entries(true, 0)
1125 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1126 .collect::<Vec<_>>(),
1127 &[
1128 (Path::new(""), false),
1129 (Path::new(".gitignore"), false),
1130 (Path::new("Cargo.lock"), false),
1131 (Path::new("src"), false),
1132 (Path::new("src/a.rs"), false),
1133 (Path::new("src/b.rs"), false),
1134 (Path::new("target"), true),
1135 (Path::new("target/x"), true),
1136 (Path::new("target/y"), true),
1137 (Path::new("target/y/out"), true),
1138 (Path::new("target/y/out/y.rs"), true),
1139 (Path::new("target/z"), true),
1140 ]
1141 );
1142 });
1143
1144 // Perform some file system mutations, two of which match the watched patterns,
1145 // and one of which does not.
1146 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1147 .await
1148 .unwrap();
1149 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1150 .await
1151 .unwrap();
1152 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1153 .await
1154 .unwrap();
1155 fs.create_file(
1156 path!("/the-root/target/x/out/x2.rs").as_ref(),
1157 Default::default(),
1158 )
1159 .await
1160 .unwrap();
1161 fs.create_file(
1162 path!("/the-root/target/y/out/y2.rs").as_ref(),
1163 Default::default(),
1164 )
1165 .await
1166 .unwrap();
1167 fs.save(
1168 path!("/the-root/Cargo.lock").as_ref(),
1169 &"".into(),
1170 Default::default(),
1171 )
1172 .await
1173 .unwrap();
1174 fs.save(
1175 path!("/the-stdlib/LICENSE").as_ref(),
1176 &"".into(),
1177 Default::default(),
1178 )
1179 .await
1180 .unwrap();
1181 fs.save(
1182 path!("/the/stdlib/src/string.rs").as_ref(),
1183 &"".into(),
1184 Default::default(),
1185 )
1186 .await
1187 .unwrap();
1188
1189 // The language server receives events for the FS mutations that match its watch patterns.
1190 cx.executor().run_until_parked();
1191 assert_eq!(
1192 &*file_changes.lock(),
1193 &[
1194 lsp::FileEvent {
1195 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1196 typ: lsp::FileChangeType::CHANGED,
1197 },
1198 lsp::FileEvent {
1199 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1200 typ: lsp::FileChangeType::DELETED,
1201 },
1202 lsp::FileEvent {
1203 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1204 typ: lsp::FileChangeType::CREATED,
1205 },
1206 lsp::FileEvent {
1207 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1208 typ: lsp::FileChangeType::CREATED,
1209 },
1210 lsp::FileEvent {
1211 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1212 typ: lsp::FileChangeType::CHANGED,
1213 },
1214 ]
1215 );
1216}
1217
1218#[gpui::test]
1219async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1220 init_test(cx);
1221
1222 let fs = FakeFs::new(cx.executor());
1223 fs.insert_tree(
1224 path!("/dir"),
1225 json!({
1226 "a.rs": "let a = 1;",
1227 "b.rs": "let b = 2;"
1228 }),
1229 )
1230 .await;
1231
1232 let project = Project::test(
1233 fs,
1234 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1235 cx,
1236 )
1237 .await;
1238 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1239
1240 let buffer_a = project
1241 .update(cx, |project, cx| {
1242 project.open_local_buffer(path!("/dir/a.rs"), cx)
1243 })
1244 .await
1245 .unwrap();
1246 let buffer_b = project
1247 .update(cx, |project, cx| {
1248 project.open_local_buffer(path!("/dir/b.rs"), cx)
1249 })
1250 .await
1251 .unwrap();
1252
1253 lsp_store.update(cx, |lsp_store, cx| {
1254 lsp_store
1255 .update_diagnostics(
1256 LanguageServerId(0),
1257 lsp::PublishDiagnosticsParams {
1258 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1259 version: None,
1260 diagnostics: vec![lsp::Diagnostic {
1261 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1262 severity: Some(lsp::DiagnosticSeverity::ERROR),
1263 message: "error 1".to_string(),
1264 ..Default::default()
1265 }],
1266 },
1267 &[],
1268 cx,
1269 )
1270 .unwrap();
1271 lsp_store
1272 .update_diagnostics(
1273 LanguageServerId(0),
1274 lsp::PublishDiagnosticsParams {
1275 uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
1276 version: None,
1277 diagnostics: vec![lsp::Diagnostic {
1278 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1279 severity: Some(DiagnosticSeverity::WARNING),
1280 message: "error 2".to_string(),
1281 ..Default::default()
1282 }],
1283 },
1284 &[],
1285 cx,
1286 )
1287 .unwrap();
1288 });
1289
1290 buffer_a.update(cx, |buffer, _| {
1291 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1292 assert_eq!(
1293 chunks
1294 .iter()
1295 .map(|(s, d)| (s.as_str(), *d))
1296 .collect::<Vec<_>>(),
1297 &[
1298 ("let ", None),
1299 ("a", Some(DiagnosticSeverity::ERROR)),
1300 (" = 1;", None),
1301 ]
1302 );
1303 });
1304 buffer_b.update(cx, |buffer, _| {
1305 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1306 assert_eq!(
1307 chunks
1308 .iter()
1309 .map(|(s, d)| (s.as_str(), *d))
1310 .collect::<Vec<_>>(),
1311 &[
1312 ("let ", None),
1313 ("b", Some(DiagnosticSeverity::WARNING)),
1314 (" = 2;", None),
1315 ]
1316 );
1317 });
1318}
1319
1320#[gpui::test]
1321async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1322 init_test(cx);
1323
1324 let fs = FakeFs::new(cx.executor());
1325 fs.insert_tree(
1326 path!("/root"),
1327 json!({
1328 "dir": {
1329 ".git": {
1330 "HEAD": "ref: refs/heads/main",
1331 },
1332 ".gitignore": "b.rs",
1333 "a.rs": "let a = 1;",
1334 "b.rs": "let b = 2;",
1335 },
1336 "other.rs": "let b = c;"
1337 }),
1338 )
1339 .await;
1340
1341 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1342 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1343 let (worktree, _) = project
1344 .update(cx, |project, cx| {
1345 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1346 })
1347 .await
1348 .unwrap();
1349 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1350
1351 let (worktree, _) = project
1352 .update(cx, |project, cx| {
1353 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1354 })
1355 .await
1356 .unwrap();
1357 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1358
1359 let server_id = LanguageServerId(0);
1360 lsp_store.update(cx, |lsp_store, cx| {
1361 lsp_store
1362 .update_diagnostics(
1363 server_id,
1364 lsp::PublishDiagnosticsParams {
1365 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1366 version: None,
1367 diagnostics: vec![lsp::Diagnostic {
1368 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1369 severity: Some(lsp::DiagnosticSeverity::ERROR),
1370 message: "unused variable 'b'".to_string(),
1371 ..Default::default()
1372 }],
1373 },
1374 &[],
1375 cx,
1376 )
1377 .unwrap();
1378 lsp_store
1379 .update_diagnostics(
1380 server_id,
1381 lsp::PublishDiagnosticsParams {
1382 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1383 version: None,
1384 diagnostics: vec![lsp::Diagnostic {
1385 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1386 severity: Some(lsp::DiagnosticSeverity::ERROR),
1387 message: "unknown variable 'c'".to_string(),
1388 ..Default::default()
1389 }],
1390 },
1391 &[],
1392 cx,
1393 )
1394 .unwrap();
1395 });
1396
1397 let main_ignored_buffer = project
1398 .update(cx, |project, cx| {
1399 project.open_buffer((main_worktree_id, "b.rs"), cx)
1400 })
1401 .await
1402 .unwrap();
1403 main_ignored_buffer.update(cx, |buffer, _| {
1404 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1405 assert_eq!(
1406 chunks
1407 .iter()
1408 .map(|(s, d)| (s.as_str(), *d))
1409 .collect::<Vec<_>>(),
1410 &[
1411 ("let ", None),
1412 ("b", Some(DiagnosticSeverity::ERROR)),
1413 (" = 2;", None),
1414 ],
1415 "Gigitnored buffers should still get in-buffer diagnostics",
1416 );
1417 });
1418 let other_buffer = project
1419 .update(cx, |project, cx| {
1420 project.open_buffer((other_worktree_id, ""), cx)
1421 })
1422 .await
1423 .unwrap();
1424 other_buffer.update(cx, |buffer, _| {
1425 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1426 assert_eq!(
1427 chunks
1428 .iter()
1429 .map(|(s, d)| (s.as_str(), *d))
1430 .collect::<Vec<_>>(),
1431 &[
1432 ("let b = ", None),
1433 ("c", Some(DiagnosticSeverity::ERROR)),
1434 (";", None),
1435 ],
1436 "Buffers from hidden projects should still get in-buffer diagnostics"
1437 );
1438 });
1439
1440 project.update(cx, |project, cx| {
1441 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1442 assert_eq!(
1443 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1444 vec![(
1445 ProjectPath {
1446 worktree_id: main_worktree_id,
1447 path: Arc::from(Path::new("b.rs")),
1448 },
1449 server_id,
1450 DiagnosticSummary {
1451 error_count: 1,
1452 warning_count: 0,
1453 }
1454 )]
1455 );
1456 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1457 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1458 });
1459}
1460
// Verifies the event sequence emitted while a language server performs
// disk-based diagnostics: `DiskBasedDiagnosticsStarted` when the configured
// progress token begins, `DiagnosticsUpdated` for each published file, and
// `DiskBasedDiagnosticsFinished` when the token ends. Also checks that
// re-publishing an identical (empty) diagnostic set produces no extra event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token the fake adapter is configured to treat as marking disk-based
    // diagnostic work (see `disk_based_diagnostics_progress_token` below).
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    // The first emitted event announces the newly started server.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token (any suffix after it counts)
    // should surface a `DiskBasedDiagnosticsStarted` event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a.rs mid-progress emits `DiagnosticsUpdated`.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress token finishes the disk-based diagnostics pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the diagnosed file should show the published diagnostic,
    // anchored at the reported range, as the primary entry of its group.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish is a no-op: no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1596
// Restarting a language server while its disk-based diagnostics progress is
// still open must not leave the project stuck in a "diagnosing" state: the
// replacement server's progress lifecycle alone determines when diagnostics
// are considered finished.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement instance gets a fresh id (1, not 0).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1683
1684#[gpui::test]
1685async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1686 init_test(cx);
1687
1688 let fs = FakeFs::new(cx.executor());
1689 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1690
1691 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1692
1693 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1694 language_registry.add(rust_lang());
1695 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1696
1697 let (buffer, _) = project
1698 .update(cx, |project, cx| {
1699 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1700 })
1701 .await
1702 .unwrap();
1703
1704 // Publish diagnostics
1705 let fake_server = fake_servers.next().await.unwrap();
1706 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1707 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1708 version: None,
1709 diagnostics: vec![lsp::Diagnostic {
1710 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1711 severity: Some(lsp::DiagnosticSeverity::ERROR),
1712 message: "the message".to_string(),
1713 ..Default::default()
1714 }],
1715 });
1716
1717 cx.executor().run_until_parked();
1718 buffer.update(cx, |buffer, _| {
1719 assert_eq!(
1720 buffer
1721 .snapshot()
1722 .diagnostics_in_range::<_, usize>(0..1, false)
1723 .map(|entry| entry.diagnostic.message.clone())
1724 .collect::<Vec<_>>(),
1725 ["the message".to_string()]
1726 );
1727 });
1728 project.update(cx, |project, cx| {
1729 assert_eq!(
1730 project.diagnostic_summary(false, cx),
1731 DiagnosticSummary {
1732 error_count: 1,
1733 warning_count: 0,
1734 }
1735 );
1736 });
1737
1738 project.update(cx, |project, cx| {
1739 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1740 });
1741
1742 // The diagnostics are cleared.
1743 cx.executor().run_until_parked();
1744 buffer.update(cx, |buffer, _| {
1745 assert_eq!(
1746 buffer
1747 .snapshot()
1748 .diagnostics_in_range::<_, usize>(0..1, false)
1749 .map(|entry| entry.diagnostic.message.clone())
1750 .collect::<Vec<_>>(),
1751 Vec::<String>::new(),
1752 );
1753 });
1754 project.update(cx, |project, cx| {
1755 assert_eq!(
1756 project.diagnostic_summary(false, cx),
1757 DiagnosticSummary {
1758 error_count: 0,
1759 warning_count: 0,
1760 }
1761 );
1762 });
1763}
1764
1765#[gpui::test]
1766async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1767 init_test(cx);
1768
1769 let fs = FakeFs::new(cx.executor());
1770 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1771
1772 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1773 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1774
1775 language_registry.add(rust_lang());
1776 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1777
1778 let (buffer, _handle) = project
1779 .update(cx, |project, cx| {
1780 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1781 })
1782 .await
1783 .unwrap();
1784
1785 // Before restarting the server, report diagnostics with an unknown buffer version.
1786 let fake_server = fake_servers.next().await.unwrap();
1787 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1788 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1789 version: Some(10000),
1790 diagnostics: Vec::new(),
1791 });
1792 cx.executor().run_until_parked();
1793 project.update(cx, |project, cx| {
1794 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1795 });
1796
1797 let mut fake_server = fake_servers.next().await.unwrap();
1798 let notification = fake_server
1799 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1800 .await
1801 .text_document;
1802 assert_eq!(notification.version, 0);
1803}
1804
1805#[gpui::test]
1806async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1807 init_test(cx);
1808
1809 let progress_token = "the-progress-token";
1810
1811 let fs = FakeFs::new(cx.executor());
1812 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1813
1814 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1815
1816 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1817 language_registry.add(rust_lang());
1818 let mut fake_servers = language_registry.register_fake_lsp(
1819 "Rust",
1820 FakeLspAdapter {
1821 name: "the-language-server",
1822 disk_based_diagnostics_sources: vec!["disk".into()],
1823 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1824 ..Default::default()
1825 },
1826 );
1827
1828 let (buffer, _handle) = project
1829 .update(cx, |project, cx| {
1830 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1831 })
1832 .await
1833 .unwrap();
1834
1835 // Simulate diagnostics starting to update.
1836 let mut fake_server = fake_servers.next().await.unwrap();
1837 fake_server
1838 .start_progress_with(
1839 "another-token",
1840 lsp::WorkDoneProgressBegin {
1841 cancellable: Some(false),
1842 ..Default::default()
1843 },
1844 )
1845 .await;
1846 fake_server
1847 .start_progress_with(
1848 progress_token,
1849 lsp::WorkDoneProgressBegin {
1850 cancellable: Some(true),
1851 ..Default::default()
1852 },
1853 )
1854 .await;
1855 cx.executor().run_until_parked();
1856
1857 project.update(cx, |project, cx| {
1858 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1859 });
1860
1861 let cancel_notification = fake_server
1862 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1863 .await;
1864 assert_eq!(
1865 cancel_notification.token,
1866 NumberOrString::String(progress_token.into())
1867 );
1868}
1869
// Toggling `enable_language_server` in the per-language settings should stop
// only the corresponding server (observed via its `exit` notification) and
// start a fresh instance when re-enabled, leaving other languages untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts that language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Each server is told about the buffer for its own language.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance is spawned and re-opens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1987
1988#[gpui::test(iterations = 3)]
1989async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1990 init_test(cx);
1991
1992 let text = "
1993 fn a() { A }
1994 fn b() { BB }
1995 fn c() { CCC }
1996 "
1997 .unindent();
1998
1999 let fs = FakeFs::new(cx.executor());
2000 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
2001
2002 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2003 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2004
2005 language_registry.add(rust_lang());
2006 let mut fake_servers = language_registry.register_fake_lsp(
2007 "Rust",
2008 FakeLspAdapter {
2009 disk_based_diagnostics_sources: vec!["disk".into()],
2010 ..Default::default()
2011 },
2012 );
2013
2014 let buffer = project
2015 .update(cx, |project, cx| {
2016 project.open_local_buffer(path!("/dir/a.rs"), cx)
2017 })
2018 .await
2019 .unwrap();
2020
2021 let _handle = project.update(cx, |project, cx| {
2022 project.register_buffer_with_language_servers(&buffer, cx)
2023 });
2024
2025 let mut fake_server = fake_servers.next().await.unwrap();
2026 let open_notification = fake_server
2027 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2028 .await;
2029
2030 // Edit the buffer, moving the content down
2031 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2032 let change_notification_1 = fake_server
2033 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2034 .await;
2035 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2036
2037 // Report some diagnostics for the initial version of the buffer
2038 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2039 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2040 version: Some(open_notification.text_document.version),
2041 diagnostics: vec![
2042 lsp::Diagnostic {
2043 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2044 severity: Some(DiagnosticSeverity::ERROR),
2045 message: "undefined variable 'A'".to_string(),
2046 source: Some("disk".to_string()),
2047 ..Default::default()
2048 },
2049 lsp::Diagnostic {
2050 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2051 severity: Some(DiagnosticSeverity::ERROR),
2052 message: "undefined variable 'BB'".to_string(),
2053 source: Some("disk".to_string()),
2054 ..Default::default()
2055 },
2056 lsp::Diagnostic {
2057 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2058 severity: Some(DiagnosticSeverity::ERROR),
2059 source: Some("disk".to_string()),
2060 message: "undefined variable 'CCC'".to_string(),
2061 ..Default::default()
2062 },
2063 ],
2064 });
2065
2066 // The diagnostics have moved down since they were created.
2067 cx.executor().run_until_parked();
2068 buffer.update(cx, |buffer, _| {
2069 assert_eq!(
2070 buffer
2071 .snapshot()
2072 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2073 .collect::<Vec<_>>(),
2074 &[
2075 DiagnosticEntry {
2076 range: Point::new(3, 9)..Point::new(3, 11),
2077 diagnostic: Diagnostic {
2078 source: Some("disk".into()),
2079 severity: DiagnosticSeverity::ERROR,
2080 message: "undefined variable 'BB'".to_string(),
2081 is_disk_based: true,
2082 group_id: 1,
2083 is_primary: true,
2084 ..Default::default()
2085 },
2086 },
2087 DiagnosticEntry {
2088 range: Point::new(4, 9)..Point::new(4, 12),
2089 diagnostic: Diagnostic {
2090 source: Some("disk".into()),
2091 severity: DiagnosticSeverity::ERROR,
2092 message: "undefined variable 'CCC'".to_string(),
2093 is_disk_based: true,
2094 group_id: 2,
2095 is_primary: true,
2096 ..Default::default()
2097 }
2098 }
2099 ]
2100 );
2101 assert_eq!(
2102 chunks_with_diagnostics(buffer, 0..buffer.len()),
2103 [
2104 ("\n\nfn a() { ".to_string(), None),
2105 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2106 (" }\nfn b() { ".to_string(), None),
2107 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2108 (" }\nfn c() { ".to_string(), None),
2109 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2110 (" }\n".to_string(), None),
2111 ]
2112 );
2113 assert_eq!(
2114 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2115 [
2116 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2117 (" }\nfn c() { ".to_string(), None),
2118 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2119 ]
2120 );
2121 });
2122
2123 // Ensure overlapping diagnostics are highlighted correctly.
2124 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2125 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2126 version: Some(open_notification.text_document.version),
2127 diagnostics: vec![
2128 lsp::Diagnostic {
2129 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2130 severity: Some(DiagnosticSeverity::ERROR),
2131 message: "undefined variable 'A'".to_string(),
2132 source: Some("disk".to_string()),
2133 ..Default::default()
2134 },
2135 lsp::Diagnostic {
2136 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2137 severity: Some(DiagnosticSeverity::WARNING),
2138 message: "unreachable statement".to_string(),
2139 source: Some("disk".to_string()),
2140 ..Default::default()
2141 },
2142 ],
2143 });
2144
2145 cx.executor().run_until_parked();
2146 buffer.update(cx, |buffer, _| {
2147 assert_eq!(
2148 buffer
2149 .snapshot()
2150 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2151 .collect::<Vec<_>>(),
2152 &[
2153 DiagnosticEntry {
2154 range: Point::new(2, 9)..Point::new(2, 12),
2155 diagnostic: Diagnostic {
2156 source: Some("disk".into()),
2157 severity: DiagnosticSeverity::WARNING,
2158 message: "unreachable statement".to_string(),
2159 is_disk_based: true,
2160 group_id: 4,
2161 is_primary: true,
2162 ..Default::default()
2163 }
2164 },
2165 DiagnosticEntry {
2166 range: Point::new(2, 9)..Point::new(2, 10),
2167 diagnostic: Diagnostic {
2168 source: Some("disk".into()),
2169 severity: DiagnosticSeverity::ERROR,
2170 message: "undefined variable 'A'".to_string(),
2171 is_disk_based: true,
2172 group_id: 3,
2173 is_primary: true,
2174 ..Default::default()
2175 },
2176 }
2177 ]
2178 );
2179 assert_eq!(
2180 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2181 [
2182 ("fn a() { ".to_string(), None),
2183 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2184 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2185 ("\n".to_string(), None),
2186 ]
2187 );
2188 assert_eq!(
2189 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2190 [
2191 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2192 ("\n".to_string(), None),
2193 ]
2194 );
2195 });
2196
2197 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2198 // changes since the last save.
2199 buffer.update(cx, |buffer, cx| {
2200 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2201 buffer.edit(
2202 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2203 None,
2204 cx,
2205 );
2206 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2207 });
2208 let change_notification_2 = fake_server
2209 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2210 .await;
2211 assert!(
2212 change_notification_2.text_document.version > change_notification_1.text_document.version
2213 );
2214
2215 // Handle out-of-order diagnostics
2216 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2217 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2218 version: Some(change_notification_2.text_document.version),
2219 diagnostics: vec![
2220 lsp::Diagnostic {
2221 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2222 severity: Some(DiagnosticSeverity::ERROR),
2223 message: "undefined variable 'BB'".to_string(),
2224 source: Some("disk".to_string()),
2225 ..Default::default()
2226 },
2227 lsp::Diagnostic {
2228 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2229 severity: Some(DiagnosticSeverity::WARNING),
2230 message: "undefined variable 'A'".to_string(),
2231 source: Some("disk".to_string()),
2232 ..Default::default()
2233 },
2234 ],
2235 });
2236
2237 cx.executor().run_until_parked();
2238 buffer.update(cx, |buffer, _| {
2239 assert_eq!(
2240 buffer
2241 .snapshot()
2242 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2243 .collect::<Vec<_>>(),
2244 &[
2245 DiagnosticEntry {
2246 range: Point::new(2, 21)..Point::new(2, 22),
2247 diagnostic: Diagnostic {
2248 source: Some("disk".into()),
2249 severity: DiagnosticSeverity::WARNING,
2250 message: "undefined variable 'A'".to_string(),
2251 is_disk_based: true,
2252 group_id: 6,
2253 is_primary: true,
2254 ..Default::default()
2255 }
2256 },
2257 DiagnosticEntry {
2258 range: Point::new(3, 9)..Point::new(3, 14),
2259 diagnostic: Diagnostic {
2260 source: Some("disk".into()),
2261 severity: DiagnosticSeverity::ERROR,
2262 message: "undefined variable 'BB'".to_string(),
2263 is_disk_based: true,
2264 group_id: 5,
2265 is_primary: true,
2266 ..Default::default()
2267 },
2268 }
2269 ]
2270 );
2271 });
2272}
2273
// Verifies how zero-width (empty) diagnostic ranges are rendered as highlighted
// chunks: an empty range is widened to cover an adjacent character so the
// diagnostic is visible in the buffer.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two diagnostics directly into the LSP store, both with empty
    // ranges: one mid-line (0,10..0,10) and one at the end of a line
    // (1,10..1,10), to exercise both widening directions.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2346
// Verifies that diagnostics reported by two different language servers for the
// same file are tracked independently and both counted in the project-wide
// diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports one error over the same range as server 1 below.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Server 1 reports a second, distinct error for the same range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors must be counted — the second update must not
        // replace the first server's diagnostics.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2403
// Verifies that LSP edits computed against an older document version are
// correctly transformed through the buffer edits made since that version,
// so they apply cleanly to the current buffer content.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be submitted against this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP edit positions below refer to the ORIGINAL text (the stale
    // version); edits_from_lsp must remap them across the interim edits.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the remapped edits must preserve the user's interim comments
    // while incorporating the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2558
// Verifies that a large, overlapping set of LSP edits describing a small
// logical change is minimized into a compact set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The sprawling LSP edits above should be reduced to just two minimal
        // buffer edits: the import rewrite and the removal of `use a::c;`.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2669
// Verifies tolerance for spec-violating servers: an insertion that follows a
// replacement at the same position should still produce the intended result.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The insertion must land before the replaced text, not after it.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
2725
// Verifies that malformed LSP edits — unordered, with inverted ranges, or
// pointing past the end of the file — are sanitized into valid buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits produced for the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2832
2833fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2834 buffer: &Buffer,
2835 range: Range<T>,
2836) -> Vec<(String, Option<DiagnosticSeverity>)> {
2837 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2838 for chunk in buffer.snapshot().chunks(range, true) {
2839 if chunks.last().map_or(false, |prev_chunk| {
2840 prev_chunk.1 == chunk.diagnostic_severity
2841 }) {
2842 chunks.last_mut().unwrap().0.push_str(chunk.text);
2843 } else {
2844 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2845 }
2846 }
2847 chunks
2848}
2849
// Verifies go-to-definition into a file outside the project's worktrees:
// the target buffer is opened via an invisible worktree that is dropped
// when the last reference to the definition goes away.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside all worktrees.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs is held by an invisible
        // worktree alongside the visible b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: list each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2947
// Verifies that when a completion item supplies a `text_edit`, that edit's
// text and range are used verbatim, taking precedence over `insert_text`
// and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's item carries all three text sources; only the text_edit's
    // `new_text` and range should survive into the resolved completion.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3025
// Verifies fallback behavior when completion items omit `text_edit` but the
// completion list supplies a default `edit_range`: `insert_text` is preferred,
// and `label` is used when `insert_text` is also absent.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text supplies the new text; the default edit_range supplies
        // the replaced span.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label itself becomes the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3151
// Verifies completions when neither `text_edit` nor a default `edit_range`
// is provided: the replaced range must be inferred from the word around the
// cursor position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The replaced range is the "fqn" word (3 chars) preceding the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // The cursor sits just before the closing quote, inside the string.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is used as new text, replacing the "cmp" word before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3247
// Verifies that carriage returns in a server-provided insert_text are
// normalized to plain newlines in the resulting completion text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The insert_text mixes a bare "\r" and a "\r\n" to cover both forms.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3310
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end test of the command-based code action flow:
    // textDocument/codeAction -> lazy codeAction/resolve (which yields a
    // command instead of edits) -> workspace/executeCommand -> server-initiated
    // workspace/applyEdit -> the resulting edits land in the project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises lazy code-action resolution plus one
    // executable command — and deliberately no edit-producing capability.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    // NOTE: the code-action request is issued *before* the handler is
    // registered; `.next().await` then pumps until that request is served.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`, which marks it as
    // needing resolution).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> client: apply a single insertion of "X" at the
                    // start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3451
3452#[gpui::test(iterations = 10)]
3453async fn test_save_file(cx: &mut gpui::TestAppContext) {
3454 init_test(cx);
3455
3456 let fs = FakeFs::new(cx.executor());
3457 fs.insert_tree(
3458 path!("/dir"),
3459 json!({
3460 "file1": "the old contents",
3461 }),
3462 )
3463 .await;
3464
3465 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3466 let buffer = project
3467 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3468 .await
3469 .unwrap();
3470 buffer.update(cx, |buffer, cx| {
3471 assert_eq!(buffer.text(), "the old contents");
3472 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3473 });
3474
3475 project
3476 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3477 .await
3478 .unwrap();
3479
3480 let new_text = fs
3481 .load(Path::new(path!("/dir/file1")))
3482 .await
3483 .unwrap()
3484 .replace("\r\n", "\n");
3485 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3486}
3487
3488#[gpui::test(iterations = 30)]
3489async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3490 init_test(cx);
3491
3492 let fs = FakeFs::new(cx.executor().clone());
3493 fs.insert_tree(
3494 path!("/dir"),
3495 json!({
3496 "file1": "the original contents",
3497 }),
3498 )
3499 .await;
3500
3501 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3502 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3503 let buffer = project
3504 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3505 .await
3506 .unwrap();
3507
3508 // Simulate buffer diffs being slow, so that they don't complete before
3509 // the next file change occurs.
3510 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3511
3512 // Change the buffer's file on disk, and then wait for the file change
3513 // to be detected by the worktree, so that the buffer starts reloading.
3514 fs.save(
3515 path!("/dir/file1").as_ref(),
3516 &"the first contents".into(),
3517 Default::default(),
3518 )
3519 .await
3520 .unwrap();
3521 worktree.next_event(cx).await;
3522
3523 // Change the buffer's file again. Depending on the random seed, the
3524 // previous file change may still be in progress.
3525 fs.save(
3526 path!("/dir/file1").as_ref(),
3527 &"the second contents".into(),
3528 Default::default(),
3529 )
3530 .await
3531 .unwrap();
3532 worktree.next_event(cx).await;
3533
3534 cx.executor().run_until_parked();
3535 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3536 buffer.read_with(cx, |buffer, _| {
3537 assert_eq!(buffer.text(), on_disk_text);
3538 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3539 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3540 });
3541}
3542
3543#[gpui::test(iterations = 30)]
3544async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3545 init_test(cx);
3546
3547 let fs = FakeFs::new(cx.executor().clone());
3548 fs.insert_tree(
3549 path!("/dir"),
3550 json!({
3551 "file1": "the original contents",
3552 }),
3553 )
3554 .await;
3555
3556 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3557 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3558 let buffer = project
3559 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3560 .await
3561 .unwrap();
3562
3563 // Simulate buffer diffs being slow, so that they don't complete before
3564 // the next file change occurs.
3565 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3566
3567 // Change the buffer's file on disk, and then wait for the file change
3568 // to be detected by the worktree, so that the buffer starts reloading.
3569 fs.save(
3570 path!("/dir/file1").as_ref(),
3571 &"the first contents".into(),
3572 Default::default(),
3573 )
3574 .await
3575 .unwrap();
3576 worktree.next_event(cx).await;
3577
3578 cx.executor()
3579 .spawn(cx.executor().simulate_random_delay())
3580 .await;
3581
3582 // Perform a noop edit, causing the buffer's version to increase.
3583 buffer.update(cx, |buffer, cx| {
3584 buffer.edit([(0..0, " ")], None, cx);
3585 buffer.undo(cx);
3586 });
3587
3588 cx.executor().run_until_parked();
3589 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3590 buffer.read_with(cx, |buffer, _| {
3591 let buffer_text = buffer.text();
3592 if buffer_text == on_disk_text {
3593 assert!(
3594 !buffer.is_dirty() && !buffer.has_conflict(),
3595 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3596 );
3597 }
3598 // If the file change occurred while the buffer was processing the first
3599 // change, the buffer will be in a conflicting state.
3600 else {
3601 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3602 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3603 }
3604 });
3605}
3606
3607#[gpui::test]
3608async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3609 init_test(cx);
3610
3611 let fs = FakeFs::new(cx.executor());
3612 fs.insert_tree(
3613 path!("/dir"),
3614 json!({
3615 "file1": "the old contents",
3616 }),
3617 )
3618 .await;
3619
3620 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3621 let buffer = project
3622 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3623 .await
3624 .unwrap();
3625 buffer.update(cx, |buffer, cx| {
3626 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3627 });
3628
3629 project
3630 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3631 .await
3632 .unwrap();
3633
3634 let new_text = fs
3635 .load(Path::new(path!("/dir/file1")))
3636 .await
3637 .unwrap()
3638 .replace("\r\n", "\n");
3639 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3640}
3641
3642#[gpui::test]
3643async fn test_save_as(cx: &mut gpui::TestAppContext) {
3644 init_test(cx);
3645
3646 let fs = FakeFs::new(cx.executor());
3647 fs.insert_tree("/dir", json!({})).await;
3648
3649 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3650
3651 let languages = project.update(cx, |project, _| project.languages().clone());
3652 languages.add(rust_lang());
3653
3654 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3655 buffer.update(cx, |buffer, cx| {
3656 buffer.edit([(0..0, "abc")], None, cx);
3657 assert!(buffer.is_dirty());
3658 assert!(!buffer.has_conflict());
3659 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3660 });
3661 project
3662 .update(cx, |project, cx| {
3663 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3664 let path = ProjectPath {
3665 worktree_id,
3666 path: Arc::from(Path::new("file1.rs")),
3667 };
3668 project.save_buffer_as(buffer.clone(), path, cx)
3669 })
3670 .await
3671 .unwrap();
3672 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3673
3674 cx.executor().run_until_parked();
3675 buffer.update(cx, |buffer, cx| {
3676 assert_eq!(
3677 buffer.file().unwrap().full_path(cx),
3678 Path::new("dir/file1.rs")
3679 );
3680 assert!(!buffer.is_dirty());
3681 assert!(!buffer.has_conflict());
3682 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3683 });
3684
3685 let opened_buffer = project
3686 .update(cx, |project, cx| {
3687 project.open_local_buffer("/dir/file1.rs", cx)
3688 })
3689 .await
3690 .unwrap();
3691 assert_eq!(opened_buffer, buffer);
3692}
3693
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Uses the real filesystem: renames/deletions must preserve worktree entry
    // ids, re-path open buffers, and replicate faithfully to a remote worktree
    // via the recorded update stream.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable worktree entry id for a path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including the rename of an ancestor dir).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports DiskState::Deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    // Replay the recorded update stream into the remote replica.
    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3859
3860#[gpui::test(iterations = 10)]
3861async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3862 init_test(cx);
3863
3864 let fs = FakeFs::new(cx.executor());
3865 fs.insert_tree(
3866 path!("/dir"),
3867 json!({
3868 "a": {
3869 "file1": "",
3870 }
3871 }),
3872 )
3873 .await;
3874
3875 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3876 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3877 let tree_id = tree.update(cx, |tree, _| tree.id());
3878
3879 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3880 project.update(cx, |project, cx| {
3881 let tree = project.worktrees(cx).next().unwrap();
3882 tree.read(cx)
3883 .entry_for_path(path)
3884 .unwrap_or_else(|| panic!("no entry for path {}", path))
3885 .id
3886 })
3887 };
3888
3889 let dir_id = id_for_path("a", cx);
3890 let file_id = id_for_path("a/file1", cx);
3891 let buffer = project
3892 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3893 .await
3894 .unwrap();
3895 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3896
3897 project
3898 .update(cx, |project, cx| {
3899 project.rename_entry(dir_id, Path::new("b"), cx)
3900 })
3901 .unwrap()
3902 .await
3903 .to_included()
3904 .unwrap();
3905 cx.executor().run_until_parked();
3906
3907 assert_eq!(id_for_path("b", cx), dir_id);
3908 assert_eq!(id_for_path("b/file1", cx), file_id);
3909 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3910}
3911
3912#[gpui::test]
3913async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3914 init_test(cx);
3915
3916 let fs = FakeFs::new(cx.executor());
3917 fs.insert_tree(
3918 "/dir",
3919 json!({
3920 "a.txt": "a-contents",
3921 "b.txt": "b-contents",
3922 }),
3923 )
3924 .await;
3925
3926 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3927
3928 // Spawn multiple tasks to open paths, repeating some paths.
3929 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3930 (
3931 p.open_local_buffer("/dir/a.txt", cx),
3932 p.open_local_buffer("/dir/b.txt", cx),
3933 p.open_local_buffer("/dir/a.txt", cx),
3934 )
3935 });
3936
3937 let buffer_a_1 = buffer_a_1.await.unwrap();
3938 let buffer_a_2 = buffer_a_2.await.unwrap();
3939 let buffer_b = buffer_b.await.unwrap();
3940 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3941 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3942
3943 // There is only one buffer per path.
3944 let buffer_a_id = buffer_a_1.entity_id();
3945 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3946
3947 // Open the same path again while it is still open.
3948 drop(buffer_a_1);
3949 let buffer_a_3 = project
3950 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3951 .await
3952 .unwrap();
3953
3954 // There's still only one buffer per path.
3955 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3956}
3957
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises the buffer dirty/conflict state machine and the exact event
    // sequences (Edited / DirtyChanged / Saved / FileHandleChanged) emitted at
    // each transition, including the special cases around deleted files.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every buffer event except Operation, which is noise here.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by telling the buffer its current version is on disk.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two consecutive edits flips dirtiness,
        // so DirtyChanged appears once between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    // Dirty the buffer *before* deleting its file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Only the file handle changed; the buffer stays dirty with no new
    // dirty-state event.
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4139
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // A clean buffer reloads from disk via a diff (preserving anchors); a
    // dirty buffer does not reload and instead enters a conflict state.
    init_test(cx);

    // `ˇ` markers denote positions where anchors will be planted.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Plant an anchor at each marked offset.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The markers in the new contents give the offsets the anchors are
    // expected to land on after the reload-diff is applied.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors moved with the diff instead of being invalidated.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4222
4223#[gpui::test]
4224async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4225 init_test(cx);
4226
4227 let fs = FakeFs::new(cx.executor());
4228 fs.insert_tree(
4229 path!("/dir"),
4230 json!({
4231 "file1": "a\nb\nc\n",
4232 "file2": "one\r\ntwo\r\nthree\r\n",
4233 }),
4234 )
4235 .await;
4236
4237 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4238 let buffer1 = project
4239 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4240 .await
4241 .unwrap();
4242 let buffer2 = project
4243 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4244 .await
4245 .unwrap();
4246
4247 buffer1.update(cx, |buffer, _| {
4248 assert_eq!(buffer.text(), "a\nb\nc\n");
4249 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4250 });
4251 buffer2.update(cx, |buffer, _| {
4252 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4253 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4254 });
4255
4256 // Change a file's line endings on disk from unix to windows. The buffer's
4257 // state updates correctly.
4258 fs.save(
4259 path!("/dir/file1").as_ref(),
4260 &"aaa\nb\nc\n".into(),
4261 LineEnding::Windows,
4262 )
4263 .await
4264 .unwrap();
4265 cx.executor().run_until_parked();
4266 buffer1.update(cx, |buffer, _| {
4267 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4268 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4269 });
4270
4271 // Save a file with windows line endings. The file is written correctly.
4272 buffer2.update(cx, |buffer, cx| {
4273 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4274 });
4275 project
4276 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4277 .await
4278 .unwrap();
4279 assert_eq!(
4280 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4281 "one\r\ntwo\r\nthree\r\nfour\r\n",
4282 );
4283}
4284
4285#[gpui::test]
4286async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4287 init_test(cx);
4288
4289 let fs = FakeFs::new(cx.executor());
4290 fs.insert_tree(
4291 path!("/dir"),
4292 json!({
4293 "a.rs": "
4294 fn foo(mut v: Vec<usize>) {
4295 for x in &v {
4296 v.push(1);
4297 }
4298 }
4299 "
4300 .unindent(),
4301 }),
4302 )
4303 .await;
4304
4305 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4306 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4307 let buffer = project
4308 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
4309 .await
4310 .unwrap();
4311
4312 let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
4313 let message = lsp::PublishDiagnosticsParams {
4314 uri: buffer_uri.clone(),
4315 diagnostics: vec![
4316 lsp::Diagnostic {
4317 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4318 severity: Some(DiagnosticSeverity::WARNING),
4319 message: "error 1".to_string(),
4320 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4321 location: lsp::Location {
4322 uri: buffer_uri.clone(),
4323 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4324 },
4325 message: "error 1 hint 1".to_string(),
4326 }]),
4327 ..Default::default()
4328 },
4329 lsp::Diagnostic {
4330 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4331 severity: Some(DiagnosticSeverity::HINT),
4332 message: "error 1 hint 1".to_string(),
4333 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4334 location: lsp::Location {
4335 uri: buffer_uri.clone(),
4336 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4337 },
4338 message: "original diagnostic".to_string(),
4339 }]),
4340 ..Default::default()
4341 },
4342 lsp::Diagnostic {
4343 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4344 severity: Some(DiagnosticSeverity::ERROR),
4345 message: "error 2".to_string(),
4346 related_information: Some(vec![
4347 lsp::DiagnosticRelatedInformation {
4348 location: lsp::Location {
4349 uri: buffer_uri.clone(),
4350 range: lsp::Range::new(
4351 lsp::Position::new(1, 13),
4352 lsp::Position::new(1, 15),
4353 ),
4354 },
4355 message: "error 2 hint 1".to_string(),
4356 },
4357 lsp::DiagnosticRelatedInformation {
4358 location: lsp::Location {
4359 uri: buffer_uri.clone(),
4360 range: lsp::Range::new(
4361 lsp::Position::new(1, 13),
4362 lsp::Position::new(1, 15),
4363 ),
4364 },
4365 message: "error 2 hint 2".to_string(),
4366 },
4367 ]),
4368 ..Default::default()
4369 },
4370 lsp::Diagnostic {
4371 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4372 severity: Some(DiagnosticSeverity::HINT),
4373 message: "error 2 hint 1".to_string(),
4374 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4375 location: lsp::Location {
4376 uri: buffer_uri.clone(),
4377 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4378 },
4379 message: "original diagnostic".to_string(),
4380 }]),
4381 ..Default::default()
4382 },
4383 lsp::Diagnostic {
4384 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4385 severity: Some(DiagnosticSeverity::HINT),
4386 message: "error 2 hint 2".to_string(),
4387 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4388 location: lsp::Location {
4389 uri: buffer_uri,
4390 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4391 },
4392 message: "original diagnostic".to_string(),
4393 }]),
4394 ..Default::default()
4395 },
4396 ],
4397 version: None,
4398 };
4399
4400 lsp_store
4401 .update(cx, |lsp_store, cx| {
4402 lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
4403 })
4404 .unwrap();
4405 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
4406
4407 assert_eq!(
4408 buffer
4409 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4410 .collect::<Vec<_>>(),
4411 &[
4412 DiagnosticEntry {
4413 range: Point::new(1, 8)..Point::new(1, 9),
4414 diagnostic: Diagnostic {
4415 severity: DiagnosticSeverity::WARNING,
4416 message: "error 1".to_string(),
4417 group_id: 1,
4418 is_primary: true,
4419 ..Default::default()
4420 }
4421 },
4422 DiagnosticEntry {
4423 range: Point::new(1, 8)..Point::new(1, 9),
4424 diagnostic: Diagnostic {
4425 severity: DiagnosticSeverity::HINT,
4426 message: "error 1 hint 1".to_string(),
4427 group_id: 1,
4428 is_primary: false,
4429 ..Default::default()
4430 }
4431 },
4432 DiagnosticEntry {
4433 range: Point::new(1, 13)..Point::new(1, 15),
4434 diagnostic: Diagnostic {
4435 severity: DiagnosticSeverity::HINT,
4436 message: "error 2 hint 1".to_string(),
4437 group_id: 0,
4438 is_primary: false,
4439 ..Default::default()
4440 }
4441 },
4442 DiagnosticEntry {
4443 range: Point::new(1, 13)..Point::new(1, 15),
4444 diagnostic: Diagnostic {
4445 severity: DiagnosticSeverity::HINT,
4446 message: "error 2 hint 2".to_string(),
4447 group_id: 0,
4448 is_primary: false,
4449 ..Default::default()
4450 }
4451 },
4452 DiagnosticEntry {
4453 range: Point::new(2, 8)..Point::new(2, 17),
4454 diagnostic: Diagnostic {
4455 severity: DiagnosticSeverity::ERROR,
4456 message: "error 2".to_string(),
4457 group_id: 0,
4458 is_primary: true,
4459 ..Default::default()
4460 }
4461 }
4462 ]
4463 );
4464
4465 assert_eq!(
4466 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
4467 &[
4468 DiagnosticEntry {
4469 range: Point::new(1, 13)..Point::new(1, 15),
4470 diagnostic: Diagnostic {
4471 severity: DiagnosticSeverity::HINT,
4472 message: "error 2 hint 1".to_string(),
4473 group_id: 0,
4474 is_primary: false,
4475 ..Default::default()
4476 }
4477 },
4478 DiagnosticEntry {
4479 range: Point::new(1, 13)..Point::new(1, 15),
4480 diagnostic: Diagnostic {
4481 severity: DiagnosticSeverity::HINT,
4482 message: "error 2 hint 2".to_string(),
4483 group_id: 0,
4484 is_primary: false,
4485 ..Default::default()
4486 }
4487 },
4488 DiagnosticEntry {
4489 range: Point::new(2, 8)..Point::new(2, 17),
4490 diagnostic: Diagnostic {
4491 severity: DiagnosticSeverity::ERROR,
4492 message: "error 2".to_string(),
4493 group_id: 0,
4494 is_primary: true,
4495 ..Default::default()
4496 }
4497 }
4498 ]
4499 );
4500
4501 assert_eq!(
4502 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
4503 &[
4504 DiagnosticEntry {
4505 range: Point::new(1, 8)..Point::new(1, 9),
4506 diagnostic: Diagnostic {
4507 severity: DiagnosticSeverity::WARNING,
4508 message: "error 1".to_string(),
4509 group_id: 1,
4510 is_primary: true,
4511 ..Default::default()
4512 }
4513 },
4514 DiagnosticEntry {
4515 range: Point::new(1, 8)..Point::new(1, 9),
4516 diagnostic: Diagnostic {
4517 severity: DiagnosticSeverity::HINT,
4518 message: "error 1 hint 1".to_string(),
4519 group_id: 1,
4520 is_primary: false,
4521 ..Default::default()
4522 }
4523 },
4524 ]
4525 );
4526}
4527
4528#[gpui::test]
4529async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
4530 init_test(cx);
4531
4532 let fs = FakeFs::new(cx.executor());
4533 fs.insert_tree(
4534 path!("/dir"),
4535 json!({
4536 "one.rs": "const ONE: usize = 1;",
4537 "two": {
4538 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4539 }
4540
4541 }),
4542 )
4543 .await;
4544 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4545
4546 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4547 language_registry.add(rust_lang());
4548 let watched_paths = lsp::FileOperationRegistrationOptions {
4549 filters: vec![
4550 FileOperationFilter {
4551 scheme: Some("file".to_owned()),
4552 pattern: lsp::FileOperationPattern {
4553 glob: "**/*.rs".to_owned(),
4554 matches: Some(lsp::FileOperationPatternKind::File),
4555 options: None,
4556 },
4557 },
4558 FileOperationFilter {
4559 scheme: Some("file".to_owned()),
4560 pattern: lsp::FileOperationPattern {
4561 glob: "**/**".to_owned(),
4562 matches: Some(lsp::FileOperationPatternKind::Folder),
4563 options: None,
4564 },
4565 },
4566 ],
4567 };
4568 let mut fake_servers = language_registry.register_fake_lsp(
4569 "Rust",
4570 FakeLspAdapter {
4571 capabilities: lsp::ServerCapabilities {
4572 workspace: Some(lsp::WorkspaceServerCapabilities {
4573 workspace_folders: None,
4574 file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
4575 did_rename: Some(watched_paths.clone()),
4576 will_rename: Some(watched_paths),
4577 ..Default::default()
4578 }),
4579 }),
4580 ..Default::default()
4581 },
4582 ..Default::default()
4583 },
4584 );
4585
4586 let _ = project
4587 .update(cx, |project, cx| {
4588 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
4589 })
4590 .await
4591 .unwrap();
4592
4593 let fake_server = fake_servers.next().await.unwrap();
4594 let response = project.update(cx, |project, cx| {
4595 let worktree = project.worktrees(cx).next().unwrap();
4596 let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
4597 project.rename_entry(entry.id, "three.rs".as_ref(), cx)
4598 });
4599 let expected_edit = lsp::WorkspaceEdit {
4600 changes: None,
4601 document_changes: Some(DocumentChanges::Edits({
4602 vec![TextDocumentEdit {
4603 edits: vec![lsp::Edit::Plain(lsp::TextEdit {
4604 range: lsp::Range {
4605 start: lsp::Position {
4606 line: 0,
4607 character: 1,
4608 },
4609 end: lsp::Position {
4610 line: 0,
4611 character: 3,
4612 },
4613 },
4614 new_text: "This is not a drill".to_owned(),
4615 })],
4616 text_document: lsp::OptionalVersionedTextDocumentIdentifier {
4617 uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
4618 version: Some(1337),
4619 },
4620 }]
4621 })),
4622 change_annotations: None,
4623 };
4624 let resolved_workspace_edit = Arc::new(OnceLock::new());
4625 fake_server
4626 .set_request_handler::<WillRenameFiles, _, _>({
4627 let resolved_workspace_edit = resolved_workspace_edit.clone();
4628 let expected_edit = expected_edit.clone();
4629 move |params, _| {
4630 let resolved_workspace_edit = resolved_workspace_edit.clone();
4631 let expected_edit = expected_edit.clone();
4632 async move {
4633 assert_eq!(params.files.len(), 1);
4634 assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
4635 assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
4636 resolved_workspace_edit.set(expected_edit.clone()).unwrap();
4637 Ok(Some(expected_edit))
4638 }
4639 }
4640 })
4641 .next()
4642 .await
4643 .unwrap();
4644 let _ = response.await.unwrap();
4645 fake_server
4646 .handle_notification::<DidRenameFiles, _>(|params, _| {
4647 assert_eq!(params.files.len(), 1);
4648 assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
4649 assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
4650 })
4651 .next()
4652 .await
4653 .unwrap();
4654 assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
4655}
4656
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises symbol rename via LSP: `prepare_rename` resolves the
    // renameable range, then `perform_rename` applies a multi-file
    // `WorkspaceEdit` returned by the fake server.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare-rename at offset 7 (inside `ONE`); the server reports the
    // renameable range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server responds with edits touching both
    // `one.rs` (the definition) and `two.rs` (two usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction must cover both buffers with the edits applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4796
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search must reflect both on-disk file contents and
    // unsaved in-memory buffer edits.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Initially only the on-disk occurrences of "TWO" are found.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit `four.rs` in memory (without saving) so that it now contains
    // two occurrences of "TWO".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search now also matches the dirty (unsaved) buffer contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4871
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Inclusion globs restrict search to matching files; non-matching
    // inclusion patterns are ignored as long as at least one matches.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.ts").to_string(), vec![14..18]),
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
4987
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Exclusion globs remove matching files from search results;
    // non-matching exclusion patterns have no effect.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.rs").to_string(), vec![8..12]),
            (separator!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5103
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    // When a file matches both an inclusion and an exclusion glob,
    // the exclusion wins.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both no exclusions and inclusions match, exclusions should win and return nothing"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Non-matching inclusions and exclusions should not change that."
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/one.ts").to_string(), vec![14..18]),
            (separator!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
5210
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Inclusion globs may be anchored to a worktree root name (restricting
    // results to one worktree) or be root-relative patterns that apply to
    // every worktree.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5305
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Gitignored directories are skipped by default, searched when the
    // "include ignored" flag is set, and inclusion/exclusion globs still
    // apply on top of that.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search (include_ignored = false): ignored dirs are skipped.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project with include_ignored = true: everything is searched.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion and exclusion globs still filter ignored-dir results.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5425
5426#[gpui::test]
5427async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5428 init_test(cx);
5429
5430 let fs = FakeFs::new(cx.executor().clone());
5431 fs.insert_tree(
5432 "/one/two",
5433 json!({
5434 "three": {
5435 "a.txt": "",
5436 "four": {}
5437 },
5438 "c.rs": ""
5439 }),
5440 )
5441 .await;
5442
5443 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5444 project
5445 .update(cx, |project, cx| {
5446 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5447 project.create_entry((id, "b.."), true, cx)
5448 })
5449 .await
5450 .unwrap()
5451 .to_included()
5452 .unwrap();
5453
5454 // Can't create paths outside the project
5455 let result = project
5456 .update(cx, |project, cx| {
5457 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5458 project.create_entry((id, "../../boop"), true, cx)
5459 })
5460 .await;
5461 assert!(result.is_err());
5462
5463 // Can't create paths with '..'
5464 let result = project
5465 .update(cx, |project, cx| {
5466 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5467 project.create_entry((id, "four/../beep"), true, cx)
5468 })
5469 .await;
5470 assert!(result.is_err());
5471
5472 assert_eq!(
5473 fs.paths(true),
5474 vec![
5475 PathBuf::from(path!("/")),
5476 PathBuf::from(path!("/one")),
5477 PathBuf::from(path!("/one/two")),
5478 PathBuf::from(path!("/one/two/c.rs")),
5479 PathBuf::from(path!("/one/two/three")),
5480 PathBuf::from(path!("/one/two/three/a.txt")),
5481 PathBuf::from(path!("/one/two/three/b..")),
5482 PathBuf::from(path!("/one/two/three/four")),
5483 ]
5484 );
5485
5486 // And we cannot open buffers with '..'
5487 let result = project
5488 .update(cx, |project, cx| {
5489 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5490 project.open_buffer((id, "../c.rs"), cx)
5491 })
5492 .await;
5493 assert!(result.is_err())
5494}
5495
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // With several language servers attached to one buffer, a hover request
    // fans out to every server that advertises hover capability; empty
    // responses are dropped and servers without the capability are never
    // queried.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Three servers with hover capability (one of which returns `None`) and
    // one without hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install per-server hover handlers, keyed by server name; panic on
    // duplicate initialization or on a hover sent to the capability-less server.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover and wait until every capable server has been queried.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute results.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5649
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Hover contents consisting solely of empty or whitespace-only strings
    // must be filtered out, yielding no hover blocks at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with three "blank" hover parts: empty, spaces, and newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5722
5723#[gpui::test]
5724async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5725 init_test(cx);
5726
5727 let fs = FakeFs::new(cx.executor());
5728 fs.insert_tree(
5729 path!("/dir"),
5730 json!({
5731 "a.ts": "a",
5732 }),
5733 )
5734 .await;
5735
5736 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5737
5738 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5739 language_registry.add(typescript_lang());
5740 let mut fake_language_servers = language_registry.register_fake_lsp(
5741 "TypeScript",
5742 FakeLspAdapter {
5743 capabilities: lsp::ServerCapabilities {
5744 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5745 ..lsp::ServerCapabilities::default()
5746 },
5747 ..FakeLspAdapter::default()
5748 },
5749 );
5750
5751 let (buffer, _handle) = project
5752 .update(cx, |p, cx| {
5753 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
5754 })
5755 .await
5756 .unwrap();
5757 cx.executor().run_until_parked();
5758
5759 let fake_server = fake_language_servers
5760 .next()
5761 .await
5762 .expect("failed to get the language server");
5763
5764 let mut request_handled = fake_server
5765 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
5766 Ok(Some(vec![
5767 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5768 title: "organize imports".to_string(),
5769 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5770 ..lsp::CodeAction::default()
5771 }),
5772 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5773 title: "fix code".to_string(),
5774 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5775 ..lsp::CodeAction::default()
5776 }),
5777 ]))
5778 });
5779
5780 let code_actions_task = project.update(cx, |project, cx| {
5781 project.code_actions(
5782 &buffer,
5783 0..buffer.read(cx).len(),
5784 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5785 cx,
5786 )
5787 });
5788
5789 let () = request_handled
5790 .next()
5791 .await
5792 .expect("The code action request should have been triggered");
5793
5794 let code_actions = code_actions_task.await.unwrap();
5795 assert_eq!(code_actions.len(), 1);
5796 assert_eq!(
5797 code_actions[0].lsp_action.action_kind(),
5798 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5799 );
5800}
5801
5802#[gpui::test]
5803async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5804 init_test(cx);
5805
5806 let fs = FakeFs::new(cx.executor());
5807 fs.insert_tree(
5808 path!("/dir"),
5809 json!({
5810 "a.tsx": "a",
5811 }),
5812 )
5813 .await;
5814
5815 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5816
5817 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5818 language_registry.add(tsx_lang());
5819 let language_server_names = [
5820 "TypeScriptServer",
5821 "TailwindServer",
5822 "ESLintServer",
5823 "NoActionsCapabilitiesServer",
5824 ];
5825
5826 let mut language_server_rxs = [
5827 language_registry.register_fake_lsp(
5828 "tsx",
5829 FakeLspAdapter {
5830 name: language_server_names[0],
5831 capabilities: lsp::ServerCapabilities {
5832 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5833 ..lsp::ServerCapabilities::default()
5834 },
5835 ..FakeLspAdapter::default()
5836 },
5837 ),
5838 language_registry.register_fake_lsp(
5839 "tsx",
5840 FakeLspAdapter {
5841 name: language_server_names[1],
5842 capabilities: lsp::ServerCapabilities {
5843 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5844 ..lsp::ServerCapabilities::default()
5845 },
5846 ..FakeLspAdapter::default()
5847 },
5848 ),
5849 language_registry.register_fake_lsp(
5850 "tsx",
5851 FakeLspAdapter {
5852 name: language_server_names[2],
5853 capabilities: lsp::ServerCapabilities {
5854 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5855 ..lsp::ServerCapabilities::default()
5856 },
5857 ..FakeLspAdapter::default()
5858 },
5859 ),
5860 language_registry.register_fake_lsp(
5861 "tsx",
5862 FakeLspAdapter {
5863 name: language_server_names[3],
5864 capabilities: lsp::ServerCapabilities {
5865 code_action_provider: None,
5866 ..lsp::ServerCapabilities::default()
5867 },
5868 ..FakeLspAdapter::default()
5869 },
5870 ),
5871 ];
5872
5873 let (buffer, _handle) = project
5874 .update(cx, |p, cx| {
5875 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5876 })
5877 .await
5878 .unwrap();
5879 cx.executor().run_until_parked();
5880
5881 let mut servers_with_actions_requests = HashMap::default();
5882 for i in 0..language_server_names.len() {
5883 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5884 panic!(
5885 "Failed to get language server #{i} with name {}",
5886 &language_server_names[i]
5887 )
5888 });
5889 let new_server_name = new_server.server.name();
5890
5891 assert!(
5892 !servers_with_actions_requests.contains_key(&new_server_name),
5893 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5894 );
5895 match new_server_name.0.as_ref() {
5896 "TailwindServer" | "TypeScriptServer" => {
5897 servers_with_actions_requests.insert(
5898 new_server_name.clone(),
5899 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5900 move |_, _| {
5901 let name = new_server_name.clone();
5902 async move {
5903 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5904 lsp::CodeAction {
5905 title: format!("{name} code action"),
5906 ..lsp::CodeAction::default()
5907 },
5908 )]))
5909 }
5910 },
5911 ),
5912 );
5913 }
5914 "ESLintServer" => {
5915 servers_with_actions_requests.insert(
5916 new_server_name,
5917 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5918 |_, _| async move { Ok(None) },
5919 ),
5920 );
5921 }
5922 "NoActionsCapabilitiesServer" => {
5923 let _never_handled = new_server
5924 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5925 panic!(
5926 "Should not call for code actions server with no corresponding capabilities"
5927 )
5928 });
5929 }
5930 unexpected => panic!("Unexpected server name: {unexpected}"),
5931 }
5932 }
5933
5934 let code_actions_task = project.update(cx, |project, cx| {
5935 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5936 });
5937
5938 // cx.run_until_parked();
5939 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5940 |mut code_actions_request| async move {
5941 code_actions_request
5942 .next()
5943 .await
5944 .expect("All code actions requests should have been triggered")
5945 },
5946 ))
5947 .await;
5948 assert_eq!(
5949 vec!["TailwindServer code action", "TypeScriptServer code action"],
5950 code_actions_task
5951 .await
5952 .unwrap()
5953 .into_iter()
5954 .map(|code_action| code_action.lsp_action.title().to_owned())
5955 .sorted()
5956 .collect::<Vec<_>>(),
5957 "Should receive code actions responses from all related servers with hover capabilities"
5958 );
5959}
5960
// Exercises `Project::move_worktree` across every adjacent and non-adjacent
// move direction (first↔second, second↔third, first↔third), verifying after
// each move that `visible_worktrees` reports the expected order.
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Each file is opened as its own single-file worktree, so the project
    // starts with three visible worktrees in insertion order: [a, b, c].
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    // (note: `worktree_a` is currently in the second slot, `worktree_b` in the first)
    project
        .update(cx, |project, cx| {
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first
    // [b, c, a] -> [a, b, c]
    // (note: `worktree_a` is currently in the third slot, `worktree_b` in the first)
    project
        .update(cx, |project, cx| {
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
6158
// Verifies that a buffer's unstaged diff reflects the difference between the
// buffer contents and the git index, and is recomputed when the index changes.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repository's index so it differs from the working copy.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Initial diff: one added line and one modified line vs. the index.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index; the unstaged diff should shrink to a single added hunk.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6256
// Verifies that a buffer's uncommitted diff compares the buffer against HEAD,
// with each hunk's secondary status tracking whether the change is staged in
// the index — including for files that were deleted from the working copy.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index both contain `deletion.rs`, which is absent on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (HEAD content) picks up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line is unstaged (HasSecondaryHunk); the println
    // modification is already staged, so it carries no secondary hunk.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion that is not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the index no longer contains the file, the deletion reads as staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6434
// Verifies the hunk staging lifecycle: staging is applied optimistically
// (SecondaryHunkRemovalPending), confirmed once the index write completes
// (NoSecondaryHunk), and rolled back when the index write fails. Also checks
// the BufferDiffEvent sequence emitted along the way.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD == index, so every hunk starts out unstaged.
    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It is still shown optimistically as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback also emits a DiffChanged event covering the index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6774
// Regression test (pinned to seeds that previously failed): staging a hunk
// writes to the git index asynchronously, so hunks staged while earlier FS
// events are still buffered must each keep their own pending secondary status
// (`SecondaryHunkRemovalPending`) until the corresponding index update is
// actually observed via an FS event.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every difference from the
    // working copy below is an unstaged hunk.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. With events paused, the hunk must immediately be
    // reported as pending rather than fully staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are now pending; the third is still untouched.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6967
// Stages — and then unstages — each of 100 hunks with an individual call in
// quick succession, verifying that all hunks report a pending secondary status
// immediately after the calls, and settle to the expected final state once the
// asynchronous index writes complete.
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line (0, 5, 10, ...) is replaced in the working copy,
    // yielding 100 single-line modified hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Mutable expectation table: only the status column changes between the
    // phases below; the ranges and texts stay fixed.
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        // Before any IO completes, every hunk must be pending removal of its
        // secondary (unstaged) hunk.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
7113
7114#[gpui::test]
7115async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7116 init_test(cx);
7117
7118 let committed_contents = r#"
7119 fn main() {
7120 println!("hello from HEAD");
7121 }
7122 "#
7123 .unindent();
7124 let file_contents = r#"
7125 fn main() {
7126 println!("hello from the working copy");
7127 }
7128 "#
7129 .unindent();
7130
7131 let fs = FakeFs::new(cx.background_executor.clone());
7132 fs.insert_tree(
7133 "/dir",
7134 json!({
7135 ".git": {},
7136 "src": {
7137 "main.rs": file_contents,
7138 }
7139 }),
7140 )
7141 .await;
7142
7143 fs.set_head_for_repo(
7144 Path::new("/dir/.git"),
7145 &[("src/main.rs".into(), committed_contents.clone())],
7146 );
7147 fs.set_index_for_repo(
7148 Path::new("/dir/.git"),
7149 &[("src/main.rs".into(), committed_contents.clone())],
7150 );
7151
7152 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7153
7154 let buffer = project
7155 .update(cx, |project, cx| {
7156 project.open_local_buffer("/dir/src/main.rs", cx)
7157 })
7158 .await
7159 .unwrap();
7160 let uncommitted_diff = project
7161 .update(cx, |project, cx| {
7162 project.open_uncommitted_diff(buffer.clone(), cx)
7163 })
7164 .await
7165 .unwrap();
7166
7167 cx.run_until_parked();
7168 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7169 let snapshot = buffer.read(cx).snapshot();
7170 assert_hunks(
7171 uncommitted_diff.hunks(&snapshot, cx),
7172 &snapshot,
7173 &uncommitted_diff.base_text_string().unwrap(),
7174 &[(
7175 1..2,
7176 " println!(\"hello from HEAD\");\n",
7177 " println!(\"hello from the working copy\");\n",
7178 DiffHunkStatus {
7179 kind: DiffHunkStatusKind::Modified,
7180 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7181 },
7182 )],
7183 );
7184 });
7185}
7186
// Checks that project paths resolve to the *innermost* enclosing repository
// (`dep1` inside `dir1`), that paths outside any repository resolve to `None`,
// and that deleting a `.git` directory removes the mapping.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (worktree-relative path, expected (repo workdir, repo-relative path)).
        // `c.txt` lies outside both repositories; `a.txt` must map to the
        // nested `dep1` repository, not the enclosing `dir1` one.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer `.git` directory should leave files under `dir1`
    // (outside of `dep1`) with no associated repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7275
7276#[gpui::test]
7277async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7278 init_test(cx);
7279 let fs = FakeFs::new(cx.background_executor.clone());
7280 fs.insert_tree(
7281 path!("/root"),
7282 json!({
7283 "home": {
7284 ".git": {},
7285 "project": {
7286 "a.txt": "A"
7287 },
7288 },
7289 }),
7290 )
7291 .await;
7292 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7293
7294 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7295 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7296 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7297 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7298 .await;
7299 tree.flush_fs_events(cx).await;
7300
7301 project.read_with(cx, |project, cx| {
7302 let containing = project
7303 .git_store()
7304 .read(cx)
7305 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7306 assert!(containing.is_none());
7307 });
7308
7309 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7310 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7311 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7312 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7313 .await;
7314 tree.flush_fs_events(cx).await;
7315
7316 project.read_with(cx, |project, cx| {
7317 let containing = project
7318 .git_store()
7319 .read(cx)
7320 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7321 assert_eq!(
7322 containing
7323 .unwrap()
7324 .0
7325 .read(cx)
7326 .work_directory_abs_path
7327 .as_ref(),
7328 Path::new(path!("/root/home"))
7329 );
7330 });
7331}
7332
// End-to-end status tracking against a real git repository: verifies the
// initial scan, a subsequent working-copy edit, a commit, and deletions of
// both tracked and untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Untracked (created after the commit, never staged)
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is unchanged and therefore absent from the status list.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should now appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and remove d.txt from the index, clearing the
    // statuses for a.txt, c.txt, and d.txt.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7458
// Verifies two status post-processing rules: nested repositories are excluded
// from the outer repository's status list, and a file deleted in the index but
// present in the working copy surfaces as a combined `DA` status.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Both repositories are discovered; select the outer `project` one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
7520
// Opens a worktree rooted in a *subfolder* of a repository and verifies that
// statuses are still computed against the enclosing repository — including
// when git state changes happen entirely outside the worktree root.
#[gpui::test]
async fn test_repository_subfolder_git_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "my-repo": {
            // .git folder will go here
            "a.txt": "a",
            "sub-folder-1": {
                "sub-folder-2": {
                    "c.txt": "cc",
                    "d": {
                        "e.txt": "eee"
                    }
                },
            }
        },
    }));

    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Set up git repository before creating the worktree.
    let git_repo_work_dir = root.path().join("my-repo");
    let repo = git_init(git_repo_work_dir.as_path());
    git_add(C_TXT, &repo);
    git_commit("Initial commit", &repo);

    // Open the worktree in subfolder
    let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2");

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path().join(project_root).as_path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository root is the enclosing `my-repo` directory, not the
        // subfolder the worktree was opened at.
        assert_eq!(
            repository.work_directory_abs_path.canonicalize().unwrap(),
            root.path().join("my-repo").canonicalize().unwrap()
        );

        // c.txt is committed (no status); e.txt has never been added.
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Now we simulate FS events, but ONLY in the .git folder that's outside
    // of our project root.
    // Meaning: we don't produce any FS events for files inside the project.
    git_add(E_TXT, &repo);
    git_commit("Second commit", &repo);
    tree.flush_fs_events_in_root_git_repository(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
7597
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: compiled out entirely via `#[cfg(any())]` (which is never true) until
// the flakiness is resolved. The test drives a real git cherry-pick into a
// conflict and checks that `merge_conflicts` tracks — and then clears — the
// conflicted path.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Commit conflicting edits to a.txt on two branches, then cherry-pick one
    // onto the other to produce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git itself reports a conflict in progress.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7678
// Rewriting `.gitignore` must update both the worktree's ignored flags and the
// git statuses: a newly-ignored file loses its status, and a newly-unignored,
// staged file gains one.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored flags have swapped, and b.txt now shows as staged (Added).
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
7745
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// Renaming a repository's entire work directory on disk must update the
// tracked `work_directory_abs_path` while preserving the cached statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is never added (untracked).
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the open repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
7825
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
//
// Broad end-to-end coverage of status tracking against a real repository:
// initial scan, working-copy edits, commits, reset/stash, gitignore rules,
// and renames of directories containing untracked files.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added; a.txt and e.txt are committed.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt's modification was stashed; b.txt was un-added; e.txt was
        // edited; the write under `target/` is ignored and produces no status.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files, and extend the ignore rules to cover f.txt as well.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A brand-new file inside a brand-new nested directory shows up untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory must carry the untracked status over to
    // the file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8013
// Repositories should only be discovered for paths covered by *visible*
// worktrees: adding an invisible (single-file) worktree later must not cause
// its ancestor repository to be reported.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Nested repositories: an outer repo at dir1 and an inner one at dir1/dep1.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Open a project rooted at the inner repository only.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    visible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // Only the inner repo is expected; the outer repo at dir1 is not covered
    // by any visible worktree.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add a single-file (non-visible) worktree for a file that lives inside
    // the *outer* repository.
    let (invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    invisible_worktree
        .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;

    // The invisible worktree must not surface the outer repository: the
    // reported set is unchanged.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8075
// Exercises git status + gitignore handling across rescans: ignored entries
// carry `is_ignored` but no git status, and files newly added to the index
// are reported with `StatusCode::Added`.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so every entry (including ignored ones) is
    // visible to the scanner for this test.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // A repo whose own .gitignore hides `ignored-dir`, plus an ancestor-level
    // .gitignore above the repo root naming the `ancestor-ignored-file*` files.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    // Seed HEAD and the index so `.gitignore` and `tracked-file1` start out
    // committed and unmodified.
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Explicitly refresh the ignored directory so its children are present in
    // the worktree snapshot for the assertions below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: the committed file has no status (unmodified); the
    // ancestor-ignored file gets no status and is not flagged ignored; only
    // contents of `ignored-dir` are flagged ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it in the index (but not HEAD), so it
    // should show up as `Added`.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    // Also create new files matching the ancestor ignore and repo ignore rules.
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Staged-but-uncommitted file reports `Added`.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is always treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8210
// Two project roots inside the same git repository must resolve to a single
// repository entry (deduplicated by work directory), not one per worktree.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    // One repo at /root/project containing two sibling directories.
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open the project with both children as separate worktree roots.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository should be reported, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8256
8257async fn search(
8258 project: &Entity<Project>,
8259 query: SearchQuery,
8260 cx: &mut gpui::TestAppContext,
8261) -> Result<HashMap<String, Vec<Range<usize>>>> {
8262 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8263 let mut results = HashMap::default();
8264 while let Ok(search_result) = search_rx.recv().await {
8265 match search_result {
8266 SearchResult::Buffer { buffer, ranges } => {
8267 results.entry(buffer).or_insert(ranges);
8268 }
8269 SearchResult::LimitReached => {}
8270 }
8271 }
8272 Ok(results
8273 .into_iter()
8274 .map(|(buffer, ranges)| {
8275 buffer.update(cx, |buffer, cx| {
8276 let path = buffer
8277 .file()
8278 .unwrap()
8279 .full_path(cx)
8280 .to_string_lossy()
8281 .to_string();
8282 let ranges = ranges
8283 .into_iter()
8284 .map(|range| range.to_offset(buffer))
8285 .collect::<Vec<_>>();
8286 (path, ranges)
8287 })
8288 })
8289 .collect())
8290}
8291
// Shared setup called at the start of every test in this file: installs the
// global settings store and initializes the crates the tests rely on.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    // Enable logging only when RUST_LOG is set, keeping test output quiet by
    // default. `try_init().ok()` tolerates repeated calls across tests.
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        // NOTE(review): the settings store is registered as a global first —
        // presumably the subsequent `init` calls read settings from it, so
        // the ordering here likely matters; confirm before reordering.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
8305
8306fn json_lang() -> Arc<Language> {
8307 Arc::new(Language::new(
8308 LanguageConfig {
8309 name: "JSON".into(),
8310 matcher: LanguageMatcher {
8311 path_suffixes: vec!["json".to_string()],
8312 ..Default::default()
8313 },
8314 ..Default::default()
8315 },
8316 None,
8317 ))
8318}
8319
8320fn js_lang() -> Arc<Language> {
8321 Arc::new(Language::new(
8322 LanguageConfig {
8323 name: "JavaScript".into(),
8324 matcher: LanguageMatcher {
8325 path_suffixes: vec!["js".to_string()],
8326 ..Default::default()
8327 },
8328 ..Default::default()
8329 },
8330 None,
8331 ))
8332}
8333
8334fn rust_lang() -> Arc<Language> {
8335 Arc::new(Language::new(
8336 LanguageConfig {
8337 name: "Rust".into(),
8338 matcher: LanguageMatcher {
8339 path_suffixes: vec!["rs".to_string()],
8340 ..Default::default()
8341 },
8342 ..Default::default()
8343 },
8344 Some(tree_sitter_rust::LANGUAGE.into()),
8345 ))
8346}
8347
8348fn typescript_lang() -> Arc<Language> {
8349 Arc::new(Language::new(
8350 LanguageConfig {
8351 name: "TypeScript".into(),
8352 matcher: LanguageMatcher {
8353 path_suffixes: vec!["ts".to_string()],
8354 ..Default::default()
8355 },
8356 ..Default::default()
8357 },
8358 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8359 ))
8360}
8361
8362fn tsx_lang() -> Arc<Language> {
8363 Arc::new(Language::new(
8364 LanguageConfig {
8365 name: "tsx".into(),
8366 matcher: LanguageMatcher {
8367 path_suffixes: vec!["tsx".to_string()],
8368 ..Default::default()
8369 },
8370 ..Default::default()
8371 },
8372 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8373 ))
8374}
8375
8376fn get_all_tasks(
8377 project: &Entity<Project>,
8378 task_contexts: &TaskContexts,
8379 cx: &mut App,
8380) -> Vec<(TaskSourceKind, ResolvedTask)> {
8381 let (mut old, new) = project.update(cx, |project, cx| {
8382 project
8383 .task_store
8384 .read(cx)
8385 .task_inventory()
8386 .unwrap()
8387 .read(cx)
8388 .used_and_current_resolved_tasks(task_contexts, cx)
8389 });
8390 old.extend(new);
8391 old
8392}
8393
8394#[track_caller]
8395fn assert_entry_git_state(
8396 tree: &Worktree,
8397 repository: &Repository,
8398 path: &str,
8399 index_status: Option<StatusCode>,
8400 is_ignored: bool,
8401) {
8402 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8403 let entry = tree
8404 .entry_for_path(path)
8405 .unwrap_or_else(|| panic!("entry {path} not found"));
8406 let status = repository
8407 .status_for_path(&path.into())
8408 .map(|entry| entry.status);
8409 let expected = index_status.map(|index_status| {
8410 TrackedStatus {
8411 index_status,
8412 worktree_status: StatusCode::Unmodified,
8413 }
8414 .into()
8415 });
8416 assert_eq!(
8417 status, expected,
8418 "expected {path} to have git status: {expected:?}"
8419 );
8420 assert_eq!(
8421 entry.is_ignored, is_ignored,
8422 "expected {path} to have is_ignored: {is_ignored}"
8423 );
8424}
8425
8426#[track_caller]
8427fn git_init(path: &Path) -> git2::Repository {
8428 let mut init_opts = RepositoryInitOptions::new();
8429 init_opts.initial_head("main");
8430 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8431}
8432
8433#[track_caller]
8434fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8435 let path = path.as_ref();
8436 let mut index = repo.index().expect("Failed to get index");
8437 index.add_path(path).expect("Failed to add file");
8438 index.write().expect("Failed to write index");
8439}
8440
8441#[track_caller]
8442fn git_remove_index(path: &Path, repo: &git2::Repository) {
8443 let mut index = repo.index().expect("Failed to get index");
8444 index.remove_path(path).expect("Failed to add file");
8445 index.write().expect("Failed to write index");
8446}
8447
8448#[track_caller]
8449fn git_commit(msg: &'static str, repo: &git2::Repository) {
8450 use git2::Signature;
8451
8452 let signature = Signature::now("test", "test@zed.dev").unwrap();
8453 let oid = repo.index().unwrap().write_tree().unwrap();
8454 let tree = repo.find_tree(oid).unwrap();
8455 if let Ok(head) = repo.head() {
8456 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8457
8458 let parent_commit = parent_obj.as_commit().unwrap();
8459
8460 repo.commit(
8461 Some("HEAD"),
8462 &signature,
8463 &signature,
8464 msg,
8465 &tree,
8466 &[parent_commit],
8467 )
8468 .expect("Failed to commit with parent");
8469 } else {
8470 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8471 .expect("Failed to commit");
8472 }
8473}
8474
// Applies `commit` onto the current HEAD, like `git cherry-pick`.
// NOTE: `#[cfg(any())]` compiles this out entirely; it's kept for tests that
// may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8480
8481#[track_caller]
8482fn git_stash(repo: &mut git2::Repository) {
8483 use git2::Signature;
8484
8485 let signature = Signature::now("test", "test@zed.dev").unwrap();
8486 repo.stash_save(&signature, "N/A", None)
8487 .expect("Failed to stash");
8488}
8489
8490#[track_caller]
8491fn git_reset(offset: usize, repo: &git2::Repository) {
8492 let head = repo.head().expect("Couldn't get repo head");
8493 let object = head.peel(git2::ObjectType::Commit).unwrap();
8494 let commit = object.as_commit().unwrap();
8495 let new_head = commit
8496 .parents()
8497 .inspect(|parnet| {
8498 parnet.message();
8499 })
8500 .nth(offset)
8501 .expect("Not enough history");
8502 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8503 .expect("Could not reset");
8504}
8505
// Creates branch `name` pointing at the current HEAD commit (no checkout).
// NOTE: `#[cfg(any())]` compiles this out entirely; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fix: the expect message previously said "Failed to commit" — copied
    // from `git_commit` and misleading for a branch-creation failure.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
8516
// Points HEAD at `name` (a refname, e.g. "refs/heads/main") and checks out
// that revision's tree into the working directory.
// NOTE: `#[cfg(any())]` compiles this out entirely; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8523
// Snapshots `git status` for the whole repository: a map from entry path to
// its raw status flags.
// NOTE: `#[cfg(any())]` compiles this out entirely; kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}