1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks,
9};
10use fs::FakeFs;
11use futures::{StreamExt, future};
12use git::{
13 repository::RepoPath,
14 status::{StatusCode, TrackedStatus},
15};
16use git2::RepositoryInitOptions;
17use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
18use http_client::Url;
19use language::{
20 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
21 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
22 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
23 tree_sitter_rust, tree_sitter_typescript,
24};
25use lsp::{
26 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
27 WillRenameFiles, notification::DidRenameFiles,
28};
29use parking_lot::Mutex;
30use paths::tasks_file;
31use postage::stream::Stream as _;
32use pretty_assertions::{assert_eq, assert_matches};
33use serde_json::json;
34#[cfg(not(windows))]
35use std::os;
36use std::{mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
37use task::{ResolvedTask, TaskContext};
38use unindent::Unindent as _;
39use util::{
40 TryFutureExt as _, assert_set_eq, path,
41 paths::PathMatcher,
42 separator,
43 test::{TempTree, marked_text_offsets},
44 uri,
45};
46use worktree::WorktreeModelHandle as _;
47
48#[gpui::test]
49async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
50 cx.executor().allow_parking();
51
52 let (tx, mut rx) = futures::channel::mpsc::unbounded();
53 let _thread = std::thread::spawn(move || {
54 #[cfg(not(target_os = "windows"))]
55 std::fs::metadata("/tmp").unwrap();
56 #[cfg(target_os = "windows")]
57 std::fs::metadata("C:/Windows").unwrap();
58 std::thread::sleep(Duration::from_millis(1000));
59 tx.unbounded_send(1).unwrap();
60 });
61 rx.next().await.unwrap();
62}
63
64#[gpui::test]
65async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let io_task = smol::unblock(move || {
69 println!("sleeping on thread {:?}", std::thread::current().id());
70 std::thread::sleep(Duration::from_millis(10));
71 1
72 });
73
74 let task = cx.foreground_executor().spawn(async move {
75 io_task.await;
76 });
77
78 task.await;
79}
80
81#[cfg(not(windows))]
82#[gpui::test]
83async fn test_symlinks(cx: &mut gpui::TestAppContext) {
84 init_test(cx);
85 cx.executor().allow_parking();
86
87 let dir = TempTree::new(json!({
88 "root": {
89 "apple": "",
90 "banana": {
91 "carrot": {
92 "date": "",
93 "endive": "",
94 }
95 },
96 "fennel": {
97 "grape": "",
98 }
99 }
100 }));
101
102 let root_link_path = dir.path().join("root_link");
103 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
104 os::unix::fs::symlink(
105 dir.path().join("root/fennel"),
106 dir.path().join("root/finnochio"),
107 )
108 .unwrap();
109
110 let project = Project::test(
111 Arc::new(RealFs::new(None, cx.executor())),
112 [root_link_path.as_ref()],
113 cx,
114 )
115 .await;
116
117 project.update(cx, |project, cx| {
118 let tree = project.worktrees(cx).next().unwrap().read(cx);
119 assert_eq!(tree.file_count(), 5);
120 assert_eq!(
121 tree.inode_for_path("fennel/grape"),
122 tree.inode_for_path("finnochio/grape")
123 );
124 });
125}
126
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies the settings cascade for .editorconfig files: editorconfig
    // values override .zed/settings.json, nested editorconfig files override
    // parent ones, "tab_width" is used when "indent_size" is absent, and
    // globs only apply to matching file extensions.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n    A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n    B\n}",
        },
        "c.js": "def c\n  C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into the fake FS so the project sees it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a file in this worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs",
        // so it keeps the .zed/settings.json tab_size of 8.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
216
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory `.zed/settings.json` and `.zed/tasks.json`
    // handling: nested settings override the worktree root's, tasks from every
    // `.zed` directory are surfaced, and tasks added via the global tasks file
    // appear alongside them — with most-recently-scheduled tasks sorted first.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution requires an active worktree context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // The nested b/.zed/settings.json (tab_size 2) must override the
            // root .zed/settings.json (tab_size 8) for files under b/.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; on Windows the directory separator
    // in the id_base is escaped differently.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Schedule the root worktree task, then install a task via the global
    // tasks file; both actions should be reflected in subsequent queries.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The just-scheduled root task is now sorted first; the newly added
    // global task appears last, carrying its env from the global tasks file.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
422
423#[gpui::test]
424async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
425 init_test(cx);
426 TaskStore::init(None);
427
428 let fs = FakeFs::new(cx.executor());
429 fs.insert_tree(
430 path!("/dir"),
431 json!({
432 ".zed": {
433 "tasks.json": r#"[{
434 "label": "test worktree root",
435 "command": "echo $ZED_WORKTREE_ROOT"
436 }]"#,
437 },
438 "a": {
439 "a.rs": "fn a() {\n A\n}"
440 },
441 }),
442 )
443 .await;
444
445 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
446 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
447
448 cx.executor().run_until_parked();
449 let worktree_id = cx.update(|cx| {
450 project.update(cx, |project, cx| {
451 project.worktrees(cx).next().unwrap().read(cx).id()
452 })
453 });
454
455 let active_non_worktree_item_tasks = cx.update(|cx| {
456 get_all_tasks(
457 &project,
458 &TaskContexts {
459 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
460 active_worktree_context: None,
461 other_worktree_contexts: Vec::new(),
462 },
463 cx,
464 )
465 });
466 assert!(
467 active_non_worktree_item_tasks.is_empty(),
468 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
469 );
470
471 let active_worktree_tasks = cx.update(|cx| {
472 get_all_tasks(
473 &project,
474 &TaskContexts {
475 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
476 active_worktree_context: Some((worktree_id, {
477 let mut worktree_context = TaskContext::default();
478 worktree_context
479 .task_variables
480 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
481 worktree_context
482 })),
483 other_worktree_contexts: Vec::new(),
484 },
485 cx,
486 )
487 });
488 assert_eq!(
489 active_worktree_tasks
490 .into_iter()
491 .map(|(source_kind, task)| {
492 let resolved = task.resolved.unwrap();
493 (source_kind, resolved.command)
494 })
495 .collect::<Vec<_>>(),
496 vec![(
497 TaskSourceKind::Worktree {
498 id: worktree_id,
499 directory_in_worktree: PathBuf::from(separator!(".zed")),
500 id_base: if cfg!(windows) {
501 "local worktree tasks from directory \".zed\"".into()
502 } else {
503 "local worktree tasks from directory \".zed\"".into()
504 },
505 },
506 "echo /dir".to_string(),
507 )]
508 );
509}
510
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // Exercises the full lifecycle of language-server management: lazy server
    // startup on first matching buffer, per-language routing of open/change/
    // close notifications, save notifications fanning out to all servers,
    // server handoff when a rename changes a file's language, diagnostic
    // clearing across that handoff, version resets, and server restarts.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust and JSON servers advertise distinct completion triggers so we
    // can tell which server configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer's
    // language changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the replacements
    // come up.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
912
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies `workspace/didChangeWatchedFiles` support: ignored directories
    // are only scanned once a server registers a watcher that matches them,
    // and subsequent FS mutations are reported to the server if and only if
    // they match one of its registered glob patterns.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Snapshot the read_dir count so we can measure how much extra scanning
    // the watcher registration below triggers.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact path, a glob over src, and a recursive
    // glob inside the ignored target/y directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate change notifications, sorted by URI for stable assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1112
#[gpui::test]
// Verifies that diagnostics are routed to the correct buffer when a project is
// made of two single-file worktrees: publishing for each file's URI must update
// only that file's buffer, with the severity preserved per-file.
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own (single-file) worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file from the same (fake) server id:
    // an ERROR for a.rs and a WARNING for b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer should show only its own diagnostic, on the variable name.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1214
1215#[gpui::test]
1216async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1217 init_test(cx);
1218
1219 let fs = FakeFs::new(cx.executor());
1220 fs.insert_tree(
1221 path!("/root"),
1222 json!({
1223 "dir": {
1224 ".git": {
1225 "HEAD": "ref: refs/heads/main",
1226 },
1227 ".gitignore": "b.rs",
1228 "a.rs": "let a = 1;",
1229 "b.rs": "let b = 2;",
1230 },
1231 "other.rs": "let b = c;"
1232 }),
1233 )
1234 .await;
1235
1236 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1237 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1238 let (worktree, _) = project
1239 .update(cx, |project, cx| {
1240 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1241 })
1242 .await
1243 .unwrap();
1244 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1245
1246 let (worktree, _) = project
1247 .update(cx, |project, cx| {
1248 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1249 })
1250 .await
1251 .unwrap();
1252 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1253
1254 let server_id = LanguageServerId(0);
1255 lsp_store.update(cx, |lsp_store, cx| {
1256 lsp_store
1257 .update_diagnostics(
1258 server_id,
1259 lsp::PublishDiagnosticsParams {
1260 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1261 version: None,
1262 diagnostics: vec![lsp::Diagnostic {
1263 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1264 severity: Some(lsp::DiagnosticSeverity::ERROR),
1265 message: "unused variable 'b'".to_string(),
1266 ..Default::default()
1267 }],
1268 },
1269 &[],
1270 cx,
1271 )
1272 .unwrap();
1273 lsp_store
1274 .update_diagnostics(
1275 server_id,
1276 lsp::PublishDiagnosticsParams {
1277 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1278 version: None,
1279 diagnostics: vec![lsp::Diagnostic {
1280 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1281 severity: Some(lsp::DiagnosticSeverity::ERROR),
1282 message: "unknown variable 'c'".to_string(),
1283 ..Default::default()
1284 }],
1285 },
1286 &[],
1287 cx,
1288 )
1289 .unwrap();
1290 });
1291
1292 let main_ignored_buffer = project
1293 .update(cx, |project, cx| {
1294 project.open_buffer((main_worktree_id, "b.rs"), cx)
1295 })
1296 .await
1297 .unwrap();
1298 main_ignored_buffer.update(cx, |buffer, _| {
1299 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1300 assert_eq!(
1301 chunks
1302 .iter()
1303 .map(|(s, d)| (s.as_str(), *d))
1304 .collect::<Vec<_>>(),
1305 &[
1306 ("let ", None),
1307 ("b", Some(DiagnosticSeverity::ERROR)),
1308 (" = 2;", None),
1309 ],
1310 "Gigitnored buffers should still get in-buffer diagnostics",
1311 );
1312 });
1313 let other_buffer = project
1314 .update(cx, |project, cx| {
1315 project.open_buffer((other_worktree_id, ""), cx)
1316 })
1317 .await
1318 .unwrap();
1319 other_buffer.update(cx, |buffer, _| {
1320 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1321 assert_eq!(
1322 chunks
1323 .iter()
1324 .map(|(s, d)| (s.as_str(), *d))
1325 .collect::<Vec<_>>(),
1326 &[
1327 ("let b = ", None),
1328 ("c", Some(DiagnosticSeverity::ERROR)),
1329 (";", None),
1330 ],
1331 "Buffers from hidden projects should still get in-buffer diagnostics"
1332 );
1333 });
1334
1335 project.update(cx, |project, cx| {
1336 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1337 assert_eq!(
1338 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1339 vec![(
1340 ProjectPath {
1341 worktree_id: main_worktree_id,
1342 path: Arc::from(Path::new("b.rs")),
1343 },
1344 server_id,
1345 DiagnosticSummary {
1346 error_count: 1,
1347 warning_count: 0,
1348 }
1349 )]
1350 );
1351 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1352 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1353 });
1354}
1355
#[gpui::test]
// Verifies the event sequence emitted while a server reports disk-based
// diagnostics: started/updated/finished events fire in order around the
// server's progress token, and re-publishing empty diagnostics produces only
// one `DiagnosticsUpdated` event (the second identical publish is a no-op).
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the configured token marks disk-based
    // diagnostics as in progress.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // The published diagnostic is visible in the (not-yet-opened) buffer.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical (empty) publish must not emit another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1491
#[gpui::test]
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not leave the project stuck in a "diagnosing"
// state: the new server's progress lifecycle alone determines completion, even
// though the old server's progress token was never ended.
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The replacement server gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1578
#[gpui::test]
// Verifies that diagnostics published by a language server are cleared from
// both the buffer and the project summary when that server is restarted.
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Confirm the diagnostic landed in the buffer and the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1659
#[gpui::test]
// Verifies that a diagnostics report carrying a bogus (too-high) buffer version
// does not corrupt version tracking: after a restart, the buffer is re-opened
// with the server at version 0, not the stale/invalid version.
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The replacement server should see the document opened at version 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1699
#[gpui::test]
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel only for tokens whose progress was started as
// cancellable; non-cancellable work is left alone.
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Start two progress tasks: one NOT cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should receive a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1764
#[gpui::test]
// Verifies that toggling `enable_language_server` per language starts and stops
// exactly the affected server: disabling Rust exits only the Rust server;
// re-enabling Rust while disabling JavaScript restarts the former (which
// re-opens its buffer) and exits the latter.
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts its language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1882
#[gpui::test(iterations = 3)]
// Verifies that diagnostics published against an *older* buffer version are
// transformed through the edits made since that version: ranges move with the
// text, overlapping diagnostics are layered correctly in highlighted chunks,
// and disk-based diagnostics are translated across unsaved edits.
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every row index by 2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider WARNING sorts before the narrower ERROR it contains;
        // the ERROR highlight wins inside the overlap.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2168
#[gpui::test]
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// widened to cover an adjacent character so the diagnostic is visible.
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Insert two zero-width diagnostics: one mid-line (before `;`) and one at
    // the very end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2241
2242#[gpui::test]
2243async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2244 init_test(cx);
2245
2246 let fs = FakeFs::new(cx.executor());
2247 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2248 .await;
2249
2250 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2251 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2252
2253 lsp_store.update(cx, |lsp_store, cx| {
2254 lsp_store
2255 .update_diagnostic_entries(
2256 LanguageServerId(0),
2257 Path::new("/dir/a.rs").to_owned(),
2258 None,
2259 vec![DiagnosticEntry {
2260 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2261 diagnostic: Diagnostic {
2262 severity: DiagnosticSeverity::ERROR,
2263 is_primary: true,
2264 message: "syntax error a1".to_string(),
2265 ..Default::default()
2266 },
2267 }],
2268 cx,
2269 )
2270 .unwrap();
2271 lsp_store
2272 .update_diagnostic_entries(
2273 LanguageServerId(1),
2274 Path::new("/dir/a.rs").to_owned(),
2275 None,
2276 vec![DiagnosticEntry {
2277 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2278 diagnostic: Diagnostic {
2279 severity: DiagnosticSeverity::ERROR,
2280 is_primary: true,
2281 message: "syntax error b1".to_string(),
2282 ..Default::default()
2283 },
2284 }],
2285 cx,
2286 )
2287 .unwrap();
2288
2289 assert_eq!(
2290 lsp_store.diagnostic_summary(false, cx),
2291 DiagnosticSummary {
2292 error_count: 2,
2293 warning_count: 0,
2294 }
2295 );
2296 });
2297}
2298
// Verifies that edits computed by a language server against a *past* version
// of the buffer are transformed so they apply cleanly to the current content.
// The buffer is edited after the server opened it, and `edits_from_lsp` is
// called with the stale document version recorded at open time.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Record the document version the server observed at open time; the edits
    // below will be interpreted relative to this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The ranges in these edits refer to coordinates in the buffer as the
    // server last saw it (version captured above), not the current content.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the local edits made after
    // the server computed its edits, while landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2453
// Verifies that a large, diff-shaped set of LSP edits (replace + reinsert +
// delete that together express a tiny change) is collapsed by
// `edits_from_lsp` into a minimal set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four diff-style edits above collapse to two minimal edits:
        // rewrite the first import, delete the second import line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2564
// Verifies that `edits_from_lsp` tolerates malformed server edits: unordered
// edits, an inverted range (start after end), and a range that points past the
// end of the file all get normalized into a valid, minimal set of edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits as in the well-formed adjacent-lines test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2671
2672fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2673 buffer: &Buffer,
2674 range: Range<T>,
2675) -> Vec<(String, Option<DiagnosticSeverity>)> {
2676 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2677 for chunk in buffer.snapshot().chunks(range, true) {
2678 if chunks.last().map_or(false, |prev_chunk| {
2679 prev_chunk.1 == chunk.diagnostic_severity
2680 }) {
2681 chunks.last_mut().unwrap().0.push_str(chunk.text);
2682 } else {
2683 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2684 }
2685 }
2686 chunks
2687}
2688
// Verifies go-to-definition into a file outside the project's visible
// worktree: the target file is loaded into a new, invisible worktree, and
// that worktree is released again once the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is not open.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers the definition request with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs now lives in a second worktree, flagged invisible (false).
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition released the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2786
// Verifies that when a completion item supplies a `text_edit`, that edit's
// new text and range win over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the
    // handler is awaited below to serve exactly one request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The text_edit's new_text and range are used verbatim.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
2864
// Verifies completion-list item defaults (`itemDefaults.edit_range`): when an
// item has no `text_edit` of its own, the default edit range is applied, and
// the replacement text falls back from `insert_text` to `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is used; the range comes from the list's edit_range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label itself is inserted.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
2990
// Verifies completion handling when the server provides neither a per-item
// `text_edit` nor a default edit range: the replaced range is inferred from
// the text around the cursor, and the inserted text falls back from
// `insert_text` to `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range covers "fqn" — the 3 characters before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is placed just before the closing quote (text.len() - 1).
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replaced range covers "cmp" — the word before the cursor, inside
    // the string literal.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3086
// Verifies that carriage returns in a completion's insert_text ("\r" and
// "\r\n") are normalized to "\n" before the text is offered to the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes a bare "\r" with a "\r\n" sequence.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    // Both line-ending forms arrive as plain "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3149
// Verifies the command-based code action flow: the server returns an action
// with no edits, resolution attaches a command, executing the command makes
// the server send a `workspace/applyEdit` request back to the editor, and the
// resulting buffer edits are captured in the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated applyEdit: prepend "X" at the start
                    // of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3290
3291#[gpui::test(iterations = 10)]
3292async fn test_save_file(cx: &mut gpui::TestAppContext) {
3293 init_test(cx);
3294
3295 let fs = FakeFs::new(cx.executor());
3296 fs.insert_tree(
3297 path!("/dir"),
3298 json!({
3299 "file1": "the old contents",
3300 }),
3301 )
3302 .await;
3303
3304 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3305 let buffer = project
3306 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3307 .await
3308 .unwrap();
3309 buffer.update(cx, |buffer, cx| {
3310 assert_eq!(buffer.text(), "the old contents");
3311 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3312 });
3313
3314 project
3315 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3316 .await
3317 .unwrap();
3318
3319 let new_text = fs
3320 .load(Path::new(path!("/dir/file1")))
3321 .await
3322 .unwrap()
3323 .replace("\r\n", "\n");
3324 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3325}
3326
3327#[gpui::test(iterations = 30)]
3328async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3329 init_test(cx);
3330
3331 let fs = FakeFs::new(cx.executor().clone());
3332 fs.insert_tree(
3333 path!("/dir"),
3334 json!({
3335 "file1": "the original contents",
3336 }),
3337 )
3338 .await;
3339
3340 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3341 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3342 let buffer = project
3343 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3344 .await
3345 .unwrap();
3346
3347 // Simulate buffer diffs being slow, so that they don't complete before
3348 // the next file change occurs.
3349 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3350
3351 // Change the buffer's file on disk, and then wait for the file change
3352 // to be detected by the worktree, so that the buffer starts reloading.
3353 fs.save(
3354 path!("/dir/file1").as_ref(),
3355 &"the first contents".into(),
3356 Default::default(),
3357 )
3358 .await
3359 .unwrap();
3360 worktree.next_event(cx).await;
3361
3362 // Change the buffer's file again. Depending on the random seed, the
3363 // previous file change may still be in progress.
3364 fs.save(
3365 path!("/dir/file1").as_ref(),
3366 &"the second contents".into(),
3367 Default::default(),
3368 )
3369 .await
3370 .unwrap();
3371 worktree.next_event(cx).await;
3372
3373 cx.executor().run_until_parked();
3374 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3375 buffer.read_with(cx, |buffer, _| {
3376 assert_eq!(buffer.text(), on_disk_text);
3377 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3378 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3379 });
3380}
3381
3382#[gpui::test(iterations = 30)]
3383async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3384 init_test(cx);
3385
3386 let fs = FakeFs::new(cx.executor().clone());
3387 fs.insert_tree(
3388 path!("/dir"),
3389 json!({
3390 "file1": "the original contents",
3391 }),
3392 )
3393 .await;
3394
3395 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3396 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3397 let buffer = project
3398 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3399 .await
3400 .unwrap();
3401
3402 // Simulate buffer diffs being slow, so that they don't complete before
3403 // the next file change occurs.
3404 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3405
3406 // Change the buffer's file on disk, and then wait for the file change
3407 // to be detected by the worktree, so that the buffer starts reloading.
3408 fs.save(
3409 path!("/dir/file1").as_ref(),
3410 &"the first contents".into(),
3411 Default::default(),
3412 )
3413 .await
3414 .unwrap();
3415 worktree.next_event(cx).await;
3416
3417 cx.executor()
3418 .spawn(cx.executor().simulate_random_delay())
3419 .await;
3420
3421 // Perform a noop edit, causing the buffer's version to increase.
3422 buffer.update(cx, |buffer, cx| {
3423 buffer.edit([(0..0, " ")], None, cx);
3424 buffer.undo(cx);
3425 });
3426
3427 cx.executor().run_until_parked();
3428 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3429 buffer.read_with(cx, |buffer, _| {
3430 let buffer_text = buffer.text();
3431 if buffer_text == on_disk_text {
3432 assert!(
3433 !buffer.is_dirty() && !buffer.has_conflict(),
3434 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3435 );
3436 }
3437 // If the file change occurred while the buffer was processing the first
3438 // change, the buffer will be in a conflicting state.
3439 else {
3440 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3441 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3442 }
3443 });
3444}
3445
3446#[gpui::test]
3447async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3448 init_test(cx);
3449
3450 let fs = FakeFs::new(cx.executor());
3451 fs.insert_tree(
3452 path!("/dir"),
3453 json!({
3454 "file1": "the old contents",
3455 }),
3456 )
3457 .await;
3458
3459 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3460 let buffer = project
3461 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3462 .await
3463 .unwrap();
3464 buffer.update(cx, |buffer, cx| {
3465 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3466 });
3467
3468 project
3469 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3470 .await
3471 .unwrap();
3472
3473 let new_text = fs
3474 .load(Path::new(path!("/dir/file1")))
3475 .await
3476 .unwrap()
3477 .replace("\r\n", "\n");
3478 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3479}
3480
3481#[gpui::test]
3482async fn test_save_as(cx: &mut gpui::TestAppContext) {
3483 init_test(cx);
3484
3485 let fs = FakeFs::new(cx.executor());
3486 fs.insert_tree("/dir", json!({})).await;
3487
3488 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3489
3490 let languages = project.update(cx, |project, _| project.languages().clone());
3491 languages.add(rust_lang());
3492
3493 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3494 buffer.update(cx, |buffer, cx| {
3495 buffer.edit([(0..0, "abc")], None, cx);
3496 assert!(buffer.is_dirty());
3497 assert!(!buffer.has_conflict());
3498 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3499 });
3500 project
3501 .update(cx, |project, cx| {
3502 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3503 let path = ProjectPath {
3504 worktree_id,
3505 path: Arc::from(Path::new("file1.rs")),
3506 };
3507 project.save_buffer_as(buffer.clone(), path, cx)
3508 })
3509 .await
3510 .unwrap();
3511 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3512
3513 cx.executor().run_until_parked();
3514 buffer.update(cx, |buffer, cx| {
3515 assert_eq!(
3516 buffer.file().unwrap().full_path(cx),
3517 Path::new("dir/file1.rs")
3518 );
3519 assert!(!buffer.is_dirty());
3520 assert!(!buffer.has_conflict());
3521 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3522 });
3523
3524 let opened_buffer = project
3525 .update(cx, |project, cx| {
3526 project.open_local_buffer("/dir/file1.rs", cx)
3527 })
3528 .await
3529 .unwrap();
3530 assert_eq!(opened_buffer, buffer);
3531}
3532
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Exercises worktree rescanning against the REAL filesystem (hence
    // `retries = 5` and `allow_parking`): entries keep their ids across
    // renames, open buffers follow their files, and a remote replica of the
    // worktree converges after applying the streamed updates.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    // Real temporary directory on disk that the worktree will scan.
    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up a worktree entry's id, panicking if the path is absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Record entry ids before the renames so we can check they're preserved.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits, to replay on the remote.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects all the renames/deletions.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids are stable across renames (including a parent-dir rename).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its old path but reports `DiskState::Deleted`.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    // Replay the captured update stream onto the remote replica.
    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3698
3699#[gpui::test(iterations = 10)]
3700async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3701 init_test(cx);
3702
3703 let fs = FakeFs::new(cx.executor());
3704 fs.insert_tree(
3705 path!("/dir"),
3706 json!({
3707 "a": {
3708 "file1": "",
3709 }
3710 }),
3711 )
3712 .await;
3713
3714 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3715 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3716 let tree_id = tree.update(cx, |tree, _| tree.id());
3717
3718 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3719 project.update(cx, |project, cx| {
3720 let tree = project.worktrees(cx).next().unwrap();
3721 tree.read(cx)
3722 .entry_for_path(path)
3723 .unwrap_or_else(|| panic!("no entry for path {}", path))
3724 .id
3725 })
3726 };
3727
3728 let dir_id = id_for_path("a", cx);
3729 let file_id = id_for_path("a/file1", cx);
3730 let buffer = project
3731 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3732 .await
3733 .unwrap();
3734 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3735
3736 project
3737 .update(cx, |project, cx| {
3738 project.rename_entry(dir_id, Path::new("b"), cx)
3739 })
3740 .unwrap()
3741 .await
3742 .to_included()
3743 .unwrap();
3744 cx.executor().run_until_parked();
3745
3746 assert_eq!(id_for_path("b", cx), dir_id);
3747 assert_eq!(id_for_path("b/file1", cx), file_id);
3748 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3749}
3750
3751#[gpui::test]
3752async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3753 init_test(cx);
3754
3755 let fs = FakeFs::new(cx.executor());
3756 fs.insert_tree(
3757 "/dir",
3758 json!({
3759 "a.txt": "a-contents",
3760 "b.txt": "b-contents",
3761 }),
3762 )
3763 .await;
3764
3765 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3766
3767 // Spawn multiple tasks to open paths, repeating some paths.
3768 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3769 (
3770 p.open_local_buffer("/dir/a.txt", cx),
3771 p.open_local_buffer("/dir/b.txt", cx),
3772 p.open_local_buffer("/dir/a.txt", cx),
3773 )
3774 });
3775
3776 let buffer_a_1 = buffer_a_1.await.unwrap();
3777 let buffer_a_2 = buffer_a_2.await.unwrap();
3778 let buffer_b = buffer_b.await.unwrap();
3779 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3780 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3781
3782 // There is only one buffer per path.
3783 let buffer_a_id = buffer_a_1.entity_id();
3784 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3785
3786 // Open the same path again while it is still open.
3787 drop(buffer_a_1);
3788 let buffer_a_3 = project
3789 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3790 .await
3791 .unwrap();
3792
3793 // There's still only one buffer per path.
3794 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3795}
3796
3797#[gpui::test]
3798async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3799 init_test(cx);
3800
3801 let fs = FakeFs::new(cx.executor());
3802 fs.insert_tree(
3803 path!("/dir"),
3804 json!({
3805 "file1": "abc",
3806 "file2": "def",
3807 "file3": "ghi",
3808 }),
3809 )
3810 .await;
3811
3812 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3813
3814 let buffer1 = project
3815 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3816 .await
3817 .unwrap();
3818 let events = Arc::new(Mutex::new(Vec::new()));
3819
3820 // initially, the buffer isn't dirty.
3821 buffer1.update(cx, |buffer, cx| {
3822 cx.subscribe(&buffer1, {
3823 let events = events.clone();
3824 move |_, _, event, _| match event {
3825 BufferEvent::Operation { .. } => {}
3826 _ => events.lock().push(event.clone()),
3827 }
3828 })
3829 .detach();
3830
3831 assert!(!buffer.is_dirty());
3832 assert!(events.lock().is_empty());
3833
3834 buffer.edit([(1..2, "")], None, cx);
3835 });
3836
3837 // after the first edit, the buffer is dirty, and emits a dirtied event.
3838 buffer1.update(cx, |buffer, cx| {
3839 assert!(buffer.text() == "ac");
3840 assert!(buffer.is_dirty());
3841 assert_eq!(
3842 *events.lock(),
3843 &[
3844 language::BufferEvent::Edited,
3845 language::BufferEvent::DirtyChanged
3846 ]
3847 );
3848 events.lock().clear();
3849 buffer.did_save(
3850 buffer.version(),
3851 buffer.file().unwrap().disk_state().mtime(),
3852 cx,
3853 );
3854 });
3855
3856 // after saving, the buffer is not dirty, and emits a saved event.
3857 buffer1.update(cx, |buffer, cx| {
3858 assert!(!buffer.is_dirty());
3859 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3860 events.lock().clear();
3861
3862 buffer.edit([(1..1, "B")], None, cx);
3863 buffer.edit([(2..2, "D")], None, cx);
3864 });
3865
3866 // after editing again, the buffer is dirty, and emits another dirty event.
3867 buffer1.update(cx, |buffer, cx| {
3868 assert!(buffer.text() == "aBDc");
3869 assert!(buffer.is_dirty());
3870 assert_eq!(
3871 *events.lock(),
3872 &[
3873 language::BufferEvent::Edited,
3874 language::BufferEvent::DirtyChanged,
3875 language::BufferEvent::Edited,
3876 ],
3877 );
3878 events.lock().clear();
3879
3880 // After restoring the buffer to its previously-saved state,
3881 // the buffer is not considered dirty anymore.
3882 buffer.edit([(1..3, "")], None, cx);
3883 assert!(buffer.text() == "ac");
3884 assert!(!buffer.is_dirty());
3885 });
3886
3887 assert_eq!(
3888 *events.lock(),
3889 &[
3890 language::BufferEvent::Edited,
3891 language::BufferEvent::DirtyChanged
3892 ]
3893 );
3894
3895 // When a file is deleted, it is not considered dirty.
3896 let events = Arc::new(Mutex::new(Vec::new()));
3897 let buffer2 = project
3898 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
3899 .await
3900 .unwrap();
3901 buffer2.update(cx, |_, cx| {
3902 cx.subscribe(&buffer2, {
3903 let events = events.clone();
3904 move |_, _, event, _| match event {
3905 BufferEvent::Operation { .. } => {}
3906 _ => events.lock().push(event.clone()),
3907 }
3908 })
3909 .detach();
3910 });
3911
3912 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
3913 .await
3914 .unwrap();
3915 cx.executor().run_until_parked();
3916 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3917 assert_eq!(
3918 mem::take(&mut *events.lock()),
3919 &[language::BufferEvent::FileHandleChanged]
3920 );
3921
3922 // Buffer becomes dirty when edited.
3923 buffer2.update(cx, |buffer, cx| {
3924 buffer.edit([(2..3, "")], None, cx);
3925 assert_eq!(buffer.is_dirty(), true);
3926 });
3927 assert_eq!(
3928 mem::take(&mut *events.lock()),
3929 &[
3930 language::BufferEvent::Edited,
3931 language::BufferEvent::DirtyChanged
3932 ]
3933 );
3934
3935 // Buffer becomes clean again when all of its content is removed, because
3936 // the file was deleted.
3937 buffer2.update(cx, |buffer, cx| {
3938 buffer.edit([(0..2, "")], None, cx);
3939 assert_eq!(buffer.is_empty(), true);
3940 assert_eq!(buffer.is_dirty(), false);
3941 });
3942 assert_eq!(
3943 *events.lock(),
3944 &[
3945 language::BufferEvent::Edited,
3946 language::BufferEvent::DirtyChanged
3947 ]
3948 );
3949
3950 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3951 let events = Arc::new(Mutex::new(Vec::new()));
3952 let buffer3 = project
3953 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
3954 .await
3955 .unwrap();
3956 buffer3.update(cx, |_, cx| {
3957 cx.subscribe(&buffer3, {
3958 let events = events.clone();
3959 move |_, _, event, _| match event {
3960 BufferEvent::Operation { .. } => {}
3961 _ => events.lock().push(event.clone()),
3962 }
3963 })
3964 .detach();
3965 });
3966
3967 buffer3.update(cx, |buffer, cx| {
3968 buffer.edit([(0..0, "x")], None, cx);
3969 });
3970 events.lock().clear();
3971 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
3972 .await
3973 .unwrap();
3974 cx.executor().run_until_parked();
3975 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
3976 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3977}
3978
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer responds to on-disk changes: an unmodified buffer
    // reloads (diffing old vs. new contents so anchors survive), while a
    // modified buffer keeps its edits and flags a conflict instead.
    init_test(cx);

    // The ˇ markers denote offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create an anchor at each marked offset in the original text.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // New contents with markers at the positions the anchors are expected to
    // land on after the reload-diff is applied.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed edits rather than being reset.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4061
4062#[gpui::test]
4063async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4064 init_test(cx);
4065
4066 let fs = FakeFs::new(cx.executor());
4067 fs.insert_tree(
4068 path!("/dir"),
4069 json!({
4070 "file1": "a\nb\nc\n",
4071 "file2": "one\r\ntwo\r\nthree\r\n",
4072 }),
4073 )
4074 .await;
4075
4076 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4077 let buffer1 = project
4078 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4079 .await
4080 .unwrap();
4081 let buffer2 = project
4082 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4083 .await
4084 .unwrap();
4085
4086 buffer1.update(cx, |buffer, _| {
4087 assert_eq!(buffer.text(), "a\nb\nc\n");
4088 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4089 });
4090 buffer2.update(cx, |buffer, _| {
4091 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4092 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4093 });
4094
4095 // Change a file's line endings on disk from unix to windows. The buffer's
4096 // state updates correctly.
4097 fs.save(
4098 path!("/dir/file1").as_ref(),
4099 &"aaa\nb\nc\n".into(),
4100 LineEnding::Windows,
4101 )
4102 .await
4103 .unwrap();
4104 cx.executor().run_until_parked();
4105 buffer1.update(cx, |buffer, _| {
4106 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4107 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4108 });
4109
4110 // Save a file with windows line endings. The file is written correctly.
4111 buffer2.update(cx, |buffer, cx| {
4112 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4113 });
4114 project
4115 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4116 .await
4117 .unwrap();
4118 assert_eq!(
4119 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4120 "one\r\ntwo\r\nthree\r\nfour\r\n",
4121 );
4122}
4123
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics connected via `related_information` are
    // grouped: each primary diagnostic and its hints share a `group_id`, and
    // `diagnostic_group` returns a whole group ordered by position.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two primaries ("error 1",
    // "error 2") whose hints cross-reference them via related_information —
    // mimicking how rust-analyzer reports borrowck errors.
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics over the whole buffer: ordered by range, with hints
    // sharing their primary's group_id ("error 2" gets group 0, "error 1"
    // gets group 1), and `is_primary` marking the primaries.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" family: both hints plus the primary, in
    // position order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" family: the primary warning plus its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4366
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a worktree entry must drive the LSP file-operation protocol:
    // `workspace/willRenameFiles` is sent first (and the WorkspaceEdit it
    // returns is applied), then `workspace/didRenameFiles` is notified after
    // the rename completes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers: every `.rs` file,
    // plus all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer starts the fake language server for this worktree.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the returned future stays pending until the
    // server answers `willRenameFiles` below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Edit the server will return from `willRenameFiles`; it targets a file
    // (two/two.rs) other than the one being renamed.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set exactly once by the willRename handler, so we can assert at the end
    // that the handler actually ran and saw the expected edit.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the old and new URIs of the
                    // single entry being renamed.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename resolves, the server must receive `didRenameFiles`
    // with the same old/new URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4495
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end symbol rename over LSP: `textDocument/prepareRename`
    // validates the range, then `textDocument/rename` returns a multi-file
    // WorkspaceEdit which the project applies to buffers (opening two.rs,
    // which was not open, as part of the transaction).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Offset 7 is inside "ONE" in `const ONE: usize = 1;`.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The server-provided rename range should cover "ONE" (offsets 6..9).
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            // Respond with edits spanning both files: the declaration in
            // one.rs and the two references in two.rs.
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The result maps each edited buffer to its transaction; both files
    // should reflect the rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4635
4636#[gpui::test]
4637async fn test_search(cx: &mut gpui::TestAppContext) {
4638 init_test(cx);
4639
4640 let fs = FakeFs::new(cx.executor());
4641 fs.insert_tree(
4642 path!("/dir"),
4643 json!({
4644 "one.rs": "const ONE: usize = 1;",
4645 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4646 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4647 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4648 }),
4649 )
4650 .await;
4651 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4652 assert_eq!(
4653 search(
4654 &project,
4655 SearchQuery::text(
4656 "TWO",
4657 false,
4658 true,
4659 false,
4660 Default::default(),
4661 Default::default(),
4662 None
4663 )
4664 .unwrap(),
4665 cx
4666 )
4667 .await
4668 .unwrap(),
4669 HashMap::from_iter([
4670 (separator!("dir/two.rs").to_string(), vec![6..9]),
4671 (separator!("dir/three.rs").to_string(), vec![37..40])
4672 ])
4673 );
4674
4675 let buffer_4 = project
4676 .update(cx, |project, cx| {
4677 project.open_local_buffer(path!("/dir/four.rs"), cx)
4678 })
4679 .await
4680 .unwrap();
4681 buffer_4.update(cx, |buffer, cx| {
4682 let text = "two::TWO";
4683 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4684 });
4685
4686 assert_eq!(
4687 search(
4688 &project,
4689 SearchQuery::text(
4690 "TWO",
4691 false,
4692 true,
4693 false,
4694 Default::default(),
4695 Default::default(),
4696 None,
4697 )
4698 .unwrap(),
4699 cx
4700 )
4701 .await
4702 .unwrap(),
4703 HashMap::from_iter([
4704 (separator!("dir/two.rs").to_string(), vec![6..9]),
4705 (separator!("dir/three.rs").to_string(), vec![37..40]),
4706 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
4707 ])
4708 );
4709}
4710
4711#[gpui::test]
4712async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4713 init_test(cx);
4714
4715 let search_query = "file";
4716
4717 let fs = FakeFs::new(cx.executor());
4718 fs.insert_tree(
4719 path!("/dir"),
4720 json!({
4721 "one.rs": r#"// Rust file one"#,
4722 "one.ts": r#"// TypeScript file one"#,
4723 "two.rs": r#"// Rust file two"#,
4724 "two.ts": r#"// TypeScript file two"#,
4725 }),
4726 )
4727 .await;
4728 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4729
4730 assert!(
4731 search(
4732 &project,
4733 SearchQuery::text(
4734 search_query,
4735 false,
4736 true,
4737 false,
4738 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4739 Default::default(),
4740 None
4741 )
4742 .unwrap(),
4743 cx
4744 )
4745 .await
4746 .unwrap()
4747 .is_empty(),
4748 "If no inclusions match, no files should be returned"
4749 );
4750
4751 assert_eq!(
4752 search(
4753 &project,
4754 SearchQuery::text(
4755 search_query,
4756 false,
4757 true,
4758 false,
4759 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4760 Default::default(),
4761 None
4762 )
4763 .unwrap(),
4764 cx
4765 )
4766 .await
4767 .unwrap(),
4768 HashMap::from_iter([
4769 (separator!("dir/one.rs").to_string(), vec![8..12]),
4770 (separator!("dir/two.rs").to_string(), vec![8..12]),
4771 ]),
4772 "Rust only search should give only Rust files"
4773 );
4774
4775 assert_eq!(
4776 search(
4777 &project,
4778 SearchQuery::text(
4779 search_query,
4780 false,
4781 true,
4782 false,
4783 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4784 Default::default(),
4785 None,
4786 )
4787 .unwrap(),
4788 cx
4789 )
4790 .await
4791 .unwrap(),
4792 HashMap::from_iter([
4793 (separator!("dir/one.ts").to_string(), vec![14..18]),
4794 (separator!("dir/two.ts").to_string(), vec![14..18]),
4795 ]),
4796 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4797 );
4798
4799 assert_eq!(
4800 search(
4801 &project,
4802 SearchQuery::text(
4803 search_query,
4804 false,
4805 true,
4806 false,
4807 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
4808 .unwrap(),
4809 Default::default(),
4810 None,
4811 )
4812 .unwrap(),
4813 cx
4814 )
4815 .await
4816 .unwrap(),
4817 HashMap::from_iter([
4818 (separator!("dir/two.ts").to_string(), vec![14..18]),
4819 (separator!("dir/one.rs").to_string(), vec![8..12]),
4820 (separator!("dir/one.ts").to_string(), vec![14..18]),
4821 (separator!("dir/two.rs").to_string(), vec![8..12]),
4822 ]),
4823 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4824 );
4825}
4826
4827#[gpui::test]
4828async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4829 init_test(cx);
4830
4831 let search_query = "file";
4832
4833 let fs = FakeFs::new(cx.executor());
4834 fs.insert_tree(
4835 path!("/dir"),
4836 json!({
4837 "one.rs": r#"// Rust file one"#,
4838 "one.ts": r#"// TypeScript file one"#,
4839 "two.rs": r#"// Rust file two"#,
4840 "two.ts": r#"// TypeScript file two"#,
4841 }),
4842 )
4843 .await;
4844 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4845
4846 assert_eq!(
4847 search(
4848 &project,
4849 SearchQuery::text(
4850 search_query,
4851 false,
4852 true,
4853 false,
4854 Default::default(),
4855 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4856 None,
4857 )
4858 .unwrap(),
4859 cx
4860 )
4861 .await
4862 .unwrap(),
4863 HashMap::from_iter([
4864 (separator!("dir/one.rs").to_string(), vec![8..12]),
4865 (separator!("dir/one.ts").to_string(), vec![14..18]),
4866 (separator!("dir/two.rs").to_string(), vec![8..12]),
4867 (separator!("dir/two.ts").to_string(), vec![14..18]),
4868 ]),
4869 "If no exclusions match, all files should be returned"
4870 );
4871
4872 assert_eq!(
4873 search(
4874 &project,
4875 SearchQuery::text(
4876 search_query,
4877 false,
4878 true,
4879 false,
4880 Default::default(),
4881 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4882 None,
4883 )
4884 .unwrap(),
4885 cx
4886 )
4887 .await
4888 .unwrap(),
4889 HashMap::from_iter([
4890 (separator!("dir/one.ts").to_string(), vec![14..18]),
4891 (separator!("dir/two.ts").to_string(), vec![14..18]),
4892 ]),
4893 "Rust exclusion search should give only TypeScript files"
4894 );
4895
4896 assert_eq!(
4897 search(
4898 &project,
4899 SearchQuery::text(
4900 search_query,
4901 false,
4902 true,
4903 false,
4904 Default::default(),
4905 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4906 None,
4907 )
4908 .unwrap(),
4909 cx
4910 )
4911 .await
4912 .unwrap(),
4913 HashMap::from_iter([
4914 (separator!("dir/one.rs").to_string(), vec![8..12]),
4915 (separator!("dir/two.rs").to_string(), vec![8..12]),
4916 ]),
4917 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4918 );
4919
4920 assert!(
4921 search(
4922 &project,
4923 SearchQuery::text(
4924 search_query,
4925 false,
4926 true,
4927 false,
4928 Default::default(),
4929 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
4930 .unwrap(),
4931 None,
4932 )
4933 .unwrap(),
4934 cx
4935 )
4936 .await
4937 .unwrap()
4938 .is_empty(),
4939 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4940 );
4941}
4942
4943#[gpui::test]
4944async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4945 init_test(cx);
4946
4947 let search_query = "file";
4948
4949 let fs = FakeFs::new(cx.executor());
4950 fs.insert_tree(
4951 path!("/dir"),
4952 json!({
4953 "one.rs": r#"// Rust file one"#,
4954 "one.ts": r#"// TypeScript file one"#,
4955 "two.rs": r#"// Rust file two"#,
4956 "two.ts": r#"// TypeScript file two"#,
4957 }),
4958 )
4959 .await;
4960 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4961
4962 assert!(
4963 search(
4964 &project,
4965 SearchQuery::text(
4966 search_query,
4967 false,
4968 true,
4969 false,
4970 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4971 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4972 None,
4973 )
4974 .unwrap(),
4975 cx
4976 )
4977 .await
4978 .unwrap()
4979 .is_empty(),
4980 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4981 );
4982
4983 assert!(
4984 search(
4985 &project,
4986 SearchQuery::text(
4987 search_query,
4988 false,
4989 true,
4990 false,
4991 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4992 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4993 None,
4994 )
4995 .unwrap(),
4996 cx
4997 )
4998 .await
4999 .unwrap()
5000 .is_empty(),
5001 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5002 );
5003
5004 assert!(
5005 search(
5006 &project,
5007 SearchQuery::text(
5008 search_query,
5009 false,
5010 true,
5011 false,
5012 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5013 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5014 None,
5015 )
5016 .unwrap(),
5017 cx
5018 )
5019 .await
5020 .unwrap()
5021 .is_empty(),
5022 "Non-matching inclusions and exclusions should not change that."
5023 );
5024
5025 assert_eq!(
5026 search(
5027 &project,
5028 SearchQuery::text(
5029 search_query,
5030 false,
5031 true,
5032 false,
5033 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5034 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5035 None,
5036 )
5037 .unwrap(),
5038 cx
5039 )
5040 .await
5041 .unwrap(),
5042 HashMap::from_iter([
5043 (separator!("dir/one.ts").to_string(), vec![14..18]),
5044 (separator!("dir/two.ts").to_string(), vec![14..18]),
5045 ]),
5046 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5047 );
5048}
5049
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Inclusion globs can be anchored at a worktree name to scope results to
    // that worktree, or be bare patterns that apply across all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    // Single project spanning both worktrees.
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Glob anchored to worktree-a matches only that worktree's Rust file.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same, anchored to worktree-b.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unanchored glob applies to every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5144
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Search behavior around .gitignore: ignored directories are skipped by
    // default, searched when the "include ignored" flag is set, and inclusion
    // globs can reach into an ignored directory.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Default flags: ignored dirs (target/, node_modules/) must be skipped.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is built per query, presumably to avoid
    // state carried over from the previous search — confirm if this matters.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Fourth flag set: ignored files are searched too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/package.json").to_string(), vec![8..11]),
            (separator!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                separator!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                separator!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                separator!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                separator!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion glob inside the ignored directory, combined with a TS
    // exclusion, narrows the result to a single package.json.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            separator!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
5264
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Entry creation: names containing dots (like "b..") are allowed, but any
    // path that escapes the worktree or contains a ".." component is rejected,
    // both for creating entries and for opening buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The project root is /one/two/three; /one/two/c.rs sits outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a legal file name (dots are not a path traversal here).
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only "b.." was created; the rejected paths left no trace on disk.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
5334
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Four fake servers are attached to the same language; hover must query
    // exactly those with hover capability, drop the server that returns None,
    // never query the one without the capability, and aggregate the rest.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // First three servers advertise hover support; the last does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server, keyed by server name so we can
    // later await every handler that is expected to fire.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two answer with a hover that names the server, so the
            // final assertion can attribute each response.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // ESLint is queried but contributes nothing.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Must never be queried: it advertised no hover capability.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Fire the hover, then drive every registered handler to completion
    // before collecting the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5488
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover whose parts are all empty or whitespace-only should produce no
    // hover entries at all, rather than blank popups.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Server responds with only blank parts: "", spaces, and newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Fire the hover and wait until the handler has actually served it.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5561
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Requesting code actions with an explicit kinds filter should return
    // only actions of those kinds, even when the server offers more.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Server offers two actions of different kinds; only one kind is
    // requested below.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask only for "source.organizeImports" actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The SOURCE_FIX_ALL action must have been filtered out.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5640
5641#[gpui::test]
5642async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5643 init_test(cx);
5644
5645 let fs = FakeFs::new(cx.executor());
5646 fs.insert_tree(
5647 path!("/dir"),
5648 json!({
5649 "a.tsx": "a",
5650 }),
5651 )
5652 .await;
5653
5654 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5655
5656 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5657 language_registry.add(tsx_lang());
5658 let language_server_names = [
5659 "TypeScriptServer",
5660 "TailwindServer",
5661 "ESLintServer",
5662 "NoActionsCapabilitiesServer",
5663 ];
5664
5665 let mut language_server_rxs = [
5666 language_registry.register_fake_lsp(
5667 "tsx",
5668 FakeLspAdapter {
5669 name: language_server_names[0],
5670 capabilities: lsp::ServerCapabilities {
5671 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5672 ..lsp::ServerCapabilities::default()
5673 },
5674 ..FakeLspAdapter::default()
5675 },
5676 ),
5677 language_registry.register_fake_lsp(
5678 "tsx",
5679 FakeLspAdapter {
5680 name: language_server_names[1],
5681 capabilities: lsp::ServerCapabilities {
5682 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5683 ..lsp::ServerCapabilities::default()
5684 },
5685 ..FakeLspAdapter::default()
5686 },
5687 ),
5688 language_registry.register_fake_lsp(
5689 "tsx",
5690 FakeLspAdapter {
5691 name: language_server_names[2],
5692 capabilities: lsp::ServerCapabilities {
5693 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5694 ..lsp::ServerCapabilities::default()
5695 },
5696 ..FakeLspAdapter::default()
5697 },
5698 ),
5699 language_registry.register_fake_lsp(
5700 "tsx",
5701 FakeLspAdapter {
5702 name: language_server_names[3],
5703 capabilities: lsp::ServerCapabilities {
5704 code_action_provider: None,
5705 ..lsp::ServerCapabilities::default()
5706 },
5707 ..FakeLspAdapter::default()
5708 },
5709 ),
5710 ];
5711
5712 let (buffer, _handle) = project
5713 .update(cx, |p, cx| {
5714 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5715 })
5716 .await
5717 .unwrap();
5718 cx.executor().run_until_parked();
5719
5720 let mut servers_with_actions_requests = HashMap::default();
5721 for i in 0..language_server_names.len() {
5722 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5723 panic!(
5724 "Failed to get language server #{i} with name {}",
5725 &language_server_names[i]
5726 )
5727 });
5728 let new_server_name = new_server.server.name();
5729
5730 assert!(
5731 !servers_with_actions_requests.contains_key(&new_server_name),
5732 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5733 );
5734 match new_server_name.0.as_ref() {
5735 "TailwindServer" | "TypeScriptServer" => {
5736 servers_with_actions_requests.insert(
5737 new_server_name.clone(),
5738 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5739 move |_, _| {
5740 let name = new_server_name.clone();
5741 async move {
5742 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5743 lsp::CodeAction {
5744 title: format!("{name} code action"),
5745 ..lsp::CodeAction::default()
5746 },
5747 )]))
5748 }
5749 },
5750 ),
5751 );
5752 }
5753 "ESLintServer" => {
5754 servers_with_actions_requests.insert(
5755 new_server_name,
5756 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5757 |_, _| async move { Ok(None) },
5758 ),
5759 );
5760 }
5761 "NoActionsCapabilitiesServer" => {
5762 let _never_handled = new_server
5763 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5764 panic!(
5765 "Should not call for code actions server with no corresponding capabilities"
5766 )
5767 });
5768 }
5769 unexpected => panic!("Unexpected server name: {unexpected}"),
5770 }
5771 }
5772
5773 let code_actions_task = project.update(cx, |project, cx| {
5774 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5775 });
5776
5777 // cx.run_until_parked();
5778 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5779 |mut code_actions_request| async move {
5780 code_actions_request
5781 .next()
5782 .await
5783 .expect("All code actions requests should have been triggered")
5784 },
5785 ))
5786 .await;
5787 assert_eq!(
5788 vec!["TailwindServer code action", "TypeScriptServer code action"],
5789 code_actions_task
5790 .await
5791 .unwrap()
5792 .into_iter()
5793 .map(|code_action| code_action.lsp_action.title().to_owned())
5794 .sorted()
5795 .collect::<Vec<_>>(),
5796 "Should receive code actions responses from all related servers with hover capabilities"
5797 );
5798}
5799
5800#[gpui::test]
5801async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5802 init_test(cx);
5803
5804 let fs = FakeFs::new(cx.executor());
5805 fs.insert_tree(
5806 "/dir",
5807 json!({
5808 "a.rs": "let a = 1;",
5809 "b.rs": "let b = 2;",
5810 "c.rs": "let c = 2;",
5811 }),
5812 )
5813 .await;
5814
5815 let project = Project::test(
5816 fs,
5817 [
5818 "/dir/a.rs".as_ref(),
5819 "/dir/b.rs".as_ref(),
5820 "/dir/c.rs".as_ref(),
5821 ],
5822 cx,
5823 )
5824 .await;
5825
5826 // check the initial state and get the worktrees
5827 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5828 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5829 assert_eq!(worktrees.len(), 3);
5830
5831 let worktree_a = worktrees[0].read(cx);
5832 let worktree_b = worktrees[1].read(cx);
5833 let worktree_c = worktrees[2].read(cx);
5834
5835 // check they start in the right order
5836 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5837 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5838 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5839
5840 (
5841 worktrees[0].clone(),
5842 worktrees[1].clone(),
5843 worktrees[2].clone(),
5844 )
5845 });
5846
5847 // move first worktree to after the second
5848 // [a, b, c] -> [b, a, c]
5849 project
5850 .update(cx, |project, cx| {
5851 let first = worktree_a.read(cx);
5852 let second = worktree_b.read(cx);
5853 project.move_worktree(first.id(), second.id(), cx)
5854 })
5855 .expect("moving first after second");
5856
5857 // check the state after moving
5858 project.update(cx, |project, cx| {
5859 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5860 assert_eq!(worktrees.len(), 3);
5861
5862 let first = worktrees[0].read(cx);
5863 let second = worktrees[1].read(cx);
5864 let third = worktrees[2].read(cx);
5865
5866 // check they are now in the right order
5867 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5868 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5869 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5870 });
5871
5872 // move the second worktree to before the first
5873 // [b, a, c] -> [a, b, c]
5874 project
5875 .update(cx, |project, cx| {
5876 let second = worktree_a.read(cx);
5877 let first = worktree_b.read(cx);
5878 project.move_worktree(first.id(), second.id(), cx)
5879 })
5880 .expect("moving second before first");
5881
5882 // check the state after moving
5883 project.update(cx, |project, cx| {
5884 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5885 assert_eq!(worktrees.len(), 3);
5886
5887 let first = worktrees[0].read(cx);
5888 let second = worktrees[1].read(cx);
5889 let third = worktrees[2].read(cx);
5890
5891 // check they are now in the right order
5892 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5893 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5894 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5895 });
5896
5897 // move the second worktree to after the third
5898 // [a, b, c] -> [a, c, b]
5899 project
5900 .update(cx, |project, cx| {
5901 let second = worktree_b.read(cx);
5902 let third = worktree_c.read(cx);
5903 project.move_worktree(second.id(), third.id(), cx)
5904 })
5905 .expect("moving second after third");
5906
5907 // check the state after moving
5908 project.update(cx, |project, cx| {
5909 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5910 assert_eq!(worktrees.len(), 3);
5911
5912 let first = worktrees[0].read(cx);
5913 let second = worktrees[1].read(cx);
5914 let third = worktrees[2].read(cx);
5915
5916 // check they are now in the right order
5917 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5918 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5919 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5920 });
5921
5922 // move the third worktree to before the second
5923 // [a, c, b] -> [a, b, c]
5924 project
5925 .update(cx, |project, cx| {
5926 let third = worktree_c.read(cx);
5927 let second = worktree_b.read(cx);
5928 project.move_worktree(third.id(), second.id(), cx)
5929 })
5930 .expect("moving third before second");
5931
5932 // check the state after moving
5933 project.update(cx, |project, cx| {
5934 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5935 assert_eq!(worktrees.len(), 3);
5936
5937 let first = worktrees[0].read(cx);
5938 let second = worktrees[1].read(cx);
5939 let third = worktrees[2].read(cx);
5940
5941 // check they are now in the right order
5942 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5943 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5944 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5945 });
5946
5947 // move the first worktree to after the third
5948 // [a, b, c] -> [b, c, a]
5949 project
5950 .update(cx, |project, cx| {
5951 let first = worktree_a.read(cx);
5952 let third = worktree_c.read(cx);
5953 project.move_worktree(first.id(), third.id(), cx)
5954 })
5955 .expect("moving first after third");
5956
5957 // check the state after moving
5958 project.update(cx, |project, cx| {
5959 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5960 assert_eq!(worktrees.len(), 3);
5961
5962 let first = worktrees[0].read(cx);
5963 let second = worktrees[1].read(cx);
5964 let third = worktrees[2].read(cx);
5965
5966 // check they are now in the right order
5967 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5968 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5969 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5970 });
5971
5972 // move the third worktree to before the first
5973 // [b, c, a] -> [a, b, c]
5974 project
5975 .update(cx, |project, cx| {
5976 let third = worktree_a.read(cx);
5977 let first = worktree_b.read(cx);
5978 project.move_worktree(third.id(), first.id(), cx)
5979 })
5980 .expect("moving third before first");
5981
5982 // check the state after moving
5983 project.update(cx, |project, cx| {
5984 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5985 assert_eq!(worktrees.len(), 3);
5986
5987 let first = worktrees[0].read(cx);
5988 let second = worktrees[1].read(cx);
5989 let third = worktrees[2].read(cx);
5990
5991 // check they are now in the right order
5992 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5993 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5994 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5995 });
5996}
5997
5998#[gpui::test]
5999async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6000 init_test(cx);
6001
6002 let staged_contents = r#"
6003 fn main() {
6004 println!("hello world");
6005 }
6006 "#
6007 .unindent();
6008 let file_contents = r#"
6009 // print goodbye
6010 fn main() {
6011 println!("goodbye world");
6012 }
6013 "#
6014 .unindent();
6015
6016 let fs = FakeFs::new(cx.background_executor.clone());
6017 fs.insert_tree(
6018 "/dir",
6019 json!({
6020 ".git": {},
6021 "src": {
6022 "main.rs": file_contents,
6023 }
6024 }),
6025 )
6026 .await;
6027
6028 fs.set_index_for_repo(
6029 Path::new("/dir/.git"),
6030 &[("src/main.rs".into(), staged_contents)],
6031 );
6032
6033 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6034
6035 let buffer = project
6036 .update(cx, |project, cx| {
6037 project.open_local_buffer("/dir/src/main.rs", cx)
6038 })
6039 .await
6040 .unwrap();
6041 let unstaged_diff = project
6042 .update(cx, |project, cx| {
6043 project.open_unstaged_diff(buffer.clone(), cx)
6044 })
6045 .await
6046 .unwrap();
6047
6048 cx.run_until_parked();
6049 unstaged_diff.update(cx, |unstaged_diff, cx| {
6050 let snapshot = buffer.read(cx).snapshot();
6051 assert_hunks(
6052 unstaged_diff.hunks(&snapshot, cx),
6053 &snapshot,
6054 &unstaged_diff.base_text_string().unwrap(),
6055 &[
6056 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6057 (
6058 2..3,
6059 " println!(\"hello world\");\n",
6060 " println!(\"goodbye world\");\n",
6061 DiffHunkStatus::modified_none(),
6062 ),
6063 ],
6064 );
6065 });
6066
6067 let staged_contents = r#"
6068 // print goodbye
6069 fn main() {
6070 }
6071 "#
6072 .unindent();
6073
6074 fs.set_index_for_repo(
6075 Path::new("/dir/.git"),
6076 &[("src/main.rs".into(), staged_contents)],
6077 );
6078
6079 cx.run_until_parked();
6080 unstaged_diff.update(cx, |unstaged_diff, cx| {
6081 let snapshot = buffer.read(cx).snapshot();
6082 assert_hunks(
6083 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6084 &snapshot,
6085 &unstaged_diff.base_text().text(),
6086 &[(
6087 2..3,
6088 "",
6089 " println!(\"goodbye world\");\n",
6090 DiffHunkStatus::added_none(),
6091 )],
6092 );
6093 });
6094}
6095
/// Tests uncommitted diffs (HEAD vs. working copy) for a modified file and a
/// deleted file, including how each hunk's secondary (staged) status changes
/// as HEAD and the index are updated.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not in the working
    // tree, so it represents an unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the registered Rust language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment line is added but unstaged (it differs from the index,
    // hence `HasSecondaryHunk`); the println change is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a single deleted hunk; the deletion is not yet
    // staged, so the secondary hunk is still present.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by removing it from the index entirely.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains, but is now fully staged (no secondary hunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6273
/// Tests staging and unstaging individual hunks through an uncommitted diff:
/// a staged hunk first appears optimistically with a pending secondary
/// status, then settles once the index write completes — or reverts to
/// unstaged if the write fails. Also checks the change events emitted along
/// the way.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index both hold six lines; the working copy deletes "zero"
    // and uppercases "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so we can assert on what is emitted.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The staged hunk is marked pending until the index write finishes.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It still appears optimistically pending even
    // though the underlying write is going to fail.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6613
/// Tests that optimistic hunk-staging state remains consistent when FS events
/// for index writes are delayed and interleaved with additional staging
/// operations. The explicit seeds reproduce a specific problematic
/// interleaving of the test executor.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: deleting "zero" and uppercasing
    // "two" and "four" yields three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so index writes complete but their FS notifications
    // are buffered instead of delivered.
    fs.pause_events();

    // Stage the first hunk. It shows as pending until the FS event arrives.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks remain pending; neither FS event has landed yet.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6806
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Construct a 500-line file in which every fifth line differs from the
    // committed contents, yielding 100 separate single-line hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index agree, so all differences start out unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // One expected hunk per modified row i: "{i}\n" replaced by "diff {i}\n".
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        // Before the executor runs, each hunk reports a pending removal of
        // its secondary (index) hunk rather than a completed stage.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        // Symmetric to staging: unstaging is reported as pending addition of
        // the secondary hunk until the executor catches up.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
6952
6953#[gpui::test]
6954async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
6955 init_test(cx);
6956
6957 let committed_contents = r#"
6958 fn main() {
6959 println!("hello from HEAD");
6960 }
6961 "#
6962 .unindent();
6963 let file_contents = r#"
6964 fn main() {
6965 println!("hello from the working copy");
6966 }
6967 "#
6968 .unindent();
6969
6970 let fs = FakeFs::new(cx.background_executor.clone());
6971 fs.insert_tree(
6972 "/dir",
6973 json!({
6974 ".git": {},
6975 "src": {
6976 "main.rs": file_contents,
6977 }
6978 }),
6979 )
6980 .await;
6981
6982 fs.set_head_for_repo(
6983 Path::new("/dir/.git"),
6984 &[("src/main.rs".into(), committed_contents.clone())],
6985 );
6986 fs.set_index_for_repo(
6987 Path::new("/dir/.git"),
6988 &[("src/main.rs".into(), committed_contents.clone())],
6989 );
6990
6991 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
6992
6993 let buffer = project
6994 .update(cx, |project, cx| {
6995 project.open_local_buffer("/dir/src/main.rs", cx)
6996 })
6997 .await
6998 .unwrap();
6999 let uncommitted_diff = project
7000 .update(cx, |project, cx| {
7001 project.open_uncommitted_diff(buffer.clone(), cx)
7002 })
7003 .await
7004 .unwrap();
7005
7006 cx.run_until_parked();
7007 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7008 let snapshot = buffer.read(cx).snapshot();
7009 assert_hunks(
7010 uncommitted_diff.hunks(&snapshot, cx),
7011 &snapshot,
7012 &uncommitted_diff.base_text_string().unwrap(),
7013 &[(
7014 1..2,
7015 " println!(\"hello from HEAD\");\n",
7016 " println!(\"hello from the working copy\");\n",
7017 DiffHunkStatus {
7018 kind: DiffHunkStatusKind::Modified,
7019 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7020 },
7021 )],
7022 );
7023 });
7024}
7025
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Two nested repositories: /root/dir1 and /root/dir1/deps/dep1.
    // c.txt lives outside any repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair maps a worktree-relative path to the expected
        // (repository work directory, repo-relative path), or None when the
        // path is not inside any repository. Paths in the inner repo must
        // resolve to the inner repo, not the outer one.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                // `maybe!` turns the `?` on the lookup into an Option.
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repository's .git dir should make paths under it
    // (but outside the inner repository) resolve to no repository at all.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7114
7115#[gpui::test]
7116async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7117 init_test(cx);
7118 let fs = FakeFs::new(cx.background_executor.clone());
7119 fs.insert_tree(
7120 path!("/root"),
7121 json!({
7122 "home": {
7123 ".git": {},
7124 "project": {
7125 "a.txt": "A"
7126 },
7127 },
7128 }),
7129 )
7130 .await;
7131 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7132
7133 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7134 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7135 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7136 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7137 .await;
7138 tree.flush_fs_events(cx).await;
7139
7140 project.read_with(cx, |project, cx| {
7141 let containing = project
7142 .git_store()
7143 .read(cx)
7144 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7145 assert!(containing.is_none());
7146 });
7147
7148 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7149 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7150 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7151 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7152 .await;
7153 tree.flush_fs_events(cx).await;
7154
7155 project.read_with(cx, |project, cx| {
7156 let containing = project
7157 .git_store()
7158 .read(cx)
7159 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7160 assert_eq!(
7161 containing
7162 .unwrap()
7163 .0
7164 .read(cx)
7165 .work_directory_abs_path
7166 .as_ref(),
7167 Path::new(path!("/root/home"))
7168 );
7169 });
7170}
7171
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Delete d.txt and modify a.txt in the working copy; b.txt stays
    // untracked and c.txt untouched.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is unchanged and therefore absent from the status list.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify the previously-unchanged file and confirm the update is seen.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and drop d.txt from the index, then delete a
    // tracked file (a.txt) and an untracked file (b.txt) on disk.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7297
7298#[gpui::test]
7299async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7300 init_test(cx);
7301 cx.executor().allow_parking();
7302
7303 let root = TempTree::new(json!({
7304 "project": {
7305 "sub": {},
7306 "a.txt": "",
7307 },
7308 }));
7309
7310 let work_dir = root.path().join("project");
7311 let repo = git_init(work_dir.as_path());
7312 // a.txt exists in HEAD and the working copy but is deleted in the index.
7313 git_add("a.txt", &repo);
7314 git_commit("Initial commit", &repo);
7315 git_remove_index("a.txt".as_ref(), &repo);
7316 // `sub` is a nested git repository.
7317 let _sub = git_init(&work_dir.join("sub"));
7318
7319 let project = Project::test(
7320 Arc::new(RealFs::new(None, cx.executor())),
7321 [root.path()],
7322 cx,
7323 )
7324 .await;
7325
7326 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7327 tree.flush_fs_events(cx).await;
7328 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7329 .await;
7330 cx.executor().run_until_parked();
7331
7332 let repository = project.read_with(cx, |project, cx| {
7333 project
7334 .repositories(cx)
7335 .values()
7336 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7337 .unwrap()
7338 .clone()
7339 });
7340
7341 repository.read_with(cx, |repository, _cx| {
7342 let entries = repository.cached_status().collect::<Vec<_>>();
7343
7344 // `sub` doesn't appear in our computed statuses.
7345 // a.txt appears with a combined `DA` status.
7346 assert_eq!(
7347 entries,
7348 [StatusEntry {
7349 repo_path: "a.txt".into(),
7350 status: TrackedStatus {
7351 index_status: StatusCode::Deleted,
7352 worktree_status: StatusCode::Added
7353 }
7354 .into(),
7355 }]
7356 )
7357 });
7358}
7359
#[gpui::test]
async fn test_repository_subfolder_git_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "my-repo": {
            // .git folder will go here
            "a.txt": "a",
            "sub-folder-1": {
                "sub-folder-2": {
                    "c.txt": "cc",
                    "d": {
                        "e.txt": "eee"
                    }
                },
            }
        },
    }));

    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Set up git repository before creating the worktree.
    let git_repo_work_dir = root.path().join("my-repo");
    let repo = git_init(git_repo_work_dir.as_path());
    git_add(C_TXT, &repo);
    git_commit("Initial commit", &repo);

    // Open the worktree in subfolder
    let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2");

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path().join(project_root).as_path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository root is discovered above the worktree root.
        assert_eq!(
            repository.work_directory_abs_path.canonicalize().unwrap(),
            root.path().join("my-repo").canonicalize().unwrap()
        );

        // c.txt is committed (no status entry); e.txt is untracked.
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Now we simulate FS events, but ONLY in the .git folder that's outside
    // of out project root.
    // Meaning: we don't produce any FS events for files inside the project.
    git_add(E_TXT, &repo);
    git_commit("Second commit", &repo);
    tree.flush_fs_events_in_root_git_repository(cx).await;
    cx.executor().run_until_parked();

    // After the second commit, e.txt is tracked and clean too.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
7436
7437// TODO: this test fails on Windows because upon cherry-picking we don't get an event in the .git directory,
7438// despite CHERRY_PICK_HEAD existing after the `git_cherry_pick` call and the conflicted path showing up in git status.
7439#[cfg(not(windows))]
7440#[gpui::test]
7441async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
7442 init_test(cx);
7443 cx.executor().allow_parking();
7444
7445 let root = TempTree::new(json!({
7446 "project": {
7447 "a.txt": "a",
7448 },
7449 }));
7450 let root_path = root.path();
7451
7452 let repo = git_init(&root_path.join("project"));
7453 git_add("a.txt", &repo);
7454 git_commit("init", &repo);
7455
7456 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
7457
7458 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7459 tree.flush_fs_events(cx).await;
7460 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7461 .await;
7462 cx.executor().run_until_parked();
7463
7464 let repository = project.read_with(cx, |project, cx| {
7465 project.repositories(cx).values().next().unwrap().clone()
7466 });
7467
7468 git_branch("other-branch", &repo);
7469 git_checkout("refs/heads/other-branch", &repo);
7470 std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
7471 git_add("a.txt", &repo);
7472 git_commit("capitalize", &repo);
7473 let commit = repo
7474 .head()
7475 .expect("Failed to get HEAD")
7476 .peel_to_commit()
7477 .expect("HEAD is not a commit");
7478 git_checkout("refs/heads/main", &repo);
7479 std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
7480 git_add("a.txt", &repo);
7481 git_commit("improve letter", &repo);
7482 git_cherry_pick(&commit, &repo);
7483 std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
7484 .expect("No CHERRY_PICK_HEAD");
7485 pretty_assertions::assert_eq!(
7486 git_status(&repo),
7487 collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
7488 );
7489 tree.flush_fs_events(cx).await;
7490 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7491 .await;
7492 cx.executor().run_until_parked();
7493 let conflicts = repository.update(cx, |repository, _| {
7494 repository
7495 .merge_conflicts
7496 .iter()
7497 .cloned()
7498 .collect::<Vec<_>>()
7499 });
7500 pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);
7501
7502 git_add("a.txt", &repo);
7503 // Attempt to manually simulate what `git cherry-pick --continue` would do.
7504 git_commit("whatevs", &repo);
7505 std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
7506 .expect("Failed to remove CHERRY_PICK_HEAD");
7507 pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
7508 tree.flush_fs_events(cx).await;
7509 let conflicts = repository.update(cx, |repository, _| {
7510 repository
7511 .merge_conflicts
7512 .iter()
7513 .cloned()
7514 .collect::<Vec<_>>()
7515 });
7516 pretty_assertions::assert_eq!(conflicts, []);
7517}
7518
7519#[gpui::test]
7520async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
7521 init_test(cx);
7522 let fs = FakeFs::new(cx.background_executor.clone());
7523 fs.insert_tree(
7524 path!("/root"),
7525 json!({
7526 ".git": {},
7527 ".gitignore": "*.txt\n",
7528 "a.xml": "<a></a>",
7529 "b.txt": "Some text"
7530 }),
7531 )
7532 .await;
7533
7534 fs.set_head_and_index_for_repo(
7535 path!("/root/.git").as_ref(),
7536 &[
7537 (".gitignore".into(), "*.txt\n".into()),
7538 ("a.xml".into(), "<a></a>".into()),
7539 ],
7540 );
7541
7542 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7543
7544 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7545 tree.flush_fs_events(cx).await;
7546 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7547 .await;
7548 cx.executor().run_until_parked();
7549
7550 let repository = project.read_with(cx, |project, cx| {
7551 project.repositories(cx).values().next().unwrap().clone()
7552 });
7553
7554 // One file is unmodified, the other is ignored.
7555 cx.read(|cx| {
7556 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
7557 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
7558 });
7559
7560 // Change the gitignore, and stage the newly non-ignored file.
7561 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
7562 .await
7563 .unwrap();
7564 fs.set_index_for_repo(
7565 Path::new(path!("/root/.git")),
7566 &[
7567 (".gitignore".into(), "*.txt\n".into()),
7568 ("a.xml".into(), "<a></a>".into()),
7569 ("b.txt".into(), "Some text".into()),
7570 ],
7571 );
7572
7573 cx.executor().run_until_parked();
7574 cx.read(|cx| {
7575 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
7576 assert_entry_git_state(
7577 tree.read(cx),
7578 repository.read(cx),
7579 "b.txt",
7580 Some(StatusCode::Added),
7581 false,
7582 );
7583 });
7584}
7585
7586// NOTE:
7587// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
7588// a directory which some program has already open.
7589// This is a limitation of the Windows.
7590// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed then modified on disk; "b" is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: repository rooted at project1, with expected statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the repository's entire work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename and its statuses are preserved.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
7665
7666// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
7667// you can't rename a directory which some program has already open. This is a
7668// limitation of the Windows. See:
7669// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    // `repo` is mutable because `git_stash` below takes `&mut`.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // Files never added to the index are untracked.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // Committed, clean files carry no status entry.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // After the reset + stash, a.txt carries no status again.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files on disk and extend the gitignore to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A new file in a freshly created directory shows up as untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory keeps the untracked status attached to
    // the file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
7853
7854#[gpui::test]
7855async fn test_repos_in_invisible_worktrees(
7856 executor: BackgroundExecutor,
7857 cx: &mut gpui::TestAppContext,
7858) {
7859 init_test(cx);
7860 let fs = FakeFs::new(executor);
7861 fs.insert_tree(
7862 path!("/root"),
7863 json!({
7864 "dir1": {
7865 ".git": {},
7866 "dep1": {
7867 ".git": {},
7868 "src": {
7869 "a.txt": "",
7870 },
7871 },
7872 "b.txt": "",
7873 },
7874 }),
7875 )
7876 .await;
7877
7878 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
7879 let visible_worktree =
7880 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7881 visible_worktree
7882 .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7883 .await;
7884
7885 let repos = project.read_with(cx, |project, cx| {
7886 project
7887 .repositories(cx)
7888 .values()
7889 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
7890 .collect::<Vec<_>>()
7891 });
7892 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
7893
7894 let (invisible_worktree, _) = project
7895 .update(cx, |project, cx| {
7896 project.worktree_store.update(cx, |worktree_store, cx| {
7897 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
7898 })
7899 })
7900 .await
7901 .expect("failed to create worktree");
7902 invisible_worktree
7903 .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7904 .await;
7905
7906 let repos = project.read_with(cx, |project, cx| {
7907 project
7908 .repositories(cx)
7909 .values()
7910 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
7911 .collect::<Vec<_>>()
7912 });
7913 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
7914}
7915
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear `file_scan_exclusions` so nothing (including the `.git` dir) is
    // excluded from scanning; the final assertion relies on `.git` being
    // present as an (ignored) worktree entry.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    // Layout: an ancestor `.gitignore` above the repo root plus a repo-local
    // `.gitignore` that ignores `ignored-dir`.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    // Both HEAD and the index know only `.gitignore` and `tracked-file1`, so
    // these files start out unmodified; everything else is new/untracked.
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not scanned eagerly; force `ignored-dir`'s
    // entries to be loaded so we can assert on them below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file is unmodified (no status), ancestor-ignored
    // file inside a tracked dir is not marked ignored (repo-local .gitignore
    // takes effect, the ancestor one is outside the repo), and files under
    // `ignored-dir` are flagged ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it (index contains it, HEAD does not), so
    // it should rescan as `Added`.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    // Two more new files that are NOT staged: one matching the ancestor
    // .gitignore, one inside the repo-ignored dir.
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // Let the rescan triggered by the FS events settle before asserting.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // With file_scan_exclusions cleared, `.git` appears as an entry and
        // must be treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8050
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // One git repository at /root/project with two sibling child directories.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open the two children as separate worktrees; both belong to the same
    // enclosing repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // The shared repository must be reported exactly once, keyed by its
    // work-directory path, not once per worktree.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
8096
8097async fn search(
8098 project: &Entity<Project>,
8099 query: SearchQuery,
8100 cx: &mut gpui::TestAppContext,
8101) -> Result<HashMap<String, Vec<Range<usize>>>> {
8102 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8103 let mut results = HashMap::default();
8104 while let Ok(search_result) = search_rx.recv().await {
8105 match search_result {
8106 SearchResult::Buffer { buffer, ranges } => {
8107 results.entry(buffer).or_insert(ranges);
8108 }
8109 SearchResult::LimitReached => {}
8110 }
8111 }
8112 Ok(results
8113 .into_iter()
8114 .map(|(buffer, ranges)| {
8115 buffer.update(cx, |buffer, cx| {
8116 let path = buffer
8117 .file()
8118 .unwrap()
8119 .full_path(cx)
8120 .to_string_lossy()
8121 .to_string();
8122 let ranges = ranges
8123 .into_iter()
8124 .map(|range| range.to_offset(buffer))
8125 .collect::<Vec<_>>();
8126 (path, ranges)
8127 })
8128 })
8129 .collect())
8130}
8131
8132pub fn init_test(cx: &mut gpui::TestAppContext) {
8133 if std::env::var("RUST_LOG").is_ok() {
8134 env_logger::try_init().ok();
8135 }
8136
8137 cx.update(|cx| {
8138 let settings_store = SettingsStore::test(cx);
8139 cx.set_global(settings_store);
8140 release_channel::init(SemanticVersion::default(), cx);
8141 language::init(cx);
8142 Project::init_settings(cx);
8143 });
8144}
8145
8146fn json_lang() -> Arc<Language> {
8147 Arc::new(Language::new(
8148 LanguageConfig {
8149 name: "JSON".into(),
8150 matcher: LanguageMatcher {
8151 path_suffixes: vec!["json".to_string()],
8152 ..Default::default()
8153 },
8154 ..Default::default()
8155 },
8156 None,
8157 ))
8158}
8159
8160fn js_lang() -> Arc<Language> {
8161 Arc::new(Language::new(
8162 LanguageConfig {
8163 name: "JavaScript".into(),
8164 matcher: LanguageMatcher {
8165 path_suffixes: vec!["js".to_string()],
8166 ..Default::default()
8167 },
8168 ..Default::default()
8169 },
8170 None,
8171 ))
8172}
8173
8174fn rust_lang() -> Arc<Language> {
8175 Arc::new(Language::new(
8176 LanguageConfig {
8177 name: "Rust".into(),
8178 matcher: LanguageMatcher {
8179 path_suffixes: vec!["rs".to_string()],
8180 ..Default::default()
8181 },
8182 ..Default::default()
8183 },
8184 Some(tree_sitter_rust::LANGUAGE.into()),
8185 ))
8186}
8187
8188fn typescript_lang() -> Arc<Language> {
8189 Arc::new(Language::new(
8190 LanguageConfig {
8191 name: "TypeScript".into(),
8192 matcher: LanguageMatcher {
8193 path_suffixes: vec!["ts".to_string()],
8194 ..Default::default()
8195 },
8196 ..Default::default()
8197 },
8198 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8199 ))
8200}
8201
8202fn tsx_lang() -> Arc<Language> {
8203 Arc::new(Language::new(
8204 LanguageConfig {
8205 name: "tsx".into(),
8206 matcher: LanguageMatcher {
8207 path_suffixes: vec!["tsx".to_string()],
8208 ..Default::default()
8209 },
8210 ..Default::default()
8211 },
8212 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8213 ))
8214}
8215
8216fn get_all_tasks(
8217 project: &Entity<Project>,
8218 task_contexts: &TaskContexts,
8219 cx: &mut App,
8220) -> Vec<(TaskSourceKind, ResolvedTask)> {
8221 let (mut old, new) = project.update(cx, |project, cx| {
8222 project
8223 .task_store
8224 .read(cx)
8225 .task_inventory()
8226 .unwrap()
8227 .read(cx)
8228 .used_and_current_resolved_tasks(task_contexts, cx)
8229 });
8230 old.extend(new);
8231 old
8232}
8233
8234#[track_caller]
8235fn assert_entry_git_state(
8236 tree: &Worktree,
8237 repository: &Repository,
8238 path: &str,
8239 index_status: Option<StatusCode>,
8240 is_ignored: bool,
8241) {
8242 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8243 let entry = tree
8244 .entry_for_path(path)
8245 .unwrap_or_else(|| panic!("entry {path} not found"));
8246 let status = repository
8247 .status_for_path(&path.into())
8248 .map(|entry| entry.status);
8249 let expected = index_status.map(|index_status| {
8250 TrackedStatus {
8251 index_status,
8252 worktree_status: StatusCode::Unmodified,
8253 }
8254 .into()
8255 });
8256 assert_eq!(
8257 status, expected,
8258 "expected {path} to have git status: {expected:?}"
8259 );
8260 assert_eq!(
8261 entry.is_ignored, is_ignored,
8262 "expected {path} to have is_ignored: {is_ignored}"
8263 );
8264}
8265
8266#[track_caller]
8267fn git_init(path: &Path) -> git2::Repository {
8268 let mut init_opts = RepositoryInitOptions::new();
8269 init_opts.initial_head("main");
8270 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8271}
8272
8273#[track_caller]
8274fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8275 let path = path.as_ref();
8276 let mut index = repo.index().expect("Failed to get index");
8277 index.add_path(path).expect("Failed to add file");
8278 index.write().expect("Failed to write index");
8279}
8280
8281#[track_caller]
8282fn git_remove_index(path: &Path, repo: &git2::Repository) {
8283 let mut index = repo.index().expect("Failed to get index");
8284 index.remove_path(path).expect("Failed to add file");
8285 index.write().expect("Failed to write index");
8286}
8287
8288#[track_caller]
8289fn git_commit(msg: &'static str, repo: &git2::Repository) {
8290 use git2::Signature;
8291
8292 let signature = Signature::now("test", "test@zed.dev").unwrap();
8293 let oid = repo.index().unwrap().write_tree().unwrap();
8294 let tree = repo.find_tree(oid).unwrap();
8295 if let Ok(head) = repo.head() {
8296 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8297
8298 let parent_commit = parent_obj.as_commit().unwrap();
8299
8300 repo.commit(
8301 Some("HEAD"),
8302 &signature,
8303 &signature,
8304 msg,
8305 &tree,
8306 &[parent_commit],
8307 )
8308 .expect("Failed to commit with parent");
8309 } else {
8310 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8311 .expect("Failed to commit");
8312 }
8313}
8314
8315#[cfg(not(windows))]
8316#[track_caller]
8317fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
8318 repo.cherrypick(commit, None).expect("Failed to cherrypick");
8319}
8320
8321#[track_caller]
8322fn git_stash(repo: &mut git2::Repository) {
8323 use git2::Signature;
8324
8325 let signature = Signature::now("test", "test@zed.dev").unwrap();
8326 repo.stash_save(&signature, "N/A", None)
8327 .expect("Failed to stash");
8328}
8329
8330#[track_caller]
8331fn git_reset(offset: usize, repo: &git2::Repository) {
8332 let head = repo.head().expect("Couldn't get repo head");
8333 let object = head.peel(git2::ObjectType::Commit).unwrap();
8334 let commit = object.as_commit().unwrap();
8335 let new_head = commit
8336 .parents()
8337 .inspect(|parnet| {
8338 parnet.message();
8339 })
8340 .nth(offset)
8341 .expect("Not enough history");
8342 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8343 .expect("Could not reset");
8344}
8345
8346#[cfg(not(windows))]
8347#[track_caller]
8348fn git_branch(name: &str, repo: &git2::Repository) {
8349 let head = repo
8350 .head()
8351 .expect("Couldn't get repo head")
8352 .peel_to_commit()
8353 .expect("HEAD is not a commit");
8354 repo.branch(name, &head, false).expect("Failed to commit");
8355}
8356
8357#[cfg(not(windows))]
8358#[track_caller]
8359fn git_checkout(name: &str, repo: &git2::Repository) {
8360 repo.set_head(name).expect("Failed to set head");
8361 repo.checkout_head(None).expect("Failed to check out head");
8362}
8363
8364#[cfg(not(windows))]
8365#[track_caller]
8366fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
8367 repo.statuses(None)
8368 .unwrap()
8369 .iter()
8370 .map(|status| (status.path().unwrap().to_string(), status.status()))
8371 .collect()
8372}