1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use buffer_diff::{
8 BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks,
9};
10use fs::FakeFs;
11use futures::{StreamExt, future};
12use git::{
13 repository::RepoPath,
14 status::{StatusCode, TrackedStatus},
15};
16use git2::RepositoryInitOptions;
17use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
18use http_client::Url;
19use language::{
20 Diagnostic, DiagnosticEntry, DiagnosticSet, DiskState, FakeLspAdapter, LanguageConfig,
21 LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
22 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
23 tree_sitter_rust, tree_sitter_typescript,
24};
25use lsp::{
26 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
27 WillRenameFiles, notification::DidRenameFiles,
28};
29use parking_lot::Mutex;
30use paths::{config_dir, tasks_file};
31use postage::stream::Stream as _;
32use pretty_assertions::{assert_eq, assert_matches};
33use serde_json::json;
34#[cfg(not(windows))]
35use std::os;
36use std::{mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
37use task::{ResolvedTask, TaskContext};
38use unindent::Unindent as _;
39use util::{
40 TryFutureExt as _, assert_set_eq, path,
41 paths::PathMatcher,
42 separator,
43 test::{TempTree, marked_text_offsets},
44 uri,
45};
46use worktree::WorktreeModelHandle as _;
47
48#[gpui::test]
49async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
50 cx.executor().allow_parking();
51
52 let (tx, mut rx) = futures::channel::mpsc::unbounded();
53 let _thread = std::thread::spawn(move || {
54 #[cfg(not(target_os = "windows"))]
55 std::fs::metadata("/tmp").unwrap();
56 #[cfg(target_os = "windows")]
57 std::fs::metadata("C:/Windows").unwrap();
58 std::thread::sleep(Duration::from_millis(1000));
59 tx.unbounded_send(1).unwrap();
60 });
61 rx.next().await.unwrap();
62}
63
64#[gpui::test]
65async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let io_task = smol::unblock(move || {
69 println!("sleeping on thread {:?}", std::thread::current().id());
70 std::thread::sleep(Duration::from_millis(10));
71 1
72 });
73
74 let task = cx.foreground_executor().spawn(async move {
75 io_task.await;
76 });
77
78 task.await;
79}
80
/// Verifies that worktrees traverse symlinked directories: opening a project
/// through a symlinked root works, and a symlink to a sibling directory is
/// followed, with both paths resolving to the same underlying inode.
// Unix-only: the test creates symlinks via `os::unix::fs`.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    // A real on-disk tree is required, since symlinks need a real filesystem.
    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // Symlink to the root itself, plus a symlink inside the tree pointing at a
    // sibling directory ("finnochio" -> "fennel").
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, date, endive, grape, and grape again via the "finnochio" link.
        assert_eq!(tree.file_count(), 5);
        // The symlinked directory resolves to the same underlying inode.
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}
126
/// Verifies `.editorconfig` support: its settings override `.zed/settings.json`,
/// a nested `.editorconfig` overrides the root one, `tab_width` is used when
/// `indent_size` is absent, and globs only affect matching file types.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into the fake FS the project reads from.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by the .editorconfig glob "*.rs",
        // so the .zed/settings.json tab size applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
216
/// Verifies project-specific `.zed` configuration: `settings.json` values apply
/// to files beneath their directory (nested settings override the root ones),
/// and `tasks.json` entries from every `.zed` directory are collected, ordered,
/// and later merged with scheduled and global file-based tasks.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Identifies tasks sourced from the top-level `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            // `a/a.rs` sees the root settings; `b/b.rs` the nested override.
            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Tasks from the nested `b/.zed` directory are listed before the root ones.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the top-level task as recently scheduled, then register an
    // additional global (file-based) task.
    // NOTE(review): the expect message says "global task" but this looks up the
    // topmost *local* task — message may be stale; confirm before relying on it.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently scheduled task now sorts first; the new global task is last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
422
423#[gpui::test]
424async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
425 init_test(cx);
426 TaskStore::init(None);
427
428 let fs = FakeFs::new(cx.executor());
429 fs.insert_tree(
430 path!("/dir"),
431 json!({
432 ".zed": {
433 "tasks.json": r#"[{
434 "label": "test worktree root",
435 "command": "echo $ZED_WORKTREE_ROOT"
436 }]"#,
437 },
438 "a": {
439 "a.rs": "fn a() {\n A\n}"
440 },
441 }),
442 )
443 .await;
444
445 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
446 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
447
448 cx.executor().run_until_parked();
449 let worktree_id = cx.update(|cx| {
450 project.update(cx, |project, cx| {
451 project.worktrees(cx).next().unwrap().read(cx).id()
452 })
453 });
454
455 let active_non_worktree_item_tasks = cx.update(|cx| {
456 get_all_tasks(
457 &project,
458 &TaskContexts {
459 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
460 active_worktree_context: None,
461 other_worktree_contexts: Vec::new(),
462 },
463 cx,
464 )
465 });
466 assert!(
467 active_non_worktree_item_tasks.is_empty(),
468 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
469 );
470
471 let active_worktree_tasks = cx.update(|cx| {
472 get_all_tasks(
473 &project,
474 &TaskContexts {
475 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
476 active_worktree_context: Some((worktree_id, {
477 let mut worktree_context = TaskContext::default();
478 worktree_context
479 .task_variables
480 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
481 worktree_context
482 })),
483 other_worktree_contexts: Vec::new(),
484 },
485 cx,
486 )
487 });
488 assert_eq!(
489 active_worktree_tasks
490 .into_iter()
491 .map(|(source_kind, task)| {
492 let resolved = task.resolved.unwrap();
493 (source_kind, resolved.command)
494 })
495 .collect::<Vec<_>>(),
496 vec![(
497 TaskSourceKind::Worktree {
498 id: worktree_id,
499 directory_in_worktree: PathBuf::from(separator!(".zed")),
500 id_base: if cfg!(windows) {
501 "local worktree tasks from directory \".zed\"".into()
502 } else {
503 "local worktree tasks from directory \".zed\"".into()
504 },
505 },
506 "echo /dir".to_string(),
507 )]
508 );
509}
510
/// End-to-end check of language-server lifecycle management: servers start
/// lazily when a buffer of their language opens, receive open/change/save/close
/// notifications only for matching buffers, follow files across renames
/// (including renames that change the file's language), and reopen their
/// documents after a restart.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server advertising completion triggers and save notifications.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server with a different completion trigger set, so we can tell
    // which server configured a given buffer.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // The rename is surfaced to the Rust server as a close of the old path
    // followed by an open of the new one.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify below that it is cleared when the
    // buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
912
913#[gpui::test]
914async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
915 init_test(cx);
916
917 let fs = FakeFs::new(cx.executor());
918 fs.insert_tree(
919 path!("/the-root"),
920 json!({
921 ".gitignore": "target\n",
922 "Cargo.lock": "",
923 "src": {
924 "a.rs": "",
925 "b.rs": "",
926 },
927 "target": {
928 "x": {
929 "out": {
930 "x.rs": ""
931 }
932 },
933 "y": {
934 "out": {
935 "y.rs": "",
936 }
937 },
938 "z": {
939 "out": {
940 "z.rs": ""
941 }
942 }
943 }
944 }),
945 )
946 .await;
947 fs.insert_tree(
948 path!("/the-registry"),
949 json!({
950 "dep1": {
951 "src": {
952 "dep1.rs": "",
953 }
954 },
955 "dep2": {
956 "src": {
957 "dep2.rs": "",
958 }
959 },
960 }),
961 )
962 .await;
963 fs.insert_tree(
964 path!("/the/stdlib"),
965 json!({
966 "LICENSE": "",
967 "src": {
968 "string.rs": "",
969 }
970 }),
971 )
972 .await;
973
974 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
975 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
976 (project.languages().clone(), project.lsp_store())
977 });
978 language_registry.add(rust_lang());
979 let mut fake_servers = language_registry.register_fake_lsp(
980 "Rust",
981 FakeLspAdapter {
982 name: "the-language-server",
983 ..Default::default()
984 },
985 );
986
987 cx.executor().run_until_parked();
988
989 // Start the language server by opening a buffer with a compatible file extension.
990 project
991 .update(cx, |project, cx| {
992 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
993 })
994 .await
995 .unwrap();
996
997 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
998 project.update(cx, |project, cx| {
999 let worktree = project.worktrees(cx).next().unwrap();
1000 assert_eq!(
1001 worktree
1002 .read(cx)
1003 .snapshot()
1004 .entries(true, 0)
1005 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1006 .collect::<Vec<_>>(),
1007 &[
1008 (Path::new(""), false),
1009 (Path::new(".gitignore"), false),
1010 (Path::new("Cargo.lock"), false),
1011 (Path::new("src"), false),
1012 (Path::new("src/a.rs"), false),
1013 (Path::new("src/b.rs"), false),
1014 (Path::new("target"), true),
1015 ]
1016 );
1017 });
1018
1019 let prev_read_dir_count = fs.read_dir_call_count();
1020
1021 let fake_server = fake_servers.next().await.unwrap();
1022 let (server_id, server_name) = lsp_store.read_with(cx, |lsp_store, _| {
1023 let (id, status) = lsp_store.language_server_statuses().next().unwrap();
1024 (id, LanguageServerName::from(status.name.as_str()))
1025 });
1026
1027 // Simulate jumping to a definition in a dependency outside of the worktree.
1028 let _out_of_worktree_buffer = project
1029 .update(cx, |project, cx| {
1030 project.open_local_buffer_via_lsp(
1031 lsp::Url::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1032 server_id,
1033 server_name.clone(),
1034 cx,
1035 )
1036 })
1037 .await
1038 .unwrap();
1039
1040 // Keep track of the FS events reported to the language server.
1041 let file_changes = Arc::new(Mutex::new(Vec::new()));
1042 fake_server
1043 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1044 registrations: vec![lsp::Registration {
1045 id: Default::default(),
1046 method: "workspace/didChangeWatchedFiles".to_string(),
1047 register_options: serde_json::to_value(
1048 lsp::DidChangeWatchedFilesRegistrationOptions {
1049 watchers: vec![
1050 lsp::FileSystemWatcher {
1051 glob_pattern: lsp::GlobPattern::String(
1052 path!("/the-root/Cargo.toml").to_string(),
1053 ),
1054 kind: None,
1055 },
1056 lsp::FileSystemWatcher {
1057 glob_pattern: lsp::GlobPattern::String(
1058 path!("/the-root/src/*.{rs,c}").to_string(),
1059 ),
1060 kind: None,
1061 },
1062 lsp::FileSystemWatcher {
1063 glob_pattern: lsp::GlobPattern::String(
1064 path!("/the-root/target/y/**/*.rs").to_string(),
1065 ),
1066 kind: None,
1067 },
1068 lsp::FileSystemWatcher {
1069 glob_pattern: lsp::GlobPattern::String(
1070 path!("/the/stdlib/src/**/*.rs").to_string(),
1071 ),
1072 kind: None,
1073 },
1074 lsp::FileSystemWatcher {
1075 glob_pattern: lsp::GlobPattern::String(
1076 path!("**/Cargo.lock").to_string(),
1077 ),
1078 kind: None,
1079 },
1080 ],
1081 },
1082 )
1083 .ok(),
1084 }],
1085 })
1086 .await
1087 .unwrap();
1088 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1089 let file_changes = file_changes.clone();
1090 move |params, _| {
1091 let mut file_changes = file_changes.lock();
1092 file_changes.extend(params.changes);
1093 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1094 }
1095 });
1096
1097 cx.executor().run_until_parked();
1098 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1099 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1100
1101 let mut new_watched_paths = fs.watched_paths();
1102 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1103 assert_eq!(
1104 &new_watched_paths,
1105 &[
1106 Path::new(path!("/the-root")),
1107 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1108 Path::new(path!("/the/stdlib/src"))
1109 ]
1110 );
1111
1112 // Now the language server has asked us to watch an ignored directory path,
1113 // so we recursively load it.
1114 project.update(cx, |project, cx| {
1115 let worktree = project.visible_worktrees(cx).next().unwrap();
1116 assert_eq!(
1117 worktree
1118 .read(cx)
1119 .snapshot()
1120 .entries(true, 0)
1121 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1122 .collect::<Vec<_>>(),
1123 &[
1124 (Path::new(""), false),
1125 (Path::new(".gitignore"), false),
1126 (Path::new("Cargo.lock"), false),
1127 (Path::new("src"), false),
1128 (Path::new("src/a.rs"), false),
1129 (Path::new("src/b.rs"), false),
1130 (Path::new("target"), true),
1131 (Path::new("target/x"), true),
1132 (Path::new("target/y"), true),
1133 (Path::new("target/y/out"), true),
1134 (Path::new("target/y/out/y.rs"), true),
1135 (Path::new("target/z"), true),
1136 ]
1137 );
1138 });
1139
1140 // Perform some file system mutations, two of which match the watched patterns,
1141 // and one of which does not.
1142 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1143 .await
1144 .unwrap();
1145 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1146 .await
1147 .unwrap();
1148 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1149 .await
1150 .unwrap();
1151 fs.create_file(
1152 path!("/the-root/target/x/out/x2.rs").as_ref(),
1153 Default::default(),
1154 )
1155 .await
1156 .unwrap();
1157 fs.create_file(
1158 path!("/the-root/target/y/out/y2.rs").as_ref(),
1159 Default::default(),
1160 )
1161 .await
1162 .unwrap();
1163 fs.save(
1164 path!("/the-root/Cargo.lock").as_ref(),
1165 &"".into(),
1166 Default::default(),
1167 )
1168 .await
1169 .unwrap();
1170 fs.save(
1171 path!("/the-stdlib/LICENSE").as_ref(),
1172 &"".into(),
1173 Default::default(),
1174 )
1175 .await
1176 .unwrap();
1177 fs.save(
1178 path!("/the/stdlib/src/string.rs").as_ref(),
1179 &"".into(),
1180 Default::default(),
1181 )
1182 .await
1183 .unwrap();
1184
1185 // The language server receives events for the FS mutations that match its watch patterns.
1186 cx.executor().run_until_parked();
1187 assert_eq!(
1188 &*file_changes.lock(),
1189 &[
1190 lsp::FileEvent {
1191 uri: lsp::Url::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1192 typ: lsp::FileChangeType::CHANGED,
1193 },
1194 lsp::FileEvent {
1195 uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1196 typ: lsp::FileChangeType::DELETED,
1197 },
1198 lsp::FileEvent {
1199 uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1200 typ: lsp::FileChangeType::CREATED,
1201 },
1202 lsp::FileEvent {
1203 uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1204 typ: lsp::FileChangeType::CREATED,
1205 },
1206 lsp::FileEvent {
1207 uri: lsp::Url::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1208 typ: lsp::FileChangeType::CHANGED,
1209 },
1210 ]
1211 );
1212}
1213
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Opens two single-file worktrees (a.rs and b.rs) and verifies that
    // diagnostics published for each file land only in the corresponding
    // buffer, with the published severity attached to the expected range.
    init_test(cx);

    // Two sibling files; each is opened below as its own single-file worktree.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push diagnostics straight into the LSP store, as if the same server
    // (id 0) had published them: an ERROR on `a` in a.rs and a WARNING on
    // `b` in b.rs. Both ranges cover columns 4..5 — the variable name.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer should reflect only its own file's diagnostic: the chunks
    // split exactly around the variable name, which carries the severity.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1315
1316#[gpui::test]
1317async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1318 init_test(cx);
1319
1320 let fs = FakeFs::new(cx.executor());
1321 fs.insert_tree(
1322 path!("/root"),
1323 json!({
1324 "dir": {
1325 ".git": {
1326 "HEAD": "ref: refs/heads/main",
1327 },
1328 ".gitignore": "b.rs",
1329 "a.rs": "let a = 1;",
1330 "b.rs": "let b = 2;",
1331 },
1332 "other.rs": "let b = c;"
1333 }),
1334 )
1335 .await;
1336
1337 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1338 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1339 let (worktree, _) = project
1340 .update(cx, |project, cx| {
1341 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1342 })
1343 .await
1344 .unwrap();
1345 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1346
1347 let (worktree, _) = project
1348 .update(cx, |project, cx| {
1349 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1350 })
1351 .await
1352 .unwrap();
1353 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1354
1355 let server_id = LanguageServerId(0);
1356 lsp_store.update(cx, |lsp_store, cx| {
1357 lsp_store
1358 .update_diagnostics(
1359 server_id,
1360 lsp::PublishDiagnosticsParams {
1361 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1362 version: None,
1363 diagnostics: vec![lsp::Diagnostic {
1364 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1365 severity: Some(lsp::DiagnosticSeverity::ERROR),
1366 message: "unused variable 'b'".to_string(),
1367 ..Default::default()
1368 }],
1369 },
1370 &[],
1371 cx,
1372 )
1373 .unwrap();
1374 lsp_store
1375 .update_diagnostics(
1376 server_id,
1377 lsp::PublishDiagnosticsParams {
1378 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1379 version: None,
1380 diagnostics: vec![lsp::Diagnostic {
1381 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1382 severity: Some(lsp::DiagnosticSeverity::ERROR),
1383 message: "unknown variable 'c'".to_string(),
1384 ..Default::default()
1385 }],
1386 },
1387 &[],
1388 cx,
1389 )
1390 .unwrap();
1391 });
1392
1393 let main_ignored_buffer = project
1394 .update(cx, |project, cx| {
1395 project.open_buffer((main_worktree_id, "b.rs"), cx)
1396 })
1397 .await
1398 .unwrap();
1399 main_ignored_buffer.update(cx, |buffer, _| {
1400 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1401 assert_eq!(
1402 chunks
1403 .iter()
1404 .map(|(s, d)| (s.as_str(), *d))
1405 .collect::<Vec<_>>(),
1406 &[
1407 ("let ", None),
1408 ("b", Some(DiagnosticSeverity::ERROR)),
1409 (" = 2;", None),
1410 ],
1411 "Gigitnored buffers should still get in-buffer diagnostics",
1412 );
1413 });
1414 let other_buffer = project
1415 .update(cx, |project, cx| {
1416 project.open_buffer((other_worktree_id, ""), cx)
1417 })
1418 .await
1419 .unwrap();
1420 other_buffer.update(cx, |buffer, _| {
1421 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1422 assert_eq!(
1423 chunks
1424 .iter()
1425 .map(|(s, d)| (s.as_str(), *d))
1426 .collect::<Vec<_>>(),
1427 &[
1428 ("let b = ", None),
1429 ("c", Some(DiagnosticSeverity::ERROR)),
1430 (";", None),
1431 ],
1432 "Buffers from hidden projects should still get in-buffer diagnostics"
1433 );
1434 });
1435
1436 project.update(cx, |project, cx| {
1437 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1438 assert_eq!(
1439 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1440 vec![(
1441 ProjectPath {
1442 worktree_id: main_worktree_id,
1443 path: Arc::from(Path::new("b.rs")),
1444 },
1445 server_id,
1446 DiagnosticSummary {
1447 error_count: 1,
1448 warning_count: 0,
1449 }
1450 )]
1451 );
1452 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1453 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1454 });
1455}
1456
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Exercises the project event sequence driven by a language server's
    // disk-based-diagnostics progress token: Started -> DiagnosticsUpdated ->
    // Finished, plus de-duplication of a repeated empty diagnostics publish.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter is configured so progress on `progress_token` is treated as
    // disk-based diagnostics activity.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token emits Started (after the
    // inlay-hint refresh that follows server startup).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic while the progress is running emits
    // DiagnosticsUpdated for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress emits Finished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer shows the diagnostic that was published above.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish must not produce another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1592
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarts a language server while its disk-based diagnostics progress is
    // still open, and verifies the replacement server's progress lifecycle is
    // tracked cleanly — the old server's unfinished work does not linger.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // Note: the replacement server gets a fresh id (1).
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1679
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics already published by a language server are
    // cleared from the buffer and the project summary when that server is
    // restarted.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1760
1761#[gpui::test]
1762async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1763 init_test(cx);
1764
1765 let fs = FakeFs::new(cx.executor());
1766 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1767
1768 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1769 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1770
1771 language_registry.add(rust_lang());
1772 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1773
1774 let (buffer, _handle) = project
1775 .update(cx, |project, cx| {
1776 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1777 })
1778 .await
1779 .unwrap();
1780
1781 // Before restarting the server, report diagnostics with an unknown buffer version.
1782 let fake_server = fake_servers.next().await.unwrap();
1783 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1784 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1785 version: Some(10000),
1786 diagnostics: Vec::new(),
1787 });
1788 cx.executor().run_until_parked();
1789 project.update(cx, |project, cx| {
1790 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1791 });
1792
1793 let mut fake_server = fake_servers.next().await.unwrap();
1794 let notification = fake_server
1795 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1796 .await
1797 .text_document;
1798 assert_eq!(notification.version, 0);
1799}
1800
1801#[gpui::test]
1802async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1803 init_test(cx);
1804
1805 let progress_token = "the-progress-token";
1806
1807 let fs = FakeFs::new(cx.executor());
1808 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1809
1810 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1811
1812 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1813 language_registry.add(rust_lang());
1814 let mut fake_servers = language_registry.register_fake_lsp(
1815 "Rust",
1816 FakeLspAdapter {
1817 name: "the-language-server",
1818 disk_based_diagnostics_sources: vec!["disk".into()],
1819 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1820 ..Default::default()
1821 },
1822 );
1823
1824 let (buffer, _handle) = project
1825 .update(cx, |project, cx| {
1826 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1827 })
1828 .await
1829 .unwrap();
1830
1831 // Simulate diagnostics starting to update.
1832 let mut fake_server = fake_servers.next().await.unwrap();
1833 fake_server
1834 .start_progress_with(
1835 "another-token",
1836 lsp::WorkDoneProgressBegin {
1837 cancellable: Some(false),
1838 ..Default::default()
1839 },
1840 )
1841 .await;
1842 fake_server
1843 .start_progress_with(
1844 progress_token,
1845 lsp::WorkDoneProgressBegin {
1846 cancellable: Some(true),
1847 ..Default::default()
1848 },
1849 )
1850 .await;
1851 cx.executor().run_until_parked();
1852
1853 project.update(cx, |project, cx| {
1854 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1855 });
1856
1857 let cancel_notification = fake_server
1858 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1859 .await;
1860 assert_eq!(
1861 cancel_notification.token,
1862 NumberOrString::String(progress_token.into())
1863 );
1864}
1865
1866#[gpui::test]
1867async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1868 init_test(cx);
1869
1870 let fs = FakeFs::new(cx.executor());
1871 fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
1872 .await;
1873
1874 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1875 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1876
1877 let mut fake_rust_servers = language_registry.register_fake_lsp(
1878 "Rust",
1879 FakeLspAdapter {
1880 name: "rust-lsp",
1881 ..Default::default()
1882 },
1883 );
1884 let mut fake_js_servers = language_registry.register_fake_lsp(
1885 "JavaScript",
1886 FakeLspAdapter {
1887 name: "js-lsp",
1888 ..Default::default()
1889 },
1890 );
1891 language_registry.add(rust_lang());
1892 language_registry.add(js_lang());
1893
1894 let _rs_buffer = project
1895 .update(cx, |project, cx| {
1896 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1897 })
1898 .await
1899 .unwrap();
1900 let _js_buffer = project
1901 .update(cx, |project, cx| {
1902 project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
1903 })
1904 .await
1905 .unwrap();
1906
1907 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1908 assert_eq!(
1909 fake_rust_server_1
1910 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1911 .await
1912 .text_document
1913 .uri
1914 .as_str(),
1915 uri!("file:///dir/a.rs")
1916 );
1917
1918 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1919 assert_eq!(
1920 fake_js_server
1921 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1922 .await
1923 .text_document
1924 .uri
1925 .as_str(),
1926 uri!("file:///dir/b.js")
1927 );
1928
1929 // Disable Rust language server, ensuring only that server gets stopped.
1930 cx.update(|cx| {
1931 SettingsStore::update_global(cx, |settings, cx| {
1932 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1933 settings.languages.insert(
1934 "Rust".into(),
1935 LanguageSettingsContent {
1936 enable_language_server: Some(false),
1937 ..Default::default()
1938 },
1939 );
1940 });
1941 })
1942 });
1943 fake_rust_server_1
1944 .receive_notification::<lsp::notification::Exit>()
1945 .await;
1946
1947 // Enable Rust and disable JavaScript language servers, ensuring that the
1948 // former gets started again and that the latter stops.
1949 cx.update(|cx| {
1950 SettingsStore::update_global(cx, |settings, cx| {
1951 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1952 settings.languages.insert(
1953 LanguageName::new("Rust"),
1954 LanguageSettingsContent {
1955 enable_language_server: Some(true),
1956 ..Default::default()
1957 },
1958 );
1959 settings.languages.insert(
1960 LanguageName::new("JavaScript"),
1961 LanguageSettingsContent {
1962 enable_language_server: Some(false),
1963 ..Default::default()
1964 },
1965 );
1966 });
1967 })
1968 });
1969 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1970 assert_eq!(
1971 fake_rust_server_2
1972 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1973 .await
1974 .text_document
1975 .uri
1976 .as_str(),
1977 uri!("file:///dir/a.rs")
1978 );
1979 fake_js_server
1980 .receive_notification::<lsp::notification::Exit>()
1981 .await;
1982}
1983
1984#[gpui::test(iterations = 3)]
1985async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1986 init_test(cx);
1987
1988 let text = "
1989 fn a() { A }
1990 fn b() { BB }
1991 fn c() { CCC }
1992 "
1993 .unindent();
1994
1995 let fs = FakeFs::new(cx.executor());
1996 fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;
1997
1998 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1999 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2000
2001 language_registry.add(rust_lang());
2002 let mut fake_servers = language_registry.register_fake_lsp(
2003 "Rust",
2004 FakeLspAdapter {
2005 disk_based_diagnostics_sources: vec!["disk".into()],
2006 ..Default::default()
2007 },
2008 );
2009
2010 let buffer = project
2011 .update(cx, |project, cx| {
2012 project.open_local_buffer(path!("/dir/a.rs"), cx)
2013 })
2014 .await
2015 .unwrap();
2016
2017 let _handle = project.update(cx, |project, cx| {
2018 project.register_buffer_with_language_servers(&buffer, cx)
2019 });
2020
2021 let mut fake_server = fake_servers.next().await.unwrap();
2022 let open_notification = fake_server
2023 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2024 .await;
2025
2026 // Edit the buffer, moving the content down
2027 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
2028 let change_notification_1 = fake_server
2029 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2030 .await;
2031 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
2032
2033 // Report some diagnostics for the initial version of the buffer
2034 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2035 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2036 version: Some(open_notification.text_document.version),
2037 diagnostics: vec![
2038 lsp::Diagnostic {
2039 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2040 severity: Some(DiagnosticSeverity::ERROR),
2041 message: "undefined variable 'A'".to_string(),
2042 source: Some("disk".to_string()),
2043 ..Default::default()
2044 },
2045 lsp::Diagnostic {
2046 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2047 severity: Some(DiagnosticSeverity::ERROR),
2048 message: "undefined variable 'BB'".to_string(),
2049 source: Some("disk".to_string()),
2050 ..Default::default()
2051 },
2052 lsp::Diagnostic {
2053 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
2054 severity: Some(DiagnosticSeverity::ERROR),
2055 source: Some("disk".to_string()),
2056 message: "undefined variable 'CCC'".to_string(),
2057 ..Default::default()
2058 },
2059 ],
2060 });
2061
2062 // The diagnostics have moved down since they were created.
2063 cx.executor().run_until_parked();
2064 buffer.update(cx, |buffer, _| {
2065 assert_eq!(
2066 buffer
2067 .snapshot()
2068 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
2069 .collect::<Vec<_>>(),
2070 &[
2071 DiagnosticEntry {
2072 range: Point::new(3, 9)..Point::new(3, 11),
2073 diagnostic: Diagnostic {
2074 source: Some("disk".into()),
2075 severity: DiagnosticSeverity::ERROR,
2076 message: "undefined variable 'BB'".to_string(),
2077 is_disk_based: true,
2078 group_id: 1,
2079 is_primary: true,
2080 ..Default::default()
2081 },
2082 },
2083 DiagnosticEntry {
2084 range: Point::new(4, 9)..Point::new(4, 12),
2085 diagnostic: Diagnostic {
2086 source: Some("disk".into()),
2087 severity: DiagnosticSeverity::ERROR,
2088 message: "undefined variable 'CCC'".to_string(),
2089 is_disk_based: true,
2090 group_id: 2,
2091 is_primary: true,
2092 ..Default::default()
2093 }
2094 }
2095 ]
2096 );
2097 assert_eq!(
2098 chunks_with_diagnostics(buffer, 0..buffer.len()),
2099 [
2100 ("\n\nfn a() { ".to_string(), None),
2101 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2102 (" }\nfn b() { ".to_string(), None),
2103 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
2104 (" }\nfn c() { ".to_string(), None),
2105 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
2106 (" }\n".to_string(), None),
2107 ]
2108 );
2109 assert_eq!(
2110 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
2111 [
2112 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
2113 (" }\nfn c() { ".to_string(), None),
2114 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
2115 ]
2116 );
2117 });
2118
2119 // Ensure overlapping diagnostics are highlighted correctly.
2120 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2121 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2122 version: Some(open_notification.text_document.version),
2123 diagnostics: vec![
2124 lsp::Diagnostic {
2125 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2126 severity: Some(DiagnosticSeverity::ERROR),
2127 message: "undefined variable 'A'".to_string(),
2128 source: Some("disk".to_string()),
2129 ..Default::default()
2130 },
2131 lsp::Diagnostic {
2132 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
2133 severity: Some(DiagnosticSeverity::WARNING),
2134 message: "unreachable statement".to_string(),
2135 source: Some("disk".to_string()),
2136 ..Default::default()
2137 },
2138 ],
2139 });
2140
2141 cx.executor().run_until_parked();
2142 buffer.update(cx, |buffer, _| {
2143 assert_eq!(
2144 buffer
2145 .snapshot()
2146 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
2147 .collect::<Vec<_>>(),
2148 &[
2149 DiagnosticEntry {
2150 range: Point::new(2, 9)..Point::new(2, 12),
2151 diagnostic: Diagnostic {
2152 source: Some("disk".into()),
2153 severity: DiagnosticSeverity::WARNING,
2154 message: "unreachable statement".to_string(),
2155 is_disk_based: true,
2156 group_id: 4,
2157 is_primary: true,
2158 ..Default::default()
2159 }
2160 },
2161 DiagnosticEntry {
2162 range: Point::new(2, 9)..Point::new(2, 10),
2163 diagnostic: Diagnostic {
2164 source: Some("disk".into()),
2165 severity: DiagnosticSeverity::ERROR,
2166 message: "undefined variable 'A'".to_string(),
2167 is_disk_based: true,
2168 group_id: 3,
2169 is_primary: true,
2170 ..Default::default()
2171 },
2172 }
2173 ]
2174 );
2175 assert_eq!(
2176 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
2177 [
2178 ("fn a() { ".to_string(), None),
2179 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
2180 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2181 ("\n".to_string(), None),
2182 ]
2183 );
2184 assert_eq!(
2185 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
2186 [
2187 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
2188 ("\n".to_string(), None),
2189 ]
2190 );
2191 });
2192
2193 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
2194 // changes since the last save.
2195 buffer.update(cx, |buffer, cx| {
2196 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
2197 buffer.edit(
2198 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
2199 None,
2200 cx,
2201 );
2202 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
2203 });
2204 let change_notification_2 = fake_server
2205 .receive_notification::<lsp::notification::DidChangeTextDocument>()
2206 .await;
2207 assert!(
2208 change_notification_2.text_document.version > change_notification_1.text_document.version
2209 );
2210
2211 // Handle out-of-order diagnostics
2212 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2213 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
2214 version: Some(change_notification_2.text_document.version),
2215 diagnostics: vec![
2216 lsp::Diagnostic {
2217 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
2218 severity: Some(DiagnosticSeverity::ERROR),
2219 message: "undefined variable 'BB'".to_string(),
2220 source: Some("disk".to_string()),
2221 ..Default::default()
2222 },
2223 lsp::Diagnostic {
2224 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2225 severity: Some(DiagnosticSeverity::WARNING),
2226 message: "undefined variable 'A'".to_string(),
2227 source: Some("disk".to_string()),
2228 ..Default::default()
2229 },
2230 ],
2231 });
2232
2233 cx.executor().run_until_parked();
2234 buffer.update(cx, |buffer, _| {
2235 assert_eq!(
2236 buffer
2237 .snapshot()
2238 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
2239 .collect::<Vec<_>>(),
2240 &[
2241 DiagnosticEntry {
2242 range: Point::new(2, 21)..Point::new(2, 22),
2243 diagnostic: Diagnostic {
2244 source: Some("disk".into()),
2245 severity: DiagnosticSeverity::WARNING,
2246 message: "undefined variable 'A'".to_string(),
2247 is_disk_based: true,
2248 group_id: 6,
2249 is_primary: true,
2250 ..Default::default()
2251 }
2252 },
2253 DiagnosticEntry {
2254 range: Point::new(3, 9)..Point::new(3, 14),
2255 diagnostic: Diagnostic {
2256 source: Some("disk".into()),
2257 severity: DiagnosticSeverity::ERROR,
2258 message: "undefined variable 'BB'".to_string(),
2259 is_disk_based: true,
2260 group_id: 5,
2261 is_primary: true,
2262 ..Default::default()
2263 },
2264 }
2265 ]
2266 );
2267 });
2268}
2269
/// Verifies how zero-width diagnostic ranges are displayed: an empty range is
/// widened to cover an adjacent character so the diagnostic is visible.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish two empty-range errors directly into the LSP store: one at
    // column 10 of line 0 (before the trailing ";") and one at column 10 of
    // line 1 (the end of that line).
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2342
2343#[gpui::test]
2344async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2345 init_test(cx);
2346
2347 let fs = FakeFs::new(cx.executor());
2348 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2349 .await;
2350
2351 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2352 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2353
2354 lsp_store.update(cx, |lsp_store, cx| {
2355 lsp_store
2356 .update_diagnostic_entries(
2357 LanguageServerId(0),
2358 Path::new("/dir/a.rs").to_owned(),
2359 None,
2360 vec![DiagnosticEntry {
2361 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2362 diagnostic: Diagnostic {
2363 severity: DiagnosticSeverity::ERROR,
2364 is_primary: true,
2365 message: "syntax error a1".to_string(),
2366 ..Default::default()
2367 },
2368 }],
2369 cx,
2370 )
2371 .unwrap();
2372 lsp_store
2373 .update_diagnostic_entries(
2374 LanguageServerId(1),
2375 Path::new("/dir/a.rs").to_owned(),
2376 None,
2377 vec![DiagnosticEntry {
2378 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2379 diagnostic: Diagnostic {
2380 severity: DiagnosticSeverity::ERROR,
2381 is_primary: true,
2382 message: "syntax error b1".to_string(),
2383 ..Default::default()
2384 },
2385 }],
2386 cx,
2387 )
2388 .unwrap();
2389
2390 assert_eq!(
2391 lsp_store.diagnostic_summary(false, cx),
2392 DiagnosticSummary {
2393 error_count: 2,
2394 warning_count: 0,
2395 }
2396 );
2397 });
2398}
2399
/// Verifies that `edits_from_lsp` can apply edits that a language server
/// computed against an older version of the buffer: after the user keeps
/// editing, LSP ranges tagged with the stale document version must be
/// translated through the intervening buffer changes before being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the server's
    // edits below will be tagged with this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret server edits expressed against the old document version.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // The returned edits are in current-buffer coordinates: applying them must
    // produce the result the server intended despite the interleaved user edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2554
/// Verifies that `edits_from_lsp` collapses a huge diff-style set of LSP edits
/// (rewriting most of the file) down to the minimal logical edits that
/// actually change buffer content.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The wholesale rewrite above is minimized to just two small edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2665
/// Verifies that `edits_from_lsp` tolerates malformed server edits — ranges
/// sent out of order, inverted (end before start), or extending past the end
/// of the buffer — and still produces the correct minimal edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extending far beyond the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2772
2773fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2774 buffer: &Buffer,
2775 range: Range<T>,
2776) -> Vec<(String, Option<DiagnosticSeverity>)> {
2777 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2778 for chunk in buffer.snapshot().chunks(range, true) {
2779 if chunks.last().map_or(false, |prev_chunk| {
2780 prev_chunk.1 == chunk.diagnostic_severity
2781 }) {
2782 chunks.last_mut().unwrap().0.push_str(chunk.text);
2783 } else {
2784 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2785 }
2786 }
2787 chunks
2788}
2789
/// Go-to-definition into a file outside the project should open the target in
/// a new non-visible worktree, and that worktree should be released once the
/// last reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs will be reached via the definition.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server points the definition at a.rs, outside the project.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was opened in a new, non-visible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: list each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2887
/// When a completion item carries its own `text_edit`, that edit's range and
/// new text take precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item's text_edit replaces the trailing "fqn" (last 3 characters).
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
2965
/// A completion list may supply a default `edit_range` via `item_defaults`.
/// Items without their own `text_edit` fall back to that range, using
/// `insert_text` when present and otherwise the `label` as the new text.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // Default edit_range covers the trailing "fqn" (last 3 characters).
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text is preferred over label when text_edit is absent.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither text_edit nor insert_text, the label is inserted.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3091
/// When a completion item has neither a `text_edit` nor a default edit range,
/// the replacement range must be inferred from the text around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replacement range covers the word before the cursor ("fqn").
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor placed just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replacement range covers "cmp" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3187
/// Carriage returns in server-provided completion text ("\r" and "\r\n") are
/// normalized to plain "\n" before insertion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The insert_text mixes a bare "\r" and a Windows-style "\r\n".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3250
/// Applying a code action that resolves to a command (rather than edits) must
/// execute the command, honor the `workspace/applyEdit` request the server
/// makes while executing it, and report those edits in the resulting project
/// transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Server advertises lazy code-action resolution and a single command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request inserting "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3391
3392#[gpui::test(iterations = 10)]
3393async fn test_save_file(cx: &mut gpui::TestAppContext) {
3394 init_test(cx);
3395
3396 let fs = FakeFs::new(cx.executor());
3397 fs.insert_tree(
3398 path!("/dir"),
3399 json!({
3400 "file1": "the old contents",
3401 }),
3402 )
3403 .await;
3404
3405 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3406 let buffer = project
3407 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3408 .await
3409 .unwrap();
3410 buffer.update(cx, |buffer, cx| {
3411 assert_eq!(buffer.text(), "the old contents");
3412 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3413 });
3414
3415 project
3416 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3417 .await
3418 .unwrap();
3419
3420 let new_text = fs
3421 .load(Path::new(path!("/dir/file1")))
3422 .await
3423 .unwrap()
3424 .replace("\r\n", "\n");
3425 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3426}
3427
3428#[gpui::test(iterations = 30)]
3429async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3430 init_test(cx);
3431
3432 let fs = FakeFs::new(cx.executor().clone());
3433 fs.insert_tree(
3434 path!("/dir"),
3435 json!({
3436 "file1": "the original contents",
3437 }),
3438 )
3439 .await;
3440
3441 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3442 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3443 let buffer = project
3444 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3445 .await
3446 .unwrap();
3447
3448 // Simulate buffer diffs being slow, so that they don't complete before
3449 // the next file change occurs.
3450 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3451
3452 // Change the buffer's file on disk, and then wait for the file change
3453 // to be detected by the worktree, so that the buffer starts reloading.
3454 fs.save(
3455 path!("/dir/file1").as_ref(),
3456 &"the first contents".into(),
3457 Default::default(),
3458 )
3459 .await
3460 .unwrap();
3461 worktree.next_event(cx).await;
3462
3463 // Change the buffer's file again. Depending on the random seed, the
3464 // previous file change may still be in progress.
3465 fs.save(
3466 path!("/dir/file1").as_ref(),
3467 &"the second contents".into(),
3468 Default::default(),
3469 )
3470 .await
3471 .unwrap();
3472 worktree.next_event(cx).await;
3473
3474 cx.executor().run_until_parked();
3475 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3476 buffer.read_with(cx, |buffer, _| {
3477 assert_eq!(buffer.text(), on_disk_text);
3478 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3479 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3480 });
3481}
3482
3483#[gpui::test(iterations = 30)]
3484async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3485 init_test(cx);
3486
3487 let fs = FakeFs::new(cx.executor().clone());
3488 fs.insert_tree(
3489 path!("/dir"),
3490 json!({
3491 "file1": "the original contents",
3492 }),
3493 )
3494 .await;
3495
3496 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3497 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3498 let buffer = project
3499 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3500 .await
3501 .unwrap();
3502
3503 // Simulate buffer diffs being slow, so that they don't complete before
3504 // the next file change occurs.
3505 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3506
3507 // Change the buffer's file on disk, and then wait for the file change
3508 // to be detected by the worktree, so that the buffer starts reloading.
3509 fs.save(
3510 path!("/dir/file1").as_ref(),
3511 &"the first contents".into(),
3512 Default::default(),
3513 )
3514 .await
3515 .unwrap();
3516 worktree.next_event(cx).await;
3517
3518 cx.executor()
3519 .spawn(cx.executor().simulate_random_delay())
3520 .await;
3521
3522 // Perform a noop edit, causing the buffer's version to increase.
3523 buffer.update(cx, |buffer, cx| {
3524 buffer.edit([(0..0, " ")], None, cx);
3525 buffer.undo(cx);
3526 });
3527
3528 cx.executor().run_until_parked();
3529 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3530 buffer.read_with(cx, |buffer, _| {
3531 let buffer_text = buffer.text();
3532 if buffer_text == on_disk_text {
3533 assert!(
3534 !buffer.is_dirty() && !buffer.has_conflict(),
3535 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3536 );
3537 }
3538 // If the file change occurred while the buffer was processing the first
3539 // change, the buffer will be in a conflicting state.
3540 else {
3541 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3542 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3543 }
3544 });
3545}
3546
3547#[gpui::test]
3548async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3549 init_test(cx);
3550
3551 let fs = FakeFs::new(cx.executor());
3552 fs.insert_tree(
3553 path!("/dir"),
3554 json!({
3555 "file1": "the old contents",
3556 }),
3557 )
3558 .await;
3559
3560 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3561 let buffer = project
3562 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3563 .await
3564 .unwrap();
3565 buffer.update(cx, |buffer, cx| {
3566 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3567 });
3568
3569 project
3570 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3571 .await
3572 .unwrap();
3573
3574 let new_text = fs
3575 .load(Path::new(path!("/dir/file1")))
3576 .await
3577 .unwrap()
3578 .replace("\r\n", "\n");
3579 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3580}
3581
3582#[gpui::test]
3583async fn test_save_as(cx: &mut gpui::TestAppContext) {
3584 init_test(cx);
3585
3586 let fs = FakeFs::new(cx.executor());
3587 fs.insert_tree("/dir", json!({})).await;
3588
3589 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3590
3591 let languages = project.update(cx, |project, _| project.languages().clone());
3592 languages.add(rust_lang());
3593
3594 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3595 buffer.update(cx, |buffer, cx| {
3596 buffer.edit([(0..0, "abc")], None, cx);
3597 assert!(buffer.is_dirty());
3598 assert!(!buffer.has_conflict());
3599 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3600 });
3601 project
3602 .update(cx, |project, cx| {
3603 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3604 let path = ProjectPath {
3605 worktree_id,
3606 path: Arc::from(Path::new("file1.rs")),
3607 };
3608 project.save_buffer_as(buffer.clone(), path, cx)
3609 })
3610 .await
3611 .unwrap();
3612 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3613
3614 cx.executor().run_until_parked();
3615 buffer.update(cx, |buffer, cx| {
3616 assert_eq!(
3617 buffer.file().unwrap().full_path(cx),
3618 Path::new("dir/file1.rs")
3619 );
3620 assert!(!buffer.is_dirty());
3621 assert!(!buffer.has_conflict());
3622 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3623 });
3624
3625 let opened_buffer = project
3626 .update(cx, |project, cx| {
3627 project.open_local_buffer("/dir/file1.rs", cx)
3628 })
3629 .await
3630 .unwrap();
3631 assert_eq!(opened_buffer, buffer);
3632}
3633
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Exercises worktree rescanning against a real filesystem: renames and
    // deletions must preserve entry ids, update open buffers' paths/disk
    // state, and replicate consistently to a remote copy of the worktree.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    // A real temp directory is used (not FakeFs) so genuine fs events fire.
    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable entry id for a worktree-relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Capture entry ids before any renames so we can check they're preserved.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Accumulate update messages so they can be replayed into the remote
    // worktree later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The worktree's paths reflect all renames/deletions after the rescan.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including the parent-directory rename).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // keeps its old path but reports `DiskState::Deleted`.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3799
3800#[gpui::test(iterations = 10)]
3801async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3802 init_test(cx);
3803
3804 let fs = FakeFs::new(cx.executor());
3805 fs.insert_tree(
3806 path!("/dir"),
3807 json!({
3808 "a": {
3809 "file1": "",
3810 }
3811 }),
3812 )
3813 .await;
3814
3815 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
3816 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3817 let tree_id = tree.update(cx, |tree, _| tree.id());
3818
3819 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3820 project.update(cx, |project, cx| {
3821 let tree = project.worktrees(cx).next().unwrap();
3822 tree.read(cx)
3823 .entry_for_path(path)
3824 .unwrap_or_else(|| panic!("no entry for path {}", path))
3825 .id
3826 })
3827 };
3828
3829 let dir_id = id_for_path("a", cx);
3830 let file_id = id_for_path("a/file1", cx);
3831 let buffer = project
3832 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3833 .await
3834 .unwrap();
3835 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3836
3837 project
3838 .update(cx, |project, cx| {
3839 project.rename_entry(dir_id, Path::new("b"), cx)
3840 })
3841 .unwrap()
3842 .await
3843 .to_included()
3844 .unwrap();
3845 cx.executor().run_until_parked();
3846
3847 assert_eq!(id_for_path("b", cx), dir_id);
3848 assert_eq!(id_for_path("b/file1", cx), file_id);
3849 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3850}
3851
3852#[gpui::test]
3853async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3854 init_test(cx);
3855
3856 let fs = FakeFs::new(cx.executor());
3857 fs.insert_tree(
3858 "/dir",
3859 json!({
3860 "a.txt": "a-contents",
3861 "b.txt": "b-contents",
3862 }),
3863 )
3864 .await;
3865
3866 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3867
3868 // Spawn multiple tasks to open paths, repeating some paths.
3869 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3870 (
3871 p.open_local_buffer("/dir/a.txt", cx),
3872 p.open_local_buffer("/dir/b.txt", cx),
3873 p.open_local_buffer("/dir/a.txt", cx),
3874 )
3875 });
3876
3877 let buffer_a_1 = buffer_a_1.await.unwrap();
3878 let buffer_a_2 = buffer_a_2.await.unwrap();
3879 let buffer_b = buffer_b.await.unwrap();
3880 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3881 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3882
3883 // There is only one buffer per path.
3884 let buffer_a_id = buffer_a_1.entity_id();
3885 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3886
3887 // Open the same path again while it is still open.
3888 drop(buffer_a_1);
3889 let buffer_a_3 = project
3890 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3891 .await
3892 .unwrap();
3893
3894 // There's still only one buffer per path.
3895 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3896}
3897
3898#[gpui::test]
3899async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3900 init_test(cx);
3901
3902 let fs = FakeFs::new(cx.executor());
3903 fs.insert_tree(
3904 path!("/dir"),
3905 json!({
3906 "file1": "abc",
3907 "file2": "def",
3908 "file3": "ghi",
3909 }),
3910 )
3911 .await;
3912
3913 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3914
3915 let buffer1 = project
3916 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3917 .await
3918 .unwrap();
3919 let events = Arc::new(Mutex::new(Vec::new()));
3920
3921 // initially, the buffer isn't dirty.
3922 buffer1.update(cx, |buffer, cx| {
3923 cx.subscribe(&buffer1, {
3924 let events = events.clone();
3925 move |_, _, event, _| match event {
3926 BufferEvent::Operation { .. } => {}
3927 _ => events.lock().push(event.clone()),
3928 }
3929 })
3930 .detach();
3931
3932 assert!(!buffer.is_dirty());
3933 assert!(events.lock().is_empty());
3934
3935 buffer.edit([(1..2, "")], None, cx);
3936 });
3937
3938 // after the first edit, the buffer is dirty, and emits a dirtied event.
3939 buffer1.update(cx, |buffer, cx| {
3940 assert!(buffer.text() == "ac");
3941 assert!(buffer.is_dirty());
3942 assert_eq!(
3943 *events.lock(),
3944 &[
3945 language::BufferEvent::Edited,
3946 language::BufferEvent::DirtyChanged
3947 ]
3948 );
3949 events.lock().clear();
3950 buffer.did_save(
3951 buffer.version(),
3952 buffer.file().unwrap().disk_state().mtime(),
3953 cx,
3954 );
3955 });
3956
3957 // after saving, the buffer is not dirty, and emits a saved event.
3958 buffer1.update(cx, |buffer, cx| {
3959 assert!(!buffer.is_dirty());
3960 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3961 events.lock().clear();
3962
3963 buffer.edit([(1..1, "B")], None, cx);
3964 buffer.edit([(2..2, "D")], None, cx);
3965 });
3966
3967 // after editing again, the buffer is dirty, and emits another dirty event.
3968 buffer1.update(cx, |buffer, cx| {
3969 assert!(buffer.text() == "aBDc");
3970 assert!(buffer.is_dirty());
3971 assert_eq!(
3972 *events.lock(),
3973 &[
3974 language::BufferEvent::Edited,
3975 language::BufferEvent::DirtyChanged,
3976 language::BufferEvent::Edited,
3977 ],
3978 );
3979 events.lock().clear();
3980
3981 // After restoring the buffer to its previously-saved state,
3982 // the buffer is not considered dirty anymore.
3983 buffer.edit([(1..3, "")], None, cx);
3984 assert!(buffer.text() == "ac");
3985 assert!(!buffer.is_dirty());
3986 });
3987
3988 assert_eq!(
3989 *events.lock(),
3990 &[
3991 language::BufferEvent::Edited,
3992 language::BufferEvent::DirtyChanged
3993 ]
3994 );
3995
3996 // When a file is deleted, it is not considered dirty.
3997 let events = Arc::new(Mutex::new(Vec::new()));
3998 let buffer2 = project
3999 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4000 .await
4001 .unwrap();
4002 buffer2.update(cx, |_, cx| {
4003 cx.subscribe(&buffer2, {
4004 let events = events.clone();
4005 move |_, _, event, _| match event {
4006 BufferEvent::Operation { .. } => {}
4007 _ => events.lock().push(event.clone()),
4008 }
4009 })
4010 .detach();
4011 });
4012
4013 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4014 .await
4015 .unwrap();
4016 cx.executor().run_until_parked();
4017 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4018 assert_eq!(
4019 mem::take(&mut *events.lock()),
4020 &[language::BufferEvent::FileHandleChanged]
4021 );
4022
4023 // Buffer becomes dirty when edited.
4024 buffer2.update(cx, |buffer, cx| {
4025 buffer.edit([(2..3, "")], None, cx);
4026 assert_eq!(buffer.is_dirty(), true);
4027 });
4028 assert_eq!(
4029 mem::take(&mut *events.lock()),
4030 &[
4031 language::BufferEvent::Edited,
4032 language::BufferEvent::DirtyChanged
4033 ]
4034 );
4035
4036 // Buffer becomes clean again when all of its content is removed, because
4037 // the file was deleted.
4038 buffer2.update(cx, |buffer, cx| {
4039 buffer.edit([(0..2, "")], None, cx);
4040 assert_eq!(buffer.is_empty(), true);
4041 assert_eq!(buffer.is_dirty(), false);
4042 });
4043 assert_eq!(
4044 *events.lock(),
4045 &[
4046 language::BufferEvent::Edited,
4047 language::BufferEvent::DirtyChanged
4048 ]
4049 );
4050
4051 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4052 let events = Arc::new(Mutex::new(Vec::new()));
4053 let buffer3 = project
4054 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4055 .await
4056 .unwrap();
4057 buffer3.update(cx, |_, cx| {
4058 cx.subscribe(&buffer3, {
4059 let events = events.clone();
4060 move |_, _, event, _| match event {
4061 BufferEvent::Operation { .. } => {}
4062 _ => events.lock().push(event.clone()),
4063 }
4064 })
4065 .detach();
4066 });
4067
4068 buffer3.update(cx, |buffer, cx| {
4069 buffer.edit([(0..0, "x")], None, cx);
4070 });
4071 events.lock().clear();
4072 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4073 .await
4074 .unwrap();
4075 cx.executor().run_until_parked();
4076 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4077 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4078}
4079
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to on-disk changes: a clean buffer
    // reloads (preserving anchors), while a dirty buffer keeps its contents
    // and is marked as conflicted.
    init_test(cx);

    // The `ˇ` markers indicate offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create anchors at the marked offsets so we can verify they survive
    // the reload triggered by the on-disk change below.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // The freshly-loaded buffer starts out clean and conflict-free.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk while the buffer is unmodified. The new text
    // carries its own markers giving the anchors' expected new offsets.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor has moved to the position predicted by the markers.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4162
4163#[gpui::test]
4164async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4165 init_test(cx);
4166
4167 let fs = FakeFs::new(cx.executor());
4168 fs.insert_tree(
4169 path!("/dir"),
4170 json!({
4171 "file1": "a\nb\nc\n",
4172 "file2": "one\r\ntwo\r\nthree\r\n",
4173 }),
4174 )
4175 .await;
4176
4177 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4178 let buffer1 = project
4179 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4180 .await
4181 .unwrap();
4182 let buffer2 = project
4183 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4184 .await
4185 .unwrap();
4186
4187 buffer1.update(cx, |buffer, _| {
4188 assert_eq!(buffer.text(), "a\nb\nc\n");
4189 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4190 });
4191 buffer2.update(cx, |buffer, _| {
4192 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4193 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4194 });
4195
4196 // Change a file's line endings on disk from unix to windows. The buffer's
4197 // state updates correctly.
4198 fs.save(
4199 path!("/dir/file1").as_ref(),
4200 &"aaa\nb\nc\n".into(),
4201 LineEnding::Windows,
4202 )
4203 .await
4204 .unwrap();
4205 cx.executor().run_until_parked();
4206 buffer1.update(cx, |buffer, _| {
4207 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4208 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4209 });
4210
4211 // Save a file with windows line endings. The file is written correctly.
4212 buffer2.update(cx, |buffer, cx| {
4213 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4214 });
4215 project
4216 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4217 .await
4218 .unwrap();
4219 assert_eq!(
4220 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4221 "one\r\ntwo\r\nthree\r\nfour\r\n",
4222 );
4223}
4224
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics connected through `relatedInformation`
    // are grouped: each group gets one primary entry plus supplementary
    // (hint) entries sharing the same `group_id`, and `diagnostic_group`
    // returns all members of a group.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics message with two logical groups:
    // - "error 1" (warning) + its hint, linked via relatedInformation
    // - "error 2" (error) + two hints, likewise cross-linked
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1"; its relatedInformation points back
            // at the original diagnostic.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // Two hints belonging to "error 2", each pointing back at it.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position: group 1 ("error 1" + hint) at
    // column 8, group 0 ("error 2" hints then primary) after it.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4467
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation protocol around a file rename: the
    // project must send `workspace/willRenameFiles` (and accept the edit the
    // server returns) plus a `workspace/didRenameFiles` notification, when the
    // server registered matching file-operation filters.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server advertises: every `.rs` file and
    // every folder, both under the `file` scheme.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Register a fake Rust server that supports both willRename and didRename
    // for the filters above.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer is what starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename. The returned task can only resolve after the
    // willRenameFiles round-trip below completes.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The workspace edit the server will answer willRenameFiles with; it
    // touches the *other* file (two/two.rs), not the one being renamed.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Record the edit we hand back so we can assert later that the request
    // was actually served. `.next().await` blocks until the request arrives.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // willRenameFiles must carry the old and new URIs of the
                    // single renamed file.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename finishes, the server must also receive the
    // didRenameFiles notification with the same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    // Proves the willRenameFiles handler ran (and thus its edit was returned).
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4596
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end LSP symbol rename: `prepare_rename` resolves the editable
    // range, then `perform_rename` applies the multi-file WorkspaceEdit the
    // server returns.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Fake Rust server advertising rename with prepare support.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Start prepare_rename at offset 7 (inside "ONE"), then serve the request;
    // `.next().await` waits until the handler has actually been invoked.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The LSP range (0,6)..(0,9) maps to byte offsets 6..9 — the "ONE" token.
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Now perform the rename; the server responds with edits in both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both buffers, each with the applied
    // "THREE" replacements.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4736
4737#[gpui::test]
4738async fn test_search(cx: &mut gpui::TestAppContext) {
4739 init_test(cx);
4740
4741 let fs = FakeFs::new(cx.executor());
4742 fs.insert_tree(
4743 path!("/dir"),
4744 json!({
4745 "one.rs": "const ONE: usize = 1;",
4746 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4747 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4748 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4749 }),
4750 )
4751 .await;
4752 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4753 assert_eq!(
4754 search(
4755 &project,
4756 SearchQuery::text(
4757 "TWO",
4758 false,
4759 true,
4760 false,
4761 Default::default(),
4762 Default::default(),
4763 None
4764 )
4765 .unwrap(),
4766 cx
4767 )
4768 .await
4769 .unwrap(),
4770 HashMap::from_iter([
4771 (separator!("dir/two.rs").to_string(), vec![6..9]),
4772 (separator!("dir/three.rs").to_string(), vec![37..40])
4773 ])
4774 );
4775
4776 let buffer_4 = project
4777 .update(cx, |project, cx| {
4778 project.open_local_buffer(path!("/dir/four.rs"), cx)
4779 })
4780 .await
4781 .unwrap();
4782 buffer_4.update(cx, |buffer, cx| {
4783 let text = "two::TWO";
4784 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4785 });
4786
4787 assert_eq!(
4788 search(
4789 &project,
4790 SearchQuery::text(
4791 "TWO",
4792 false,
4793 true,
4794 false,
4795 Default::default(),
4796 Default::default(),
4797 None,
4798 )
4799 .unwrap(),
4800 cx
4801 )
4802 .await
4803 .unwrap(),
4804 HashMap::from_iter([
4805 (separator!("dir/two.rs").to_string(), vec![6..9]),
4806 (separator!("dir/three.rs").to_string(), vec![37..40]),
4807 (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
4808 ])
4809 );
4810}
4811
4812#[gpui::test]
4813async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4814 init_test(cx);
4815
4816 let search_query = "file";
4817
4818 let fs = FakeFs::new(cx.executor());
4819 fs.insert_tree(
4820 path!("/dir"),
4821 json!({
4822 "one.rs": r#"// Rust file one"#,
4823 "one.ts": r#"// TypeScript file one"#,
4824 "two.rs": r#"// Rust file two"#,
4825 "two.ts": r#"// TypeScript file two"#,
4826 }),
4827 )
4828 .await;
4829 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4830
4831 assert!(
4832 search(
4833 &project,
4834 SearchQuery::text(
4835 search_query,
4836 false,
4837 true,
4838 false,
4839 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4840 Default::default(),
4841 None
4842 )
4843 .unwrap(),
4844 cx
4845 )
4846 .await
4847 .unwrap()
4848 .is_empty(),
4849 "If no inclusions match, no files should be returned"
4850 );
4851
4852 assert_eq!(
4853 search(
4854 &project,
4855 SearchQuery::text(
4856 search_query,
4857 false,
4858 true,
4859 false,
4860 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4861 Default::default(),
4862 None
4863 )
4864 .unwrap(),
4865 cx
4866 )
4867 .await
4868 .unwrap(),
4869 HashMap::from_iter([
4870 (separator!("dir/one.rs").to_string(), vec![8..12]),
4871 (separator!("dir/two.rs").to_string(), vec![8..12]),
4872 ]),
4873 "Rust only search should give only Rust files"
4874 );
4875
4876 assert_eq!(
4877 search(
4878 &project,
4879 SearchQuery::text(
4880 search_query,
4881 false,
4882 true,
4883 false,
4884 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4885 Default::default(),
4886 None,
4887 )
4888 .unwrap(),
4889 cx
4890 )
4891 .await
4892 .unwrap(),
4893 HashMap::from_iter([
4894 (separator!("dir/one.ts").to_string(), vec![14..18]),
4895 (separator!("dir/two.ts").to_string(), vec![14..18]),
4896 ]),
4897 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4898 );
4899
4900 assert_eq!(
4901 search(
4902 &project,
4903 SearchQuery::text(
4904 search_query,
4905 false,
4906 true,
4907 false,
4908 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
4909 .unwrap(),
4910 Default::default(),
4911 None,
4912 )
4913 .unwrap(),
4914 cx
4915 )
4916 .await
4917 .unwrap(),
4918 HashMap::from_iter([
4919 (separator!("dir/two.ts").to_string(), vec![14..18]),
4920 (separator!("dir/one.rs").to_string(), vec![8..12]),
4921 (separator!("dir/one.ts").to_string(), vec![14..18]),
4922 (separator!("dir/two.rs").to_string(), vec![8..12]),
4923 ]),
4924 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4925 );
4926}
4927
4928#[gpui::test]
4929async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4930 init_test(cx);
4931
4932 let search_query = "file";
4933
4934 let fs = FakeFs::new(cx.executor());
4935 fs.insert_tree(
4936 path!("/dir"),
4937 json!({
4938 "one.rs": r#"// Rust file one"#,
4939 "one.ts": r#"// TypeScript file one"#,
4940 "two.rs": r#"// Rust file two"#,
4941 "two.ts": r#"// TypeScript file two"#,
4942 }),
4943 )
4944 .await;
4945 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4946
4947 assert_eq!(
4948 search(
4949 &project,
4950 SearchQuery::text(
4951 search_query,
4952 false,
4953 true,
4954 false,
4955 Default::default(),
4956 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4957 None,
4958 )
4959 .unwrap(),
4960 cx
4961 )
4962 .await
4963 .unwrap(),
4964 HashMap::from_iter([
4965 (separator!("dir/one.rs").to_string(), vec![8..12]),
4966 (separator!("dir/one.ts").to_string(), vec![14..18]),
4967 (separator!("dir/two.rs").to_string(), vec![8..12]),
4968 (separator!("dir/two.ts").to_string(), vec![14..18]),
4969 ]),
4970 "If no exclusions match, all files should be returned"
4971 );
4972
4973 assert_eq!(
4974 search(
4975 &project,
4976 SearchQuery::text(
4977 search_query,
4978 false,
4979 true,
4980 false,
4981 Default::default(),
4982 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4983 None,
4984 )
4985 .unwrap(),
4986 cx
4987 )
4988 .await
4989 .unwrap(),
4990 HashMap::from_iter([
4991 (separator!("dir/one.ts").to_string(), vec![14..18]),
4992 (separator!("dir/two.ts").to_string(), vec![14..18]),
4993 ]),
4994 "Rust exclusion search should give only TypeScript files"
4995 );
4996
4997 assert_eq!(
4998 search(
4999 &project,
5000 SearchQuery::text(
5001 search_query,
5002 false,
5003 true,
5004 false,
5005 Default::default(),
5006 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5007 None,
5008 )
5009 .unwrap(),
5010 cx
5011 )
5012 .await
5013 .unwrap(),
5014 HashMap::from_iter([
5015 (separator!("dir/one.rs").to_string(), vec![8..12]),
5016 (separator!("dir/two.rs").to_string(), vec![8..12]),
5017 ]),
5018 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5019 );
5020
5021 assert!(
5022 search(
5023 &project,
5024 SearchQuery::text(
5025 search_query,
5026 false,
5027 true,
5028 false,
5029 Default::default(),
5030 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5031 .unwrap(),
5032 None,
5033 )
5034 .unwrap(),
5035 cx
5036 )
5037 .await
5038 .unwrap()
5039 .is_empty(),
5040 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5041 );
5042}
5043
5044#[gpui::test]
5045async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5046 init_test(cx);
5047
5048 let search_query = "file";
5049
5050 let fs = FakeFs::new(cx.executor());
5051 fs.insert_tree(
5052 path!("/dir"),
5053 json!({
5054 "one.rs": r#"// Rust file one"#,
5055 "one.ts": r#"// TypeScript file one"#,
5056 "two.rs": r#"// Rust file two"#,
5057 "two.ts": r#"// TypeScript file two"#,
5058 }),
5059 )
5060 .await;
5061 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5062
5063 assert!(
5064 search(
5065 &project,
5066 SearchQuery::text(
5067 search_query,
5068 false,
5069 true,
5070 false,
5071 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5072 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5073 None,
5074 )
5075 .unwrap(),
5076 cx
5077 )
5078 .await
5079 .unwrap()
5080 .is_empty(),
5081 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5082 );
5083
5084 assert!(
5085 search(
5086 &project,
5087 SearchQuery::text(
5088 search_query,
5089 false,
5090 true,
5091 false,
5092 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5093 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5094 None,
5095 )
5096 .unwrap(),
5097 cx
5098 )
5099 .await
5100 .unwrap()
5101 .is_empty(),
5102 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5103 );
5104
5105 assert!(
5106 search(
5107 &project,
5108 SearchQuery::text(
5109 search_query,
5110 false,
5111 true,
5112 false,
5113 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5114 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5115 None,
5116 )
5117 .unwrap(),
5118 cx
5119 )
5120 .await
5121 .unwrap()
5122 .is_empty(),
5123 "Non-matching inclusions and exclusions should not change that."
5124 );
5125
5126 assert_eq!(
5127 search(
5128 &project,
5129 SearchQuery::text(
5130 search_query,
5131 false,
5132 true,
5133 false,
5134 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5135 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5136 None,
5137 )
5138 .unwrap(),
5139 cx
5140 )
5141 .await
5142 .unwrap(),
5143 HashMap::from_iter([
5144 (separator!("dir/one.ts").to_string(), vec![14..18]),
5145 (separator!("dir/two.ts").to_string(), vec![14..18]),
5146 ]),
5147 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5148 );
5149}
5150
5151#[gpui::test]
5152async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5153 init_test(cx);
5154
5155 let fs = FakeFs::new(cx.executor());
5156 fs.insert_tree(
5157 path!("/worktree-a"),
5158 json!({
5159 "haystack.rs": r#"// NEEDLE"#,
5160 "haystack.ts": r#"// NEEDLE"#,
5161 }),
5162 )
5163 .await;
5164 fs.insert_tree(
5165 path!("/worktree-b"),
5166 json!({
5167 "haystack.rs": r#"// NEEDLE"#,
5168 "haystack.ts": r#"// NEEDLE"#,
5169 }),
5170 )
5171 .await;
5172
5173 let project = Project::test(
5174 fs.clone(),
5175 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5176 cx,
5177 )
5178 .await;
5179
5180 assert_eq!(
5181 search(
5182 &project,
5183 SearchQuery::text(
5184 "NEEDLE",
5185 false,
5186 true,
5187 false,
5188 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5189 Default::default(),
5190 None,
5191 )
5192 .unwrap(),
5193 cx
5194 )
5195 .await
5196 .unwrap(),
5197 HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5198 "should only return results from included worktree"
5199 );
5200 assert_eq!(
5201 search(
5202 &project,
5203 SearchQuery::text(
5204 "NEEDLE",
5205 false,
5206 true,
5207 false,
5208 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5209 Default::default(),
5210 None,
5211 )
5212 .unwrap(),
5213 cx
5214 )
5215 .await
5216 .unwrap(),
5217 HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5218 "should only return results from included worktree"
5219 );
5220
5221 assert_eq!(
5222 search(
5223 &project,
5224 SearchQuery::text(
5225 "NEEDLE",
5226 false,
5227 true,
5228 false,
5229 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5230 Default::default(),
5231 None,
5232 )
5233 .unwrap(),
5234 cx
5235 )
5236 .await
5237 .unwrap(),
5238 HashMap::from_iter([
5239 (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5240 (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
5241 ]),
5242 "should return results from both worktrees"
5243 );
5244}
5245
5246#[gpui::test]
5247async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
5248 init_test(cx);
5249
5250 let fs = FakeFs::new(cx.background_executor.clone());
5251 fs.insert_tree(
5252 path!("/dir"),
5253 json!({
5254 ".git": {},
5255 ".gitignore": "**/target\n/node_modules\n",
5256 "target": {
5257 "index.txt": "index_key:index_value"
5258 },
5259 "node_modules": {
5260 "eslint": {
5261 "index.ts": "const eslint_key = 'eslint value'",
5262 "package.json": r#"{ "some_key": "some value" }"#,
5263 },
5264 "prettier": {
5265 "index.ts": "const prettier_key = 'prettier value'",
5266 "package.json": r#"{ "other_key": "other value" }"#,
5267 },
5268 },
5269 "package.json": r#"{ "main_key": "main value" }"#,
5270 }),
5271 )
5272 .await;
5273 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5274
5275 let query = "key";
5276 assert_eq!(
5277 search(
5278 &project,
5279 SearchQuery::text(
5280 query,
5281 false,
5282 false,
5283 false,
5284 Default::default(),
5285 Default::default(),
5286 None,
5287 )
5288 .unwrap(),
5289 cx
5290 )
5291 .await
5292 .unwrap(),
5293 HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
5294 "Only one non-ignored file should have the query"
5295 );
5296
5297 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5298 assert_eq!(
5299 search(
5300 &project,
5301 SearchQuery::text(
5302 query,
5303 false,
5304 false,
5305 true,
5306 Default::default(),
5307 Default::default(),
5308 None,
5309 )
5310 .unwrap(),
5311 cx
5312 )
5313 .await
5314 .unwrap(),
5315 HashMap::from_iter([
5316 (separator!("dir/package.json").to_string(), vec![8..11]),
5317 (separator!("dir/target/index.txt").to_string(), vec![6..9]),
5318 (
5319 separator!("dir/node_modules/prettier/package.json").to_string(),
5320 vec![9..12]
5321 ),
5322 (
5323 separator!("dir/node_modules/prettier/index.ts").to_string(),
5324 vec![15..18]
5325 ),
5326 (
5327 separator!("dir/node_modules/eslint/index.ts").to_string(),
5328 vec![13..16]
5329 ),
5330 (
5331 separator!("dir/node_modules/eslint/package.json").to_string(),
5332 vec![8..11]
5333 ),
5334 ]),
5335 "Unrestricted search with ignored directories should find every file with the query"
5336 );
5337
5338 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
5339 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
5340 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5341 assert_eq!(
5342 search(
5343 &project,
5344 SearchQuery::text(
5345 query,
5346 false,
5347 false,
5348 true,
5349 files_to_include,
5350 files_to_exclude,
5351 None,
5352 )
5353 .unwrap(),
5354 cx
5355 )
5356 .await
5357 .unwrap(),
5358 HashMap::from_iter([(
5359 separator!("dir/node_modules/prettier/package.json").to_string(),
5360 vec![9..12]
5361 )]),
5362 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5363 );
5364}
5365
5366#[gpui::test]
5367async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5368 init_test(cx);
5369
5370 let fs = FakeFs::new(cx.executor().clone());
5371 fs.insert_tree(
5372 "/one/two",
5373 json!({
5374 "three": {
5375 "a.txt": "",
5376 "four": {}
5377 },
5378 "c.rs": ""
5379 }),
5380 )
5381 .await;
5382
5383 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5384 project
5385 .update(cx, |project, cx| {
5386 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5387 project.create_entry((id, "b.."), true, cx)
5388 })
5389 .await
5390 .unwrap()
5391 .to_included()
5392 .unwrap();
5393
5394 // Can't create paths outside the project
5395 let result = project
5396 .update(cx, |project, cx| {
5397 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5398 project.create_entry((id, "../../boop"), true, cx)
5399 })
5400 .await;
5401 assert!(result.is_err());
5402
5403 // Can't create paths with '..'
5404 let result = project
5405 .update(cx, |project, cx| {
5406 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5407 project.create_entry((id, "four/../beep"), true, cx)
5408 })
5409 .await;
5410 assert!(result.is_err());
5411
5412 assert_eq!(
5413 fs.paths(true),
5414 vec![
5415 PathBuf::from(path!("/")),
5416 PathBuf::from(path!("/one")),
5417 PathBuf::from(path!("/one/two")),
5418 PathBuf::from(path!("/one/two/c.rs")),
5419 PathBuf::from(path!("/one/two/three")),
5420 PathBuf::from(path!("/one/two/three/a.txt")),
5421 PathBuf::from(path!("/one/two/three/b..")),
5422 PathBuf::from(path!("/one/two/three/four")),
5423 ]
5424 );
5425
5426 // And we cannot open buffers with '..'
5427 let result = project
5428 .update(cx, |project, cx| {
5429 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5430 project.open_buffer((id, "../c.rs"), cx)
5431 })
5432 .await;
5433 assert!(result.is_err())
5434}
5435
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // One buffer served by several language servers at once: hover results
    // must be gathered from every server that advertises hover support, and a
    // server without that capability must never be queried.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for "tsx": the first three advertise hover support
    // (ESLintServer will answer with `None`), the fourth has none.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to come up and install a hover handler matching
    // its advertised capabilities; keep the handler streams so we can later
    // confirm each capable server actually received a request.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two reply with a hover string derived from their name.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // Has the capability but legitimately returns no hover.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Must never be asked: it advertised no hover capability.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover, then make sure every capable server received the
    // request before asserting on the merged, sorted result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5589
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A server may return hover contents consisting only of empty/whitespace
    // strings; the project should filter those out and produce no hovers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the fake server.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Hover response whose parts are all empty or whitespace-only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Request a hover, wait until the server actually handled the request,
    // then assert the whitespace-only parts were dropped.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5662
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // When code actions are requested with an explicit `kinds` filter, only
    // actions of the requested kinds should be returned, even though the
    // server offers more.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the fake server.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always offers two actions of different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask only for SOURCE_ORGANIZE_IMPORTS over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Wait until the server actually served the request before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The SOURCE_FIX_ALL action must have been filtered out.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5741
5742#[gpui::test]
5743async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5744 init_test(cx);
5745
5746 let fs = FakeFs::new(cx.executor());
5747 fs.insert_tree(
5748 path!("/dir"),
5749 json!({
5750 "a.tsx": "a",
5751 }),
5752 )
5753 .await;
5754
5755 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5756
5757 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5758 language_registry.add(tsx_lang());
5759 let language_server_names = [
5760 "TypeScriptServer",
5761 "TailwindServer",
5762 "ESLintServer",
5763 "NoActionsCapabilitiesServer",
5764 ];
5765
5766 let mut language_server_rxs = [
5767 language_registry.register_fake_lsp(
5768 "tsx",
5769 FakeLspAdapter {
5770 name: language_server_names[0],
5771 capabilities: lsp::ServerCapabilities {
5772 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5773 ..lsp::ServerCapabilities::default()
5774 },
5775 ..FakeLspAdapter::default()
5776 },
5777 ),
5778 language_registry.register_fake_lsp(
5779 "tsx",
5780 FakeLspAdapter {
5781 name: language_server_names[1],
5782 capabilities: lsp::ServerCapabilities {
5783 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5784 ..lsp::ServerCapabilities::default()
5785 },
5786 ..FakeLspAdapter::default()
5787 },
5788 ),
5789 language_registry.register_fake_lsp(
5790 "tsx",
5791 FakeLspAdapter {
5792 name: language_server_names[2],
5793 capabilities: lsp::ServerCapabilities {
5794 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5795 ..lsp::ServerCapabilities::default()
5796 },
5797 ..FakeLspAdapter::default()
5798 },
5799 ),
5800 language_registry.register_fake_lsp(
5801 "tsx",
5802 FakeLspAdapter {
5803 name: language_server_names[3],
5804 capabilities: lsp::ServerCapabilities {
5805 code_action_provider: None,
5806 ..lsp::ServerCapabilities::default()
5807 },
5808 ..FakeLspAdapter::default()
5809 },
5810 ),
5811 ];
5812
5813 let (buffer, _handle) = project
5814 .update(cx, |p, cx| {
5815 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5816 })
5817 .await
5818 .unwrap();
5819 cx.executor().run_until_parked();
5820
5821 let mut servers_with_actions_requests = HashMap::default();
5822 for i in 0..language_server_names.len() {
5823 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5824 panic!(
5825 "Failed to get language server #{i} with name {}",
5826 &language_server_names[i]
5827 )
5828 });
5829 let new_server_name = new_server.server.name();
5830
5831 assert!(
5832 !servers_with_actions_requests.contains_key(&new_server_name),
5833 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5834 );
5835 match new_server_name.0.as_ref() {
5836 "TailwindServer" | "TypeScriptServer" => {
5837 servers_with_actions_requests.insert(
5838 new_server_name.clone(),
5839 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5840 move |_, _| {
5841 let name = new_server_name.clone();
5842 async move {
5843 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5844 lsp::CodeAction {
5845 title: format!("{name} code action"),
5846 ..lsp::CodeAction::default()
5847 },
5848 )]))
5849 }
5850 },
5851 ),
5852 );
5853 }
5854 "ESLintServer" => {
5855 servers_with_actions_requests.insert(
5856 new_server_name,
5857 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5858 |_, _| async move { Ok(None) },
5859 ),
5860 );
5861 }
5862 "NoActionsCapabilitiesServer" => {
5863 let _never_handled = new_server
5864 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5865 panic!(
5866 "Should not call for code actions server with no corresponding capabilities"
5867 )
5868 });
5869 }
5870 unexpected => panic!("Unexpected server name: {unexpected}"),
5871 }
5872 }
5873
5874 let code_actions_task = project.update(cx, |project, cx| {
5875 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5876 });
5877
5878 // cx.run_until_parked();
5879 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5880 |mut code_actions_request| async move {
5881 code_actions_request
5882 .next()
5883 .await
5884 .expect("All code actions requests should have been triggered")
5885 },
5886 ))
5887 .await;
5888 assert_eq!(
5889 vec!["TailwindServer code action", "TypeScriptServer code action"],
5890 code_actions_task
5891 .await
5892 .unwrap()
5893 .into_iter()
5894 .map(|code_action| code_action.lsp_action.title().to_owned())
5895 .sorted()
5896 .collect::<Vec<_>>(),
5897 "Should receive code actions responses from all related servers with hover capabilities"
5898 );
5899}
5900
5901#[gpui::test]
5902async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5903 init_test(cx);
5904
5905 let fs = FakeFs::new(cx.executor());
5906 fs.insert_tree(
5907 "/dir",
5908 json!({
5909 "a.rs": "let a = 1;",
5910 "b.rs": "let b = 2;",
5911 "c.rs": "let c = 2;",
5912 }),
5913 )
5914 .await;
5915
5916 let project = Project::test(
5917 fs,
5918 [
5919 "/dir/a.rs".as_ref(),
5920 "/dir/b.rs".as_ref(),
5921 "/dir/c.rs".as_ref(),
5922 ],
5923 cx,
5924 )
5925 .await;
5926
5927 // check the initial state and get the worktrees
5928 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5929 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5930 assert_eq!(worktrees.len(), 3);
5931
5932 let worktree_a = worktrees[0].read(cx);
5933 let worktree_b = worktrees[1].read(cx);
5934 let worktree_c = worktrees[2].read(cx);
5935
5936 // check they start in the right order
5937 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5938 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5939 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5940
5941 (
5942 worktrees[0].clone(),
5943 worktrees[1].clone(),
5944 worktrees[2].clone(),
5945 )
5946 });
5947
5948 // move first worktree to after the second
5949 // [a, b, c] -> [b, a, c]
5950 project
5951 .update(cx, |project, cx| {
5952 let first = worktree_a.read(cx);
5953 let second = worktree_b.read(cx);
5954 project.move_worktree(first.id(), second.id(), cx)
5955 })
5956 .expect("moving first after second");
5957
5958 // check the state after moving
5959 project.update(cx, |project, cx| {
5960 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5961 assert_eq!(worktrees.len(), 3);
5962
5963 let first = worktrees[0].read(cx);
5964 let second = worktrees[1].read(cx);
5965 let third = worktrees[2].read(cx);
5966
5967 // check they are now in the right order
5968 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5969 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5970 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5971 });
5972
5973 // move the second worktree to before the first
5974 // [b, a, c] -> [a, b, c]
5975 project
5976 .update(cx, |project, cx| {
5977 let second = worktree_a.read(cx);
5978 let first = worktree_b.read(cx);
5979 project.move_worktree(first.id(), second.id(), cx)
5980 })
5981 .expect("moving second before first");
5982
5983 // check the state after moving
5984 project.update(cx, |project, cx| {
5985 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5986 assert_eq!(worktrees.len(), 3);
5987
5988 let first = worktrees[0].read(cx);
5989 let second = worktrees[1].read(cx);
5990 let third = worktrees[2].read(cx);
5991
5992 // check they are now in the right order
5993 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5994 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5995 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5996 });
5997
5998 // move the second worktree to after the third
5999 // [a, b, c] -> [a, c, b]
6000 project
6001 .update(cx, |project, cx| {
6002 let second = worktree_b.read(cx);
6003 let third = worktree_c.read(cx);
6004 project.move_worktree(second.id(), third.id(), cx)
6005 })
6006 .expect("moving second after third");
6007
6008 // check the state after moving
6009 project.update(cx, |project, cx| {
6010 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6011 assert_eq!(worktrees.len(), 3);
6012
6013 let first = worktrees[0].read(cx);
6014 let second = worktrees[1].read(cx);
6015 let third = worktrees[2].read(cx);
6016
6017 // check they are now in the right order
6018 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6019 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6020 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6021 });
6022
6023 // move the third worktree to before the second
6024 // [a, c, b] -> [a, b, c]
6025 project
6026 .update(cx, |project, cx| {
6027 let third = worktree_c.read(cx);
6028 let second = worktree_b.read(cx);
6029 project.move_worktree(third.id(), second.id(), cx)
6030 })
6031 .expect("moving third before second");
6032
6033 // check the state after moving
6034 project.update(cx, |project, cx| {
6035 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6036 assert_eq!(worktrees.len(), 3);
6037
6038 let first = worktrees[0].read(cx);
6039 let second = worktrees[1].read(cx);
6040 let third = worktrees[2].read(cx);
6041
6042 // check they are now in the right order
6043 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6044 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6045 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6046 });
6047
6048 // move the first worktree to after the third
6049 // [a, b, c] -> [b, c, a]
6050 project
6051 .update(cx, |project, cx| {
6052 let first = worktree_a.read(cx);
6053 let third = worktree_c.read(cx);
6054 project.move_worktree(first.id(), third.id(), cx)
6055 })
6056 .expect("moving first after third");
6057
6058 // check the state after moving
6059 project.update(cx, |project, cx| {
6060 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6061 assert_eq!(worktrees.len(), 3);
6062
6063 let first = worktrees[0].read(cx);
6064 let second = worktrees[1].read(cx);
6065 let third = worktrees[2].read(cx);
6066
6067 // check they are now in the right order
6068 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6069 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6070 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6071 });
6072
6073 // move the third worktree to before the first
6074 // [b, c, a] -> [a, b, c]
6075 project
6076 .update(cx, |project, cx| {
6077 let third = worktree_a.read(cx);
6078 let first = worktree_b.read(cx);
6079 project.move_worktree(third.id(), first.id(), cx)
6080 })
6081 .expect("moving third before first");
6082
6083 // check the state after moving
6084 project.update(cx, |project, cx| {
6085 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6086 assert_eq!(worktrees.len(), 3);
6087
6088 let first = worktrees[0].read(cx);
6089 let second = worktrees[1].read(cx);
6090 let third = worktrees[2].read(cx);
6091
6092 // check they are now in the right order
6093 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6094 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6095 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6096 });
6097}
6098
6099#[gpui::test]
6100async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6101 init_test(cx);
6102
6103 let staged_contents = r#"
6104 fn main() {
6105 println!("hello world");
6106 }
6107 "#
6108 .unindent();
6109 let file_contents = r#"
6110 // print goodbye
6111 fn main() {
6112 println!("goodbye world");
6113 }
6114 "#
6115 .unindent();
6116
6117 let fs = FakeFs::new(cx.background_executor.clone());
6118 fs.insert_tree(
6119 "/dir",
6120 json!({
6121 ".git": {},
6122 "src": {
6123 "main.rs": file_contents,
6124 }
6125 }),
6126 )
6127 .await;
6128
6129 fs.set_index_for_repo(
6130 Path::new("/dir/.git"),
6131 &[("src/main.rs".into(), staged_contents)],
6132 );
6133
6134 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6135
6136 let buffer = project
6137 .update(cx, |project, cx| {
6138 project.open_local_buffer("/dir/src/main.rs", cx)
6139 })
6140 .await
6141 .unwrap();
6142 let unstaged_diff = project
6143 .update(cx, |project, cx| {
6144 project.open_unstaged_diff(buffer.clone(), cx)
6145 })
6146 .await
6147 .unwrap();
6148
6149 cx.run_until_parked();
6150 unstaged_diff.update(cx, |unstaged_diff, cx| {
6151 let snapshot = buffer.read(cx).snapshot();
6152 assert_hunks(
6153 unstaged_diff.hunks(&snapshot, cx),
6154 &snapshot,
6155 &unstaged_diff.base_text_string().unwrap(),
6156 &[
6157 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6158 (
6159 2..3,
6160 " println!(\"hello world\");\n",
6161 " println!(\"goodbye world\");\n",
6162 DiffHunkStatus::modified_none(),
6163 ),
6164 ],
6165 );
6166 });
6167
6168 let staged_contents = r#"
6169 // print goodbye
6170 fn main() {
6171 }
6172 "#
6173 .unindent();
6174
6175 fs.set_index_for_repo(
6176 Path::new("/dir/.git"),
6177 &[("src/main.rs".into(), staged_contents)],
6178 );
6179
6180 cx.run_until_parked();
6181 unstaged_diff.update(cx, |unstaged_diff, cx| {
6182 let snapshot = buffer.read(cx).snapshot();
6183 assert_hunks(
6184 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6185 &snapshot,
6186 &unstaged_diff.base_text().text(),
6187 &[(
6188 2..3,
6189 "",
6190 " println!(\"goodbye world\");\n",
6191 DiffHunkStatus::added_none(),
6192 )],
6193 );
6194 });
6195}
6196
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an uncommitted diff (buffer contents vs. HEAD) tracks
    // changes to the git HEAD and index, including for a file that exists in
    // HEAD but has been deleted from the working tree.
    init_test(cx);

    // Contents at HEAD.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Contents in the index: the println change is staged, the comment is not.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Contents in the working copy.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (the HEAD version) should pick up the buffer's
    // language, so it can be syntax-highlighted.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                // The added comment is not in the index, so it still has a
                // secondary (unstaged) hunk.
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                // The println change is already staged.
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            // The whole file shows as one deletion hunk; the file is still in
            // the index, so the deletion is not yet staged.
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            // Once the deletion is staged, the secondary hunk disappears.
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6374
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Exercises staging individual diff hunks: the optimistic
    // `SecondaryHunkRemovalPending` state while the index write is in flight,
    // the `BufferDiffEvent`s emitted along the way, rollback when the index
    // write fails, and two staging operations issued back-to-back.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index both start with this content...
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // ...while the working copy deletes "zero" and modifies "two" and "four",
    // producing three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so we can assert on the exact sequence.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                // Pending: the index write hasn't completed yet.
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It also appears as pending at first.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6714
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Exercises staging hunks while filesystem events are paused: staging
    // operations issued before earlier index-write events have been observed
    // must still all converge to "staged" once the buffered events are
    // flushed. The explicit seeds reproduce previously-failing schedules.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index both start with this content; the working copy deletes
    // "zero" and modifies "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It shows as pending since no FS event has been
    // delivered yet.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6907
// Stress test: staging/unstaging many hunks in rapid succession must surface
// pending secondary statuses synchronously, and settle to the final staged /
// unstaged state once the queued index writes complete.
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Build a 500-line file in which every 5th line differs from the
    // committed version, yielding 100 separate single-line modified hunks.
    let different_lines = (0..500)
        .step_by(5)
        .map(|i| format!("diff {}\n", i))
        .collect::<Vec<String>>();
    let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
    let file_contents = (0..500)
        .map(|i| {
            if i % 5 == 0 {
                different_lines[i / 5].clone()
            } else {
                format!("{}\n", i)
            }
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index both hold the committed contents, so every hunk
    // starts out unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Expected hunks: row i..i+1 replaces "{i}\n" with "diff {i}\n" for every
    // multiple of 5. The status field is mutated in place between phases.
    let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
        .step_by(5)
        .map(|i| {
            (
                i as u32..i as u32 + 1,
                format!("{}\n", i),
                different_lines[i / 5].clone(),
                DiffHunkStatus::modified(HasSecondaryHunk),
            )
        })
        .collect();

    // The hunks are initially unstaged
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
    }

    // Stage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        }

        // Before the index writes land, every hunk reports staging as pending.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(NoSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
    }

    // Unstage every hunk with a different call
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        for hunk in hunks {
            diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
        }

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });

    // If we wait, we'll have no pending hunks, again
    cx.run_until_parked();
    for (_, _, _, status) in expected_hunks.iter_mut() {
        *status = DiffHunkStatus::modified(HasSecondaryHunk);
    }

    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &expected_hunks,
        );
    });
}
7053
7054#[gpui::test]
7055async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7056 init_test(cx);
7057
7058 let committed_contents = r#"
7059 fn main() {
7060 println!("hello from HEAD");
7061 }
7062 "#
7063 .unindent();
7064 let file_contents = r#"
7065 fn main() {
7066 println!("hello from the working copy");
7067 }
7068 "#
7069 .unindent();
7070
7071 let fs = FakeFs::new(cx.background_executor.clone());
7072 fs.insert_tree(
7073 "/dir",
7074 json!({
7075 ".git": {},
7076 "src": {
7077 "main.rs": file_contents,
7078 }
7079 }),
7080 )
7081 .await;
7082
7083 fs.set_head_for_repo(
7084 Path::new("/dir/.git"),
7085 &[("src/main.rs".into(), committed_contents.clone())],
7086 );
7087 fs.set_index_for_repo(
7088 Path::new("/dir/.git"),
7089 &[("src/main.rs".into(), committed_contents.clone())],
7090 );
7091
7092 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7093
7094 let buffer = project
7095 .update(cx, |project, cx| {
7096 project.open_local_buffer("/dir/src/main.rs", cx)
7097 })
7098 .await
7099 .unwrap();
7100 let uncommitted_diff = project
7101 .update(cx, |project, cx| {
7102 project.open_uncommitted_diff(buffer.clone(), cx)
7103 })
7104 .await
7105 .unwrap();
7106
7107 cx.run_until_parked();
7108 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7109 let snapshot = buffer.read(cx).snapshot();
7110 assert_hunks(
7111 uncommitted_diff.hunks(&snapshot, cx),
7112 &snapshot,
7113 &uncommitted_diff.base_text_string().unwrap(),
7114 &[(
7115 1..2,
7116 " println!(\"hello from HEAD\");\n",
7117 " println!(\"hello from the working copy\");\n",
7118 DiffHunkStatus {
7119 kind: DiffHunkStatusKind::Modified,
7120 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7121 },
7122 )],
7123 );
7124 });
7125}
7126
7127#[gpui::test]
7128async fn test_repository_and_path_for_project_path(
7129 background_executor: BackgroundExecutor,
7130 cx: &mut gpui::TestAppContext,
7131) {
7132 init_test(cx);
7133 let fs = FakeFs::new(background_executor);
7134 fs.insert_tree(
7135 path!("/root"),
7136 json!({
7137 "c.txt": "",
7138 "dir1": {
7139 ".git": {},
7140 "deps": {
7141 "dep1": {
7142 ".git": {},
7143 "src": {
7144 "a.txt": ""
7145 }
7146 }
7147 },
7148 "src": {
7149 "b.txt": ""
7150 }
7151 },
7152 }),
7153 )
7154 .await;
7155
7156 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7157 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7158 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7159 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7160 .await;
7161 tree.flush_fs_events(cx).await;
7162
7163 project.read_with(cx, |project, cx| {
7164 let git_store = project.git_store().read(cx);
7165 let pairs = [
7166 ("c.txt", None),
7167 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7168 (
7169 "dir1/deps/dep1/src/a.txt",
7170 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7171 ),
7172 ];
7173 let expected = pairs
7174 .iter()
7175 .map(|(path, result)| {
7176 (
7177 path,
7178 result.map(|(repo, repo_path)| {
7179 (Path::new(repo).into(), RepoPath::from(repo_path))
7180 }),
7181 )
7182 })
7183 .collect::<Vec<_>>();
7184 let actual = pairs
7185 .iter()
7186 .map(|(path, _)| {
7187 let project_path = (tree_id, Path::new(path)).into();
7188 let result = maybe!({
7189 let (repo, repo_path) =
7190 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7191 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7192 });
7193 (path, result)
7194 })
7195 .collect::<Vec<_>>();
7196 pretty_assertions::assert_eq!(expected, actual);
7197 });
7198
7199 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7200 .await
7201 .unwrap();
7202 tree.flush_fs_events(cx).await;
7203
7204 project.read_with(cx, |project, cx| {
7205 let git_store = project.git_store().read(cx);
7206 assert_eq!(
7207 git_store.repository_and_path_for_project_path(
7208 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7209 cx
7210 ),
7211 None
7212 );
7213 });
7214}
7215
7216#[gpui::test]
7217async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7218 init_test(cx);
7219 let fs = FakeFs::new(cx.background_executor.clone());
7220 fs.insert_tree(
7221 path!("/root"),
7222 json!({
7223 "home": {
7224 ".git": {},
7225 "project": {
7226 "a.txt": "A"
7227 },
7228 },
7229 }),
7230 )
7231 .await;
7232 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7233
7234 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7235 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7236 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7237 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7238 .await;
7239 tree.flush_fs_events(cx).await;
7240
7241 project.read_with(cx, |project, cx| {
7242 let containing = project
7243 .git_store()
7244 .read(cx)
7245 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7246 assert!(containing.is_none());
7247 });
7248
7249 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7250 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7251 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7252 tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7253 .await;
7254 tree.flush_fs_events(cx).await;
7255
7256 project.read_with(cx, |project, cx| {
7257 let containing = project
7258 .git_store()
7259 .read(cx)
7260 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7261 assert_eq!(
7262 containing
7263 .unwrap()
7264 .0
7265 .read(cx)
7266 .work_directory_abs_path
7267 .as_ref(),
7268 Path::new(path!("/root/home"))
7269 );
7270 });
7271}
7272
// End-to-end check of `Repository::cached_status` against a real on-disk git
// repository, across working-copy modifications, commits, and deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce a deletion (d.txt) and a modification (a.txt) in the working copy.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify the previously-unchanged tracked file; its status should appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the deletion, which clears their statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked (a.txt) and one untracked (b.txt) file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
7398
7399#[gpui::test]
7400async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
7401 init_test(cx);
7402 cx.executor().allow_parking();
7403
7404 let root = TempTree::new(json!({
7405 "project": {
7406 "sub": {},
7407 "a.txt": "",
7408 },
7409 }));
7410
7411 let work_dir = root.path().join("project");
7412 let repo = git_init(work_dir.as_path());
7413 // a.txt exists in HEAD and the working copy but is deleted in the index.
7414 git_add("a.txt", &repo);
7415 git_commit("Initial commit", &repo);
7416 git_remove_index("a.txt".as_ref(), &repo);
7417 // `sub` is a nested git repository.
7418 let _sub = git_init(&work_dir.join("sub"));
7419
7420 let project = Project::test(
7421 Arc::new(RealFs::new(None, cx.executor())),
7422 [root.path()],
7423 cx,
7424 )
7425 .await;
7426
7427 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7428 tree.flush_fs_events(cx).await;
7429 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7430 .await;
7431 cx.executor().run_until_parked();
7432
7433 let repository = project.read_with(cx, |project, cx| {
7434 project
7435 .repositories(cx)
7436 .values()
7437 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
7438 .unwrap()
7439 .clone()
7440 });
7441
7442 repository.read_with(cx, |repository, _cx| {
7443 let entries = repository.cached_status().collect::<Vec<_>>();
7444
7445 // `sub` doesn't appear in our computed statuses.
7446 // a.txt appears with a combined `DA` status.
7447 assert_eq!(
7448 entries,
7449 [StatusEntry {
7450 repo_path: "a.txt".into(),
7451 status: TrackedStatus {
7452 index_status: StatusCode::Deleted,
7453 worktree_status: StatusCode::Added
7454 }
7455 .into(),
7456 }]
7457 )
7458 });
7459}
7460
7461#[gpui::test]
7462async fn test_repository_subfolder_git_status(cx: &mut gpui::TestAppContext) {
7463 init_test(cx);
7464 cx.executor().allow_parking();
7465
7466 let root = TempTree::new(json!({
7467 "my-repo": {
7468 // .git folder will go here
7469 "a.txt": "a",
7470 "sub-folder-1": {
7471 "sub-folder-2": {
7472 "c.txt": "cc",
7473 "d": {
7474 "e.txt": "eee"
7475 }
7476 },
7477 }
7478 },
7479 }));
7480
7481 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
7482 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
7483
7484 // Set up git repository before creating the worktree.
7485 let git_repo_work_dir = root.path().join("my-repo");
7486 let repo = git_init(git_repo_work_dir.as_path());
7487 git_add(C_TXT, &repo);
7488 git_commit("Initial commit", &repo);
7489
7490 // Open the worktree in subfolder
7491 let project_root = Path::new("my-repo/sub-folder-1/sub-folder-2");
7492
7493 let project = Project::test(
7494 Arc::new(RealFs::new(None, cx.executor())),
7495 [root.path().join(project_root).as_path()],
7496 cx,
7497 )
7498 .await;
7499
7500 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7501 tree.flush_fs_events(cx).await;
7502 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7503 .await;
7504 cx.executor().run_until_parked();
7505
7506 let repository = project.read_with(cx, |project, cx| {
7507 project.repositories(cx).values().next().unwrap().clone()
7508 });
7509
7510 // Ensure that the git status is loaded correctly
7511 repository.read_with(cx, |repository, _cx| {
7512 assert_eq!(
7513 repository.work_directory_abs_path.canonicalize().unwrap(),
7514 root.path().join("my-repo").canonicalize().unwrap()
7515 );
7516
7517 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
7518 assert_eq!(
7519 repository.status_for_path(&E_TXT.into()).unwrap().status,
7520 FileStatus::Untracked
7521 );
7522 });
7523
7524 // Now we simulate FS events, but ONLY in the .git folder that's outside
7525 // of out project root.
7526 // Meaning: we don't produce any FS events for files inside the project.
7527 git_add(E_TXT, &repo);
7528 git_commit("Second commit", &repo);
7529 tree.flush_fs_events_in_root_git_repository(cx).await;
7530 cx.executor().run_until_parked();
7531
7532 repository.read_with(cx, |repository, _cx| {
7533 assert_eq!(repository.status_for_path(&C_TXT.into()), None);
7534 assert_eq!(repository.status_for_path(&E_TXT.into()), None);
7535 });
7536}
7537
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Checks that `Repository::merge_conflicts` is populated while a cherry-pick
// conflict is in progress, and cleared again once it is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflict: `other-branch` capitalizes a.txt while `main`
    // rewrites it, so cherry-picking the capitalization commit conflicts.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be reported by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Resolving the conflict should clear `merge_conflicts`.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
7618
7619#[gpui::test]
7620async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
7621 init_test(cx);
7622 let fs = FakeFs::new(cx.background_executor.clone());
7623 fs.insert_tree(
7624 path!("/root"),
7625 json!({
7626 ".git": {},
7627 ".gitignore": "*.txt\n",
7628 "a.xml": "<a></a>",
7629 "b.txt": "Some text"
7630 }),
7631 )
7632 .await;
7633
7634 fs.set_head_and_index_for_repo(
7635 path!("/root/.git").as_ref(),
7636 &[
7637 (".gitignore".into(), "*.txt\n".into()),
7638 ("a.xml".into(), "<a></a>".into()),
7639 ],
7640 );
7641
7642 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7643
7644 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7645 tree.flush_fs_events(cx).await;
7646 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7647 .await;
7648 cx.executor().run_until_parked();
7649
7650 let repository = project.read_with(cx, |project, cx| {
7651 project.repositories(cx).values().next().unwrap().clone()
7652 });
7653
7654 // One file is unmodified, the other is ignored.
7655 cx.read(|cx| {
7656 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
7657 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
7658 });
7659
7660 // Change the gitignore, and stage the newly non-ignored file.
7661 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
7662 .await
7663 .unwrap();
7664 fs.set_index_for_repo(
7665 Path::new(path!("/root/.git")),
7666 &[
7667 (".gitignore".into(), "*.txt\n".into()),
7668 ("a.xml".into(), "<a></a>".into()),
7669 ("b.txt".into(), "Some text".into()),
7670 ],
7671 );
7672
7673 cx.executor().run_until_parked();
7674 cx.read(|cx| {
7675 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
7676 assert_entry_git_state(
7677 tree.read(cx),
7678 repository.read(cx),
7679 "b.txt",
7680 Some(StatusCode::Added),
7681 false,
7682 );
7683 });
7684}
7685
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
7691#[gpui::test]
7692#[cfg_attr(target_os = "windows", ignore)]
7693async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
7694 init_test(cx);
7695 cx.executor().allow_parking();
7696 let root = TempTree::new(json!({
7697 "projects": {
7698 "project1": {
7699 "a": "",
7700 "b": "",
7701 }
7702 },
7703
7704 }));
7705 let root_path = root.path();
7706
7707 let repo = git_init(&root_path.join("projects/project1"));
7708 git_add("a", &repo);
7709 git_commit("init", &repo);
7710 std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();
7711
7712 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
7713
7714 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7715 tree.flush_fs_events(cx).await;
7716 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
7717 .await;
7718 cx.executor().run_until_parked();
7719
7720 let repository = project.read_with(cx, |project, cx| {
7721 project.repositories(cx).values().next().unwrap().clone()
7722 });
7723
7724 repository.read_with(cx, |repository, _| {
7725 assert_eq!(
7726 repository.work_directory_abs_path.as_ref(),
7727 root_path.join("projects/project1").as_path()
7728 );
7729 assert_eq!(
7730 repository
7731 .status_for_path(&"a".into())
7732 .map(|entry| entry.status),
7733 Some(StatusCode::Modified.worktree()),
7734 );
7735 assert_eq!(
7736 repository
7737 .status_for_path(&"b".into())
7738 .map(|entry| entry.status),
7739 Some(FileStatus::Untracked),
7740 );
7741 });
7742
7743 std::fs::rename(
7744 root_path.join("projects/project1"),
7745 root_path.join("projects/project2"),
7746 )
7747 .unwrap();
7748 tree.flush_fs_events(cx).await;
7749
7750 repository.read_with(cx, |repository, _| {
7751 assert_eq!(
7752 repository.work_directory_abs_path.as_ref(),
7753 root_path.join("projects/project2").as_path()
7754 );
7755 assert_eq!(
7756 repository.status_for_path(&"a".into()).unwrap().status,
7757 StatusCode::Modified.worktree(),
7758 );
7759 assert_eq!(
7760 repository.status_for_path(&"b".into()).unwrap().status,
7761 FileStatus::Untracked,
7762 );
7763 });
7764}
7765
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory that some program already has open. This is a
// limitation of Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// Exercises `status_for_path` through a long sequence of working-copy edits,
// commits, resets, ignore-rule changes, and directory renames against a real
// git repository.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so both start out untracked.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // Committed files no longer carry a status.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the ignore rules to also cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // A file created inside a freshly-created directory shows up as untracked.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory should move the untracked status to the
    // file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
7953
7954#[gpui::test]
7955async fn test_repos_in_invisible_worktrees(
7956 executor: BackgroundExecutor,
7957 cx: &mut gpui::TestAppContext,
7958) {
7959 init_test(cx);
7960 let fs = FakeFs::new(executor);
7961 fs.insert_tree(
7962 path!("/root"),
7963 json!({
7964 "dir1": {
7965 ".git": {},
7966 "dep1": {
7967 ".git": {},
7968 "src": {
7969 "a.txt": "",
7970 },
7971 },
7972 "b.txt": "",
7973 },
7974 }),
7975 )
7976 .await;
7977
7978 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
7979 let visible_worktree =
7980 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7981 visible_worktree
7982 .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
7983 .await;
7984
7985 let repos = project.read_with(cx, |project, cx| {
7986 project
7987 .repositories(cx)
7988 .values()
7989 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
7990 .collect::<Vec<_>>()
7991 });
7992 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
7993
7994 let (invisible_worktree, _) = project
7995 .update(cx, |project, cx| {
7996 project.worktree_store.update(cx, |worktree_store, cx| {
7997 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
7998 })
7999 })
8000 .await
8001 .expect("failed to create worktree");
8002 invisible_worktree
8003 .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
8004 .await;
8005
8006 let repos = project.read_with(cx, |project, cx| {
8007 project
8008 .repositories(cx)
8009 .values()
8010 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8011 .collect::<Vec<_>>()
8012 });
8013 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
8014}
8015
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Checks that git index status and is_ignored state stay correct as new
    // files appear in tracked, ancestor-ignored, and repo-ignored locations.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                // Scan everything: with no exclusions, even ignored files get
                // worktree entries that the assertions below can inspect.
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            // This .gitignore sits in an ancestor of the repository root
            // ("tree"), outside the worktree opened below.
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    // Seed HEAD and the index so .gitignore and tracked-file1 start out clean.
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Explicitly refresh the ignored directory so its children have worktree
    // entries despite being git-ignored.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: no entry has an index status, and only the file inside
    // ignored-dir is flagged is_ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create one new file of each kind: staged, ancestor-ignored, and ignored.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    // Stage tracked-file2 so it should be reported as Added below.
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The staged file is Added; the other two report no status, and only
        // the repo-ignored file is flagged is_ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is expected to be marked ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8150
8151#[gpui::test]
8152async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
8153 init_test(cx);
8154 let fs = FakeFs::new(cx.background_executor.clone());
8155 fs.insert_tree(
8156 path!("/root"),
8157 json!({
8158 "project": {
8159 ".git": {},
8160 "child1": {
8161 "a.txt": "A",
8162 },
8163 "child2": {
8164 "b.txt": "B",
8165 }
8166 }
8167 }),
8168 )
8169 .await;
8170
8171 let project = Project::test(
8172 fs.clone(),
8173 [
8174 path!("/root/project/child1").as_ref(),
8175 path!("/root/project/child2").as_ref(),
8176 ],
8177 cx,
8178 )
8179 .await;
8180
8181 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8182 tree.flush_fs_events(cx).await;
8183 cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
8184 .await;
8185 cx.executor().run_until_parked();
8186
8187 let repos = project.read_with(cx, |project, cx| {
8188 project
8189 .repositories(cx)
8190 .values()
8191 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8192 .collect::<Vec<_>>()
8193 });
8194 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
8195}
8196
8197async fn search(
8198 project: &Entity<Project>,
8199 query: SearchQuery,
8200 cx: &mut gpui::TestAppContext,
8201) -> Result<HashMap<String, Vec<Range<usize>>>> {
8202 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
8203 let mut results = HashMap::default();
8204 while let Ok(search_result) = search_rx.recv().await {
8205 match search_result {
8206 SearchResult::Buffer { buffer, ranges } => {
8207 results.entry(buffer).or_insert(ranges);
8208 }
8209 SearchResult::LimitReached => {}
8210 }
8211 }
8212 Ok(results
8213 .into_iter()
8214 .map(|(buffer, ranges)| {
8215 buffer.update(cx, |buffer, cx| {
8216 let path = buffer
8217 .file()
8218 .unwrap()
8219 .full_path(cx)
8220 .to_string_lossy()
8221 .to_string();
8222 let ranges = ranges
8223 .into_iter()
8224 .map(|range| range.to_offset(buffer))
8225 .collect::<Vec<_>>();
8226 (path, ranges)
8227 })
8228 })
8229 .collect())
8230}
8231
8232pub fn init_test(cx: &mut gpui::TestAppContext) {
8233 if std::env::var("RUST_LOG").is_ok() {
8234 env_logger::try_init().ok();
8235 }
8236
8237 cx.update(|cx| {
8238 let settings_store = SettingsStore::test(cx);
8239 cx.set_global(settings_store);
8240 release_channel::init(SemanticVersion::default(), cx);
8241 language::init(cx);
8242 Project::init_settings(cx);
8243 });
8244}
8245
8246fn json_lang() -> Arc<Language> {
8247 Arc::new(Language::new(
8248 LanguageConfig {
8249 name: "JSON".into(),
8250 matcher: LanguageMatcher {
8251 path_suffixes: vec!["json".to_string()],
8252 ..Default::default()
8253 },
8254 ..Default::default()
8255 },
8256 None,
8257 ))
8258}
8259
8260fn js_lang() -> Arc<Language> {
8261 Arc::new(Language::new(
8262 LanguageConfig {
8263 name: "JavaScript".into(),
8264 matcher: LanguageMatcher {
8265 path_suffixes: vec!["js".to_string()],
8266 ..Default::default()
8267 },
8268 ..Default::default()
8269 },
8270 None,
8271 ))
8272}
8273
8274fn rust_lang() -> Arc<Language> {
8275 Arc::new(Language::new(
8276 LanguageConfig {
8277 name: "Rust".into(),
8278 matcher: LanguageMatcher {
8279 path_suffixes: vec!["rs".to_string()],
8280 ..Default::default()
8281 },
8282 ..Default::default()
8283 },
8284 Some(tree_sitter_rust::LANGUAGE.into()),
8285 ))
8286}
8287
8288fn typescript_lang() -> Arc<Language> {
8289 Arc::new(Language::new(
8290 LanguageConfig {
8291 name: "TypeScript".into(),
8292 matcher: LanguageMatcher {
8293 path_suffixes: vec!["ts".to_string()],
8294 ..Default::default()
8295 },
8296 ..Default::default()
8297 },
8298 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
8299 ))
8300}
8301
8302fn tsx_lang() -> Arc<Language> {
8303 Arc::new(Language::new(
8304 LanguageConfig {
8305 name: "tsx".into(),
8306 matcher: LanguageMatcher {
8307 path_suffixes: vec!["tsx".to_string()],
8308 ..Default::default()
8309 },
8310 ..Default::default()
8311 },
8312 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
8313 ))
8314}
8315
8316fn get_all_tasks(
8317 project: &Entity<Project>,
8318 task_contexts: &TaskContexts,
8319 cx: &mut App,
8320) -> Vec<(TaskSourceKind, ResolvedTask)> {
8321 let (mut old, new) = project.update(cx, |project, cx| {
8322 project
8323 .task_store
8324 .read(cx)
8325 .task_inventory()
8326 .unwrap()
8327 .read(cx)
8328 .used_and_current_resolved_tasks(task_contexts, cx)
8329 });
8330 old.extend(new);
8331 old
8332}
8333
8334#[track_caller]
8335fn assert_entry_git_state(
8336 tree: &Worktree,
8337 repository: &Repository,
8338 path: &str,
8339 index_status: Option<StatusCode>,
8340 is_ignored: bool,
8341) {
8342 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
8343 let entry = tree
8344 .entry_for_path(path)
8345 .unwrap_or_else(|| panic!("entry {path} not found"));
8346 let status = repository
8347 .status_for_path(&path.into())
8348 .map(|entry| entry.status);
8349 let expected = index_status.map(|index_status| {
8350 TrackedStatus {
8351 index_status,
8352 worktree_status: StatusCode::Unmodified,
8353 }
8354 .into()
8355 });
8356 assert_eq!(
8357 status, expected,
8358 "expected {path} to have git status: {expected:?}"
8359 );
8360 assert_eq!(
8361 entry.is_ignored, is_ignored,
8362 "expected {path} to have is_ignored: {is_ignored}"
8363 );
8364}
8365
8366#[track_caller]
8367fn git_init(path: &Path) -> git2::Repository {
8368 let mut init_opts = RepositoryInitOptions::new();
8369 init_opts.initial_head("main");
8370 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
8371}
8372
8373#[track_caller]
8374fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
8375 let path = path.as_ref();
8376 let mut index = repo.index().expect("Failed to get index");
8377 index.add_path(path).expect("Failed to add file");
8378 index.write().expect("Failed to write index");
8379}
8380
8381#[track_caller]
8382fn git_remove_index(path: &Path, repo: &git2::Repository) {
8383 let mut index = repo.index().expect("Failed to get index");
8384 index.remove_path(path).expect("Failed to add file");
8385 index.write().expect("Failed to write index");
8386}
8387
8388#[track_caller]
8389fn git_commit(msg: &'static str, repo: &git2::Repository) {
8390 use git2::Signature;
8391
8392 let signature = Signature::now("test", "test@zed.dev").unwrap();
8393 let oid = repo.index().unwrap().write_tree().unwrap();
8394 let tree = repo.find_tree(oid).unwrap();
8395 if let Ok(head) = repo.head() {
8396 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
8397
8398 let parent_commit = parent_obj.as_commit().unwrap();
8399
8400 repo.commit(
8401 Some("HEAD"),
8402 &signature,
8403 &signature,
8404 msg,
8405 &tree,
8406 &[parent_commit],
8407 )
8408 .expect("Failed to commit with parent");
8409 } else {
8410 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
8411 .expect("Failed to commit");
8412 }
8413}
8414
// NOTE: compiled out via `cfg(any())` (always false); kept for tests that may
// re-enable it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    // Cherry-pick `commit` onto the current HEAD using default options.
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
8420
8421#[track_caller]
8422fn git_stash(repo: &mut git2::Repository) {
8423 use git2::Signature;
8424
8425 let signature = Signature::now("test", "test@zed.dev").unwrap();
8426 repo.stash_save(&signature, "N/A", None)
8427 .expect("Failed to stash");
8428}
8429
8430#[track_caller]
8431fn git_reset(offset: usize, repo: &git2::Repository) {
8432 let head = repo.head().expect("Couldn't get repo head");
8433 let object = head.peel(git2::ObjectType::Commit).unwrap();
8434 let commit = object.as_commit().unwrap();
8435 let new_head = commit
8436 .parents()
8437 .inspect(|parnet| {
8438 parnet.message();
8439 })
8440 .nth(offset)
8441 .expect("Not enough history");
8442 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
8443 .expect("Could not reset");
8444}
8445
// NOTE: compiled out via `cfg(any())` (always false); kept for tests that may
// re-enable it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Create branch `name` pointing at the current HEAD commit, without
    // checking it out.
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted expect message: this creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
8456
// NOTE: compiled out via `cfg(any())` (always false); kept for tests that may
// re-enable it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    // Point HEAD at the ref `name`, then update the working tree to match.
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
8463
8464#[cfg(any())]
8465#[track_caller]
8466fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
8467 repo.statuses(None)
8468 .unwrap()
8469 .iter()
8470 .map(|status| (status.path().unwrap().to_string(), status.status()))
8471 .collect()
8472}