1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use git::diff::assert_hunks;
5use gpui::{AppContext, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, DiskState, FakeLspAdapter,
10 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
11};
12use lsp::{DiagnosticSeverity, NumberOrString};
13use parking_lot::Mutex;
14use pretty_assertions::{assert_eq, assert_matches};
15use serde_json::json;
16#[cfg(not(windows))]
17use std::os;
18
19use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
20use task::{ResolvedTask, TaskContext};
21use unindent::Unindent as _;
22use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
23
24#[gpui::test]
25async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
26 cx.executor().allow_parking();
27
28 let (tx, mut rx) = futures::channel::mpsc::unbounded();
29 let _thread = std::thread::spawn(move || {
30 std::fs::metadata("/tmp").unwrap();
31 std::thread::sleep(Duration::from_millis(1000));
32 tx.unbounded_send(1).unwrap();
33 });
34 rx.next().await.unwrap();
35}
36
37#[gpui::test]
38async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
39 cx.executor().allow_parking();
40
41 let io_task = smol::unblock(move || {
42 println!("sleeping on thread {:?}", std::thread::current().id());
43 std::thread::sleep(Duration::from_millis(10));
44 1
45 });
46
47 let task = cx.foreground_executor().spawn(async move {
48 io_task.await;
49 });
50
51 task.await;
52}
53
54#[cfg(not(windows))]
55#[gpui::test]
56async fn test_symlinks(cx: &mut gpui::TestAppContext) {
57 init_test(cx);
58 cx.executor().allow_parking();
59
60 let dir = temp_tree(json!({
61 "root": {
62 "apple": "",
63 "banana": {
64 "carrot": {
65 "date": "",
66 "endive": "",
67 }
68 },
69 "fennel": {
70 "grape": "",
71 }
72 }
73 }));
74
75 let root_link_path = dir.path().join("root_link");
76 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
77 os::unix::fs::symlink(
78 dir.path().join("root/fennel"),
79 dir.path().join("root/finnochio"),
80 )
81 .unwrap();
82
83 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
84
85 project.update(cx, |project, cx| {
86 let tree = project.worktrees(cx).next().unwrap().read(cx);
87 assert_eq!(tree.file_count(), 5);
88 assert_eq!(
89 tree.inode_for_path("fennel/grape"),
90 tree.inode_for_path("finnochio/grape")
91 );
92 });
93}
94
95#[gpui::test]
96async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
97 init_test(cx);
98
99 let dir = temp_tree(json!({
100 ".editorconfig": r#"
101 root = true
102 [*.rs]
103 indent_style = tab
104 indent_size = 3
105 end_of_line = lf
106 insert_final_newline = true
107 trim_trailing_whitespace = true
108 [*.js]
109 tab_width = 10
110 "#,
111 ".zed": {
112 "settings.json": r#"{
113 "tab_size": 8,
114 "hard_tabs": false,
115 "ensure_final_newline_on_save": false,
116 "remove_trailing_whitespace_on_save": false,
117 "soft_wrap": "editor_width"
118 }"#,
119 },
120 "a.rs": "fn a() {\n A\n}",
121 "b": {
122 ".editorconfig": r#"
123 [*.rs]
124 indent_size = 2
125 "#,
126 "b.rs": "fn b() {\n B\n}",
127 },
128 "c.js": "def c\n C\nend",
129 "README.json": "tabs are better\n",
130 }));
131
132 let path = dir.path();
133 let fs = FakeFs::new(cx.executor());
134 fs.insert_tree_from_real_fs(path, path).await;
135 let project = Project::test(fs, [path], cx).await;
136
137 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
138 language_registry.add(js_lang());
139 language_registry.add(json_lang());
140 language_registry.add(rust_lang());
141
142 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
143
144 cx.executor().run_until_parked();
145
146 cx.update(|cx| {
147 let tree = worktree.read(cx);
148 let settings_for = |path: &str| {
149 let file_entry = tree.entry_for_path(path).unwrap().clone();
150 let file = File::for_entry(file_entry, worktree.clone());
151 let file_language = project
152 .read(cx)
153 .languages()
154 .language_for_file_path(file.path.as_ref());
155 let file_language = cx
156 .background_executor()
157 .block(file_language)
158 .expect("Failed to get file language");
159 let file = file as _;
160 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
161 };
162
163 let settings_a = settings_for("a.rs");
164 let settings_b = settings_for("b/b.rs");
165 let settings_c = settings_for("c.js");
166 let settings_readme = settings_for("README.json");
167
168 // .editorconfig overrides .zed/settings
169 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
170 assert_eq!(settings_a.hard_tabs, true);
171 assert_eq!(settings_a.ensure_final_newline_on_save, true);
172 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
173
174 // .editorconfig in b/ overrides .editorconfig in root
175 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
176
177 // "indent_size" is not set, so "tab_width" is used
178 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
179
180 // README.md should not be affected by .editorconfig's globe "*.rs"
181 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
182 });
183}
184
// Verifies per-directory `.zed` configuration: `settings.json` values scope to
// the directory they live in, and `tasks.json` entries from nested, root, and
// global sources are surfaced and ordered correctly.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Root `.zed` defines tab_size=8 and one task; `b/.zed` overrides with
    // tab_size=2 and its own task. Note the trailing commas inside the task
    // JSON arrays — the parser is expected to tolerate them.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Identity of the task source backed by the root `.zed` directory; reused
    // below to find and re-schedule its task.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per directory: `a/a.rs` sees the root settings,
            // `b/b.rs` sees the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Before anything is scheduled, tasks from the deeper `.zed` directory
    // sort ahead of the root's.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    // Mark the root task as recently scheduled, and register an additional
    // global (file-based) task with custom args and environment.
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // After scheduling, the recently-used root task sorts first; the new
    // global task (with its env var resolved) sorts last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
378
// End-to-end lifecycle test for language servers: startup on first buffer
// open, capability-driven buffer configuration, routing of change/save/close
// notifications to the matching server, behavior on file renames (including
// renames that change the file's language), and server restarts.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completion triggers "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completion trigger ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    // A rename surfaces to the server as close-old + open-new.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Attach a diagnostic to the renamed buffer so we can verify below that it
    // gets cleared once the buffer changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements spawn.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence set equality).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
776
// Verifies `workspace/didChangeWatchedFiles` support: registering watchers
// causes ignored directories matching a watch glob to be loaded, and only FS
// mutations matching the registered globs are forwarded to the server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting how many extra directory scans registration causes.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact path, a brace glob in `src`, and a
    // recursive glob under the git-ignored `target/y` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate change notifications, sorted by URI for stable assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no change events, but scans the watched
    // ignored subtree (4 additional read_dir calls).
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
970
971#[gpui::test]
972async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
973 init_test(cx);
974
975 let fs = FakeFs::new(cx.executor());
976 fs.insert_tree(
977 "/dir",
978 json!({
979 "a.rs": "let a = 1;",
980 "b.rs": "let b = 2;"
981 }),
982 )
983 .await;
984
985 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
986
987 let buffer_a = project
988 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
989 .await
990 .unwrap();
991 let buffer_b = project
992 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
993 .await
994 .unwrap();
995
996 project.update(cx, |project, cx| {
997 project
998 .update_diagnostics(
999 LanguageServerId(0),
1000 lsp::PublishDiagnosticsParams {
1001 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1002 version: None,
1003 diagnostics: vec![lsp::Diagnostic {
1004 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1005 severity: Some(lsp::DiagnosticSeverity::ERROR),
1006 message: "error 1".to_string(),
1007 ..Default::default()
1008 }],
1009 },
1010 &[],
1011 cx,
1012 )
1013 .unwrap();
1014 project
1015 .update_diagnostics(
1016 LanguageServerId(0),
1017 lsp::PublishDiagnosticsParams {
1018 uri: Url::from_file_path("/dir/b.rs").unwrap(),
1019 version: None,
1020 diagnostics: vec![lsp::Diagnostic {
1021 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1022 severity: Some(DiagnosticSeverity::WARNING),
1023 message: "error 2".to_string(),
1024 ..Default::default()
1025 }],
1026 },
1027 &[],
1028 cx,
1029 )
1030 .unwrap();
1031 });
1032
1033 buffer_a.update(cx, |buffer, _| {
1034 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1035 assert_eq!(
1036 chunks
1037 .iter()
1038 .map(|(s, d)| (s.as_str(), *d))
1039 .collect::<Vec<_>>(),
1040 &[
1041 ("let ", None),
1042 ("a", Some(DiagnosticSeverity::ERROR)),
1043 (" = 1;", None),
1044 ]
1045 );
1046 });
1047 buffer_b.update(cx, |buffer, _| {
1048 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1049 assert_eq!(
1050 chunks
1051 .iter()
1052 .map(|(s, d)| (s.as_str(), *d))
1053 .collect::<Vec<_>>(),
1054 &[
1055 ("let ", None),
1056 ("b", Some(DiagnosticSeverity::WARNING)),
1057 (" = 2;", None),
1058 ]
1059 );
1060 });
1061}
1062
1063#[gpui::test]
1064async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1065 init_test(cx);
1066
1067 let fs = FakeFs::new(cx.executor());
1068 fs.insert_tree(
1069 "/root",
1070 json!({
1071 "dir": {
1072 ".git": {
1073 "HEAD": "ref: refs/heads/main",
1074 },
1075 ".gitignore": "b.rs",
1076 "a.rs": "let a = 1;",
1077 "b.rs": "let b = 2;",
1078 },
1079 "other.rs": "let b = c;"
1080 }),
1081 )
1082 .await;
1083
1084 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1085 let (worktree, _) = project
1086 .update(cx, |project, cx| {
1087 project.find_or_create_worktree("/root/dir", true, cx)
1088 })
1089 .await
1090 .unwrap();
1091 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1092
1093 let (worktree, _) = project
1094 .update(cx, |project, cx| {
1095 project.find_or_create_worktree("/root/other.rs", false, cx)
1096 })
1097 .await
1098 .unwrap();
1099 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1100
1101 let server_id = LanguageServerId(0);
1102 project.update(cx, |project, cx| {
1103 project
1104 .update_diagnostics(
1105 server_id,
1106 lsp::PublishDiagnosticsParams {
1107 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1108 version: None,
1109 diagnostics: vec![lsp::Diagnostic {
1110 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1111 severity: Some(lsp::DiagnosticSeverity::ERROR),
1112 message: "unused variable 'b'".to_string(),
1113 ..Default::default()
1114 }],
1115 },
1116 &[],
1117 cx,
1118 )
1119 .unwrap();
1120 project
1121 .update_diagnostics(
1122 server_id,
1123 lsp::PublishDiagnosticsParams {
1124 uri: Url::from_file_path("/root/other.rs").unwrap(),
1125 version: None,
1126 diagnostics: vec![lsp::Diagnostic {
1127 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1128 severity: Some(lsp::DiagnosticSeverity::ERROR),
1129 message: "unknown variable 'c'".to_string(),
1130 ..Default::default()
1131 }],
1132 },
1133 &[],
1134 cx,
1135 )
1136 .unwrap();
1137 });
1138
1139 let main_ignored_buffer = project
1140 .update(cx, |project, cx| {
1141 project.open_buffer((main_worktree_id, "b.rs"), cx)
1142 })
1143 .await
1144 .unwrap();
1145 main_ignored_buffer.update(cx, |buffer, _| {
1146 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1147 assert_eq!(
1148 chunks
1149 .iter()
1150 .map(|(s, d)| (s.as_str(), *d))
1151 .collect::<Vec<_>>(),
1152 &[
1153 ("let ", None),
1154 ("b", Some(DiagnosticSeverity::ERROR)),
1155 (" = 2;", None),
1156 ],
1157 "Gigitnored buffers should still get in-buffer diagnostics",
1158 );
1159 });
1160 let other_buffer = project
1161 .update(cx, |project, cx| {
1162 project.open_buffer((other_worktree_id, ""), cx)
1163 })
1164 .await
1165 .unwrap();
1166 other_buffer.update(cx, |buffer, _| {
1167 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1168 assert_eq!(
1169 chunks
1170 .iter()
1171 .map(|(s, d)| (s.as_str(), *d))
1172 .collect::<Vec<_>>(),
1173 &[
1174 ("let b = ", None),
1175 ("c", Some(DiagnosticSeverity::ERROR)),
1176 (";", None),
1177 ],
1178 "Buffers from hidden projects should still get in-buffer diagnostics"
1179 );
1180 });
1181
1182 project.update(cx, |project, cx| {
1183 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1184 assert_eq!(
1185 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1186 vec![(
1187 ProjectPath {
1188 worktree_id: main_worktree_id,
1189 path: Arc::from(Path::new("b.rs")),
1190 },
1191 server_id,
1192 DiagnosticSummary {
1193 error_count: 1,
1194 warning_count: 0,
1195 }
1196 )]
1197 );
1198 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1199 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1200 });
1201}
1202
// Verifies the event lifecycle around disk-based diagnostics: a
// `DiskBasedDiagnosticsStarted` event when the server begins progress with the
// configured token, a `DiagnosticsUpdated` event per published file, a
// `DiskBasedDiagnosticsFinished` event when progress ends, and no duplicate
// update event when an identical (empty) diagnostic set is re-published.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The work-done progress token the adapter below is configured to treat as
    // the disk-based diagnostics task.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress with the configured token signals that disk-based
    // diagnostics are now running.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress task signals that disk-based diagnostics are done.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the freshly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No second event: the diagnostic set did not change.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1335
// Verifies that restarting a language server while its disk-based diagnostics
// task is still in progress does not leave the project stuck in the "running"
// state: the new server instance's progress drives the Started/Finished
// events, and the old server's never-completed task is forgotten.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The restarted server gets a fresh id (1), not the old server's id (0).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1420
// Verifies that diagnostics published by a language server are cleared — both
// from the open buffer and from the project summary — when that server is
// restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Both the buffer and the project summary reflect the published error.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1499
// Verifies that a diagnostics notification carrying an unknown (future) buffer
// version does not corrupt the buffer's version tracking: after a restart, the
// new server is told the buffer is at version 0, not the bogus version.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The replacement server re-opens the buffer; it must see version 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1537
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for progress tasks that were
// started as cancellable — the non-cancellable task is left alone.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable progress task…
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // …and one cancellable task, using the disk-based diagnostics token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable task's token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1600
1601#[gpui::test]
1602async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1603 init_test(cx);
1604
1605 let fs = FakeFs::new(cx.executor());
1606 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1607 .await;
1608
1609 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1610 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1611
1612 let mut fake_rust_servers = language_registry.register_fake_lsp(
1613 "Rust",
1614 FakeLspAdapter {
1615 name: "rust-lsp",
1616 ..Default::default()
1617 },
1618 );
1619 let mut fake_js_servers = language_registry.register_fake_lsp(
1620 "JavaScript",
1621 FakeLspAdapter {
1622 name: "js-lsp",
1623 ..Default::default()
1624 },
1625 );
1626 language_registry.add(rust_lang());
1627 language_registry.add(js_lang());
1628
1629 let _rs_buffer = project
1630 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1631 .await
1632 .unwrap();
1633 let _js_buffer = project
1634 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1635 .await
1636 .unwrap();
1637
1638 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1639 assert_eq!(
1640 fake_rust_server_1
1641 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1642 .await
1643 .text_document
1644 .uri
1645 .as_str(),
1646 "file:///dir/a.rs"
1647 );
1648
1649 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1650 assert_eq!(
1651 fake_js_server
1652 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1653 .await
1654 .text_document
1655 .uri
1656 .as_str(),
1657 "file:///dir/b.js"
1658 );
1659
1660 // Disable Rust language server, ensuring only that server gets stopped.
1661 cx.update(|cx| {
1662 SettingsStore::update_global(cx, |settings, cx| {
1663 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1664 settings.languages.insert(
1665 "Rust".into(),
1666 LanguageSettingsContent {
1667 enable_language_server: Some(false),
1668 ..Default::default()
1669 },
1670 );
1671 });
1672 })
1673 });
1674 fake_rust_server_1
1675 .receive_notification::<lsp::notification::Exit>()
1676 .await;
1677
1678 // Enable Rust and disable JavaScript language servers, ensuring that the
1679 // former gets started again and that the latter stops.
1680 cx.update(|cx| {
1681 SettingsStore::update_global(cx, |settings, cx| {
1682 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1683 settings.languages.insert(
1684 LanguageName::new("Rust"),
1685 LanguageSettingsContent {
1686 enable_language_server: Some(true),
1687 ..Default::default()
1688 },
1689 );
1690 settings.languages.insert(
1691 LanguageName::new("JavaScript"),
1692 LanguageSettingsContent {
1693 enable_language_server: Some(false),
1694 ..Default::default()
1695 },
1696 );
1697 });
1698 })
1699 });
1700 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1701 assert_eq!(
1702 fake_rust_server_2
1703 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1704 .await
1705 .text_document
1706 .uri
1707 .as_str(),
1708 "file:///dir/a.rs"
1709 );
1710 fake_js_server
1711 .receive_notification::<lsp::notification::Exit>()
1712 .await;
1713}
1714
// Verifies that diagnostics published against an older buffer version are
// translated through subsequent edits: their ranges track the text they were
// attached to, overlapping diagnostics highlight correctly, and out-of-order
// (by position) diagnostics are sorted into buffer order.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted blank lines shift every range down by two rows.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the narrower error at the same start.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe one wins.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // Despite being published out of order, the entries come back sorted by
    // buffer position, with ranges adjusted for the latest edits.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1994
1995#[gpui::test]
1996async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1997 init_test(cx);
1998
1999 let text = concat!(
2000 "let one = ;\n", //
2001 "let two = \n",
2002 "let three = 3;\n",
2003 );
2004
2005 let fs = FakeFs::new(cx.executor());
2006 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2007
2008 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2009 let buffer = project
2010 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2011 .await
2012 .unwrap();
2013
2014 project.update(cx, |project, cx| {
2015 project.lsp_store.update(cx, |lsp_store, cx| {
2016 lsp_store
2017 .update_buffer_diagnostics(
2018 &buffer,
2019 LanguageServerId(0),
2020 None,
2021 vec![
2022 DiagnosticEntry {
2023 range: Unclipped(PointUtf16::new(0, 10))
2024 ..Unclipped(PointUtf16::new(0, 10)),
2025 diagnostic: Diagnostic {
2026 severity: DiagnosticSeverity::ERROR,
2027 message: "syntax error 1".to_string(),
2028 ..Default::default()
2029 },
2030 },
2031 DiagnosticEntry {
2032 range: Unclipped(PointUtf16::new(1, 10))
2033 ..Unclipped(PointUtf16::new(1, 10)),
2034 diagnostic: Diagnostic {
2035 severity: DiagnosticSeverity::ERROR,
2036 message: "syntax error 2".to_string(),
2037 ..Default::default()
2038 },
2039 },
2040 ],
2041 cx,
2042 )
2043 .unwrap();
2044 })
2045 });
2046
2047 // An empty range is extended forward to include the following character.
2048 // At the end of a line, an empty range is extended backward to include
2049 // the preceding character.
2050 buffer.update(cx, |buffer, _| {
2051 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2052 assert_eq!(
2053 chunks
2054 .iter()
2055 .map(|(s, d)| (s.as_str(), *d))
2056 .collect::<Vec<_>>(),
2057 &[
2058 ("let one = ", None),
2059 (";", Some(DiagnosticSeverity::ERROR)),
2060 ("\nlet two =", None),
2061 (" ", Some(DiagnosticSeverity::ERROR)),
2062 ("\nlet three = 3;\n", None)
2063 ]
2064 );
2065 });
2066}
2067
2068#[gpui::test]
2069async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2070 init_test(cx);
2071
2072 let fs = FakeFs::new(cx.executor());
2073 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2074 .await;
2075
2076 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2077
2078 project.update(cx, |project, cx| {
2079 project
2080 .update_diagnostic_entries(
2081 LanguageServerId(0),
2082 Path::new("/dir/a.rs").to_owned(),
2083 None,
2084 vec![DiagnosticEntry {
2085 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2086 diagnostic: Diagnostic {
2087 severity: DiagnosticSeverity::ERROR,
2088 is_primary: true,
2089 message: "syntax error a1".to_string(),
2090 ..Default::default()
2091 },
2092 }],
2093 cx,
2094 )
2095 .unwrap();
2096 project
2097 .update_diagnostic_entries(
2098 LanguageServerId(1),
2099 Path::new("/dir/a.rs").to_owned(),
2100 None,
2101 vec![DiagnosticEntry {
2102 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2103 diagnostic: Diagnostic {
2104 severity: DiagnosticSeverity::ERROR,
2105 is_primary: true,
2106 message: "syntax error b1".to_string(),
2107 ..Default::default()
2108 },
2109 }],
2110 cx,
2111 )
2112 .unwrap();
2113
2114 assert_eq!(
2115 project.diagnostic_summary(false, cx),
2116 DiagnosticSummary {
2117 error_count: 2,
2118 warning_count: 0,
2119 }
2120 );
2121 });
2122}
2123
// Verifies that LSP text edits expressed against a *past* buffer version are
// correctly remapped onto the current buffer content: the buffer is edited
// after the (simulated) server computed its edits, and applying the translated
// edits must still produce the intended result.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server saw on open; the edits below
    // will be tagged with this (now stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Translate the server's edits (positioned in the old version's
    // coordinates) into anchored ranges in the current buffer.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits yields the server's intended result even
    // though the buffer moved on since the edits were computed.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2276
// A language server may express a tiny change as a very large diff (e.g.
// rust-analyzer's merge-imports code action rewrites most of the file).
// `edits_from_lsp` should minimize such diffs down to the regions that
// actually changed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits above collapse into just two minimal buffer
        // edits covering the regions that actually differ.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2385
// Language servers sometimes send edits out of order, with inverted ranges,
// or with positions past the end of the file. `edits_from_lsp` should
// normalize all of these instead of failing.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the resulting edits are normalized
        // and minimized to the two real changes.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2490
2491fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2492 buffer: &Buffer,
2493 range: Range<T>,
2494) -> Vec<(String, Option<DiagnosticSeverity>)> {
2495 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2496 for chunk in buffer.snapshot().chunks(range, true) {
2497 if chunks.last().map_or(false, |prev_chunk| {
2498 prev_chunk.1 == chunk.diagnostic_severity
2499 }) {
2500 chunks.last_mut().unwrap().0.push_str(chunk.text);
2501 } else {
2502 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2503 }
2504 }
2505 chunks
2506}
2507
// Go-to-definition: the response may point into a file that isn't part of
// any worktree yet. The project should open it in an invisible worktree and
// release that worktree once the last definition handle is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Respond with a definition located in a different file that is not yet
    // part of the project, after verifying the request targets `b.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The definition's file was opened in an *invisible* worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Once the definition handle is dropped, the invisible worktree goes away.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Returns each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2601
// When completion items carry no explicit edit range, the replacement range
// must be inferred from the text around the cursor (here, the partial word
// "fqn" and the partial string segment "cmp").
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing at the end of a partial identifier ("fqn").
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item has an `insert_text` but no edit range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the 3-character partial word "fqn".
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // No `insert_text` either: the label itself becomes the new text.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", ending just before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2693
// Carriage returns in a completion's `insert_text` ("\r" and "\r\n") should
// be normalized to plain newlines before the text is applied to the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The insert text mixes bare "\r" and Windows-style "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both forms of carriage return were converted to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2754
// Code actions that carry a command instead of edits: resolving the action
// yields no edits, so the command must be executed, and the server's
// resulting `workspace/applyEdit` request becomes the applied transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // The server advertises lazy resolution of code actions.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2889
2890#[gpui::test(iterations = 10)]
2891async fn test_save_file(cx: &mut gpui::TestAppContext) {
2892 init_test(cx);
2893
2894 let fs = FakeFs::new(cx.executor());
2895 fs.insert_tree(
2896 "/dir",
2897 json!({
2898 "file1": "the old contents",
2899 }),
2900 )
2901 .await;
2902
2903 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2904 let buffer = project
2905 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2906 .await
2907 .unwrap();
2908 buffer.update(cx, |buffer, cx| {
2909 assert_eq!(buffer.text(), "the old contents");
2910 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2911 });
2912
2913 project
2914 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2915 .await
2916 .unwrap();
2917
2918 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2919 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2920}
2921
2922#[gpui::test(iterations = 30)]
2923async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2924 init_test(cx);
2925
2926 let fs = FakeFs::new(cx.executor().clone());
2927 fs.insert_tree(
2928 "/dir",
2929 json!({
2930 "file1": "the original contents",
2931 }),
2932 )
2933 .await;
2934
2935 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2936 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2937 let buffer = project
2938 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2939 .await
2940 .unwrap();
2941
2942 // Simulate buffer diffs being slow, so that they don't complete before
2943 // the next file change occurs.
2944 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2945
2946 // Change the buffer's file on disk, and then wait for the file change
2947 // to be detected by the worktree, so that the buffer starts reloading.
2948 fs.save(
2949 "/dir/file1".as_ref(),
2950 &"the first contents".into(),
2951 Default::default(),
2952 )
2953 .await
2954 .unwrap();
2955 worktree.next_event(cx).await;
2956
2957 // Change the buffer's file again. Depending on the random seed, the
2958 // previous file change may still be in progress.
2959 fs.save(
2960 "/dir/file1".as_ref(),
2961 &"the second contents".into(),
2962 Default::default(),
2963 )
2964 .await
2965 .unwrap();
2966 worktree.next_event(cx).await;
2967
2968 cx.executor().run_until_parked();
2969 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2970 buffer.read_with(cx, |buffer, _| {
2971 assert_eq!(buffer.text(), on_disk_text);
2972 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2973 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2974 });
2975}
2976
2977#[gpui::test(iterations = 30)]
2978async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2979 init_test(cx);
2980
2981 let fs = FakeFs::new(cx.executor().clone());
2982 fs.insert_tree(
2983 "/dir",
2984 json!({
2985 "file1": "the original contents",
2986 }),
2987 )
2988 .await;
2989
2990 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2991 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2992 let buffer = project
2993 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2994 .await
2995 .unwrap();
2996
2997 // Simulate buffer diffs being slow, so that they don't complete before
2998 // the next file change occurs.
2999 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3000
3001 // Change the buffer's file on disk, and then wait for the file change
3002 // to be detected by the worktree, so that the buffer starts reloading.
3003 fs.save(
3004 "/dir/file1".as_ref(),
3005 &"the first contents".into(),
3006 Default::default(),
3007 )
3008 .await
3009 .unwrap();
3010 worktree.next_event(cx).await;
3011
3012 cx.executor()
3013 .spawn(cx.executor().simulate_random_delay())
3014 .await;
3015
3016 // Perform a noop edit, causing the buffer's version to increase.
3017 buffer.update(cx, |buffer, cx| {
3018 buffer.edit([(0..0, " ")], None, cx);
3019 buffer.undo(cx);
3020 });
3021
3022 cx.executor().run_until_parked();
3023 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3024 buffer.read_with(cx, |buffer, _| {
3025 let buffer_text = buffer.text();
3026 if buffer_text == on_disk_text {
3027 assert!(
3028 !buffer.is_dirty() && !buffer.has_conflict(),
3029 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3030 );
3031 }
3032 // If the file change occurred while the buffer was processing the first
3033 // change, the buffer will be in a conflicting state.
3034 else {
3035 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3036 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3037 }
3038 });
3039}
3040
3041#[gpui::test]
3042async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3043 init_test(cx);
3044
3045 let fs = FakeFs::new(cx.executor());
3046 fs.insert_tree(
3047 "/dir",
3048 json!({
3049 "file1": "the old contents",
3050 }),
3051 )
3052 .await;
3053
3054 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3055 let buffer = project
3056 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3057 .await
3058 .unwrap();
3059 buffer.update(cx, |buffer, cx| {
3060 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3061 });
3062
3063 project
3064 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3065 .await
3066 .unwrap();
3067
3068 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3069 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3070}
3071
// Saving an untitled buffer to a path should write its contents, clear the
// dirty state, re-run language detection for the new file name, and register
// the buffer so that reopening the path returns the same entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as Plain Text and becomes dirty on edit.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After save-as, the buffer is clean and its language was re-detected
    // from the ".rs" extension.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3123
// Exercises the real file system: entries keep their ids across renames,
// open buffers track their files through renames and deletions, and a
// remote worktree replica converges to the same state via streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive the renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3283
// Renaming a directory must preserve the ids of the directory and the files
// within it, and must not disturb open buffers for those files.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the containing directory "a" -> "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the file keep their original entry ids, and the
    // open buffer remains clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3335
3336#[gpui::test]
3337async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3338 init_test(cx);
3339
3340 let fs = FakeFs::new(cx.executor());
3341 fs.insert_tree(
3342 "/dir",
3343 json!({
3344 "a.txt": "a-contents",
3345 "b.txt": "b-contents",
3346 }),
3347 )
3348 .await;
3349
3350 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3351
3352 // Spawn multiple tasks to open paths, repeating some paths.
3353 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3354 (
3355 p.open_local_buffer("/dir/a.txt", cx),
3356 p.open_local_buffer("/dir/b.txt", cx),
3357 p.open_local_buffer("/dir/a.txt", cx),
3358 )
3359 });
3360
3361 let buffer_a_1 = buffer_a_1.await.unwrap();
3362 let buffer_a_2 = buffer_a_2.await.unwrap();
3363 let buffer_b = buffer_b.await.unwrap();
3364 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3365 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3366
3367 // There is only one buffer per path.
3368 let buffer_a_id = buffer_a_1.entity_id();
3369 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3370
3371 // Open the same path again while it is still open.
3372 drop(buffer_a_1);
3373 let buffer_a_3 = project
3374 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3375 .await
3376 .unwrap();
3377
3378 // There's still only one buffer per path.
3379 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3380}
3381
3382#[gpui::test]
3383async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3384 init_test(cx);
3385
3386 let fs = FakeFs::new(cx.executor());
3387 fs.insert_tree(
3388 "/dir",
3389 json!({
3390 "file1": "abc",
3391 "file2": "def",
3392 "file3": "ghi",
3393 }),
3394 )
3395 .await;
3396
3397 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3398
3399 let buffer1 = project
3400 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3401 .await
3402 .unwrap();
3403 let events = Arc::new(Mutex::new(Vec::new()));
3404
3405 // initially, the buffer isn't dirty.
3406 buffer1.update(cx, |buffer, cx| {
3407 cx.subscribe(&buffer1, {
3408 let events = events.clone();
3409 move |_, _, event, _| match event {
3410 BufferEvent::Operation { .. } => {}
3411 _ => events.lock().push(event.clone()),
3412 }
3413 })
3414 .detach();
3415
3416 assert!(!buffer.is_dirty());
3417 assert!(events.lock().is_empty());
3418
3419 buffer.edit([(1..2, "")], None, cx);
3420 });
3421
3422 // after the first edit, the buffer is dirty, and emits a dirtied event.
3423 buffer1.update(cx, |buffer, cx| {
3424 assert!(buffer.text() == "ac");
3425 assert!(buffer.is_dirty());
3426 assert_eq!(
3427 *events.lock(),
3428 &[
3429 language::BufferEvent::Edited,
3430 language::BufferEvent::DirtyChanged
3431 ]
3432 );
3433 events.lock().clear();
3434 buffer.did_save(
3435 buffer.version(),
3436 buffer.file().unwrap().disk_state().mtime(),
3437 cx,
3438 );
3439 });
3440
3441 // after saving, the buffer is not dirty, and emits a saved event.
3442 buffer1.update(cx, |buffer, cx| {
3443 assert!(!buffer.is_dirty());
3444 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3445 events.lock().clear();
3446
3447 buffer.edit([(1..1, "B")], None, cx);
3448 buffer.edit([(2..2, "D")], None, cx);
3449 });
3450
3451 // after editing again, the buffer is dirty, and emits another dirty event.
3452 buffer1.update(cx, |buffer, cx| {
3453 assert!(buffer.text() == "aBDc");
3454 assert!(buffer.is_dirty());
3455 assert_eq!(
3456 *events.lock(),
3457 &[
3458 language::BufferEvent::Edited,
3459 language::BufferEvent::DirtyChanged,
3460 language::BufferEvent::Edited,
3461 ],
3462 );
3463 events.lock().clear();
3464
3465 // After restoring the buffer to its previously-saved state,
3466 // the buffer is not considered dirty anymore.
3467 buffer.edit([(1..3, "")], None, cx);
3468 assert!(buffer.text() == "ac");
3469 assert!(!buffer.is_dirty());
3470 });
3471
3472 assert_eq!(
3473 *events.lock(),
3474 &[
3475 language::BufferEvent::Edited,
3476 language::BufferEvent::DirtyChanged
3477 ]
3478 );
3479
3480 // When a file is deleted, the buffer is considered dirty.
3481 let events = Arc::new(Mutex::new(Vec::new()));
3482 let buffer2 = project
3483 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3484 .await
3485 .unwrap();
3486 buffer2.update(cx, |_, cx| {
3487 cx.subscribe(&buffer2, {
3488 let events = events.clone();
3489 move |_, _, event, _| events.lock().push(event.clone())
3490 })
3491 .detach();
3492 });
3493
3494 fs.remove_file("/dir/file2".as_ref(), Default::default())
3495 .await
3496 .unwrap();
3497 cx.executor().run_until_parked();
3498 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3499 assert_eq!(
3500 *events.lock(),
3501 &[
3502 language::BufferEvent::DirtyChanged,
3503 language::BufferEvent::FileHandleChanged
3504 ]
3505 );
3506
3507 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3508 let events = Arc::new(Mutex::new(Vec::new()));
3509 let buffer3 = project
3510 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3511 .await
3512 .unwrap();
3513 buffer3.update(cx, |_, cx| {
3514 cx.subscribe(&buffer3, {
3515 let events = events.clone();
3516 move |_, _, event, _| events.lock().push(event.clone())
3517 })
3518 .detach();
3519 });
3520
3521 buffer3.update(cx, |buffer, cx| {
3522 buffer.edit([(0..0, "x")], None, cx);
3523 });
3524 events.lock().clear();
3525 fs.remove_file("/dir/file3".as_ref(), Default::default())
3526 .await
3527 .unwrap();
3528 cx.executor().run_until_parked();
3529 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
3530 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3531}
3532
// Verifies how a buffer reacts when its backing file changes on disk:
// a clean buffer is reloaded in place (anchors tracking the diffed edits),
// while a dirty buffer keeps its edits and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the three initial lines, so we
    // can check below that anchors survive the diff-based reload.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diffed edits rather than staying at
        // their original row/column coordinates.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3613
3614#[gpui::test]
3615async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3616 init_test(cx);
3617
3618 let fs = FakeFs::new(cx.executor());
3619 fs.insert_tree(
3620 "/dir",
3621 json!({
3622 "file1": "a\nb\nc\n",
3623 "file2": "one\r\ntwo\r\nthree\r\n",
3624 }),
3625 )
3626 .await;
3627
3628 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3629 let buffer1 = project
3630 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3631 .await
3632 .unwrap();
3633 let buffer2 = project
3634 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3635 .await
3636 .unwrap();
3637
3638 buffer1.update(cx, |buffer, _| {
3639 assert_eq!(buffer.text(), "a\nb\nc\n");
3640 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3641 });
3642 buffer2.update(cx, |buffer, _| {
3643 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3644 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3645 });
3646
3647 // Change a file's line endings on disk from unix to windows. The buffer's
3648 // state updates correctly.
3649 fs.save(
3650 "/dir/file1".as_ref(),
3651 &"aaa\nb\nc\n".into(),
3652 LineEnding::Windows,
3653 )
3654 .await
3655 .unwrap();
3656 cx.executor().run_until_parked();
3657 buffer1.update(cx, |buffer, _| {
3658 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3659 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3660 });
3661
3662 // Save a file with windows line endings. The file is written correctly.
3663 buffer2.update(cx, |buffer, cx| {
3664 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3665 });
3666 project
3667 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3668 .await
3669 .unwrap();
3670 assert_eq!(
3671 fs.load("/dir/file2".as_ref()).await.unwrap(),
3672 "one\r\ntwo\r\nthree\r\nfour\r\n",
3673 );
3674}
3675
// Publishes LSP diagnostics whose `related_information` entries link
// primary errors to hint-severity duplicates, then verifies that they are
// grouped: entries of a group share a `group_id`, exactly one entry per
// group is primary, and `diagnostic_group` returns each group intact.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Two diagnostics ("error 1", "error 2") plus hint-severity entries that
    // mirror their related-information locations, as a real language server
    // would send them.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in buffer order: "error 1" and its hint form group 1;
    // "error 2" and its two hints form group 0.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary plus the hints derived from its
    // related information.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3917
// Drives a full LSP rename: prepare_rename resolves the symbol range, then
// perform_rename applies a multi-file WorkspaceEdit returned by the server.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // (including prepare_provider, so prepare_rename is routed to it).
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE") should yield the symbol's
    // range as reported by the server.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename sends the new name; the fake server answers with edits
    // spanning both files that reference the symbol.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each touched buffer to its applied edits; both
    // files must contain the renamed symbol afterwards.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4051
// Verifies project-wide text search over files on disk, and that unsaved
// edits in an open buffer are searched instead of the stale disk contents.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40])
        ])
    );

    // Edit the open buffer without saving: replace the `one::ONE` and
    // `three::THREE` references with `two::TWO`, introducing new matches.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search now reflects the in-memory (dirty) buffer contents; the new
    // match offsets account for the shorter replacement text.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40]),
            ("dir/four.rs".to_string(), vec![25..28, 36..39])
        ])
    );
}
4126
4127#[gpui::test]
4128async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4129 init_test(cx);
4130
4131 let search_query = "file";
4132
4133 let fs = FakeFs::new(cx.executor());
4134 fs.insert_tree(
4135 "/dir",
4136 json!({
4137 "one.rs": r#"// Rust file one"#,
4138 "one.ts": r#"// TypeScript file one"#,
4139 "two.rs": r#"// Rust file two"#,
4140 "two.ts": r#"// TypeScript file two"#,
4141 }),
4142 )
4143 .await;
4144 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4145
4146 assert!(
4147 search(
4148 &project,
4149 SearchQuery::text(
4150 search_query,
4151 false,
4152 true,
4153 false,
4154 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4155 Default::default(),
4156 None
4157 )
4158 .unwrap(),
4159 cx
4160 )
4161 .await
4162 .unwrap()
4163 .is_empty(),
4164 "If no inclusions match, no files should be returned"
4165 );
4166
4167 assert_eq!(
4168 search(
4169 &project,
4170 SearchQuery::text(
4171 search_query,
4172 false,
4173 true,
4174 false,
4175 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4176 Default::default(),
4177 None
4178 )
4179 .unwrap(),
4180 cx
4181 )
4182 .await
4183 .unwrap(),
4184 HashMap::from_iter([
4185 ("dir/one.rs".to_string(), vec![8..12]),
4186 ("dir/two.rs".to_string(), vec![8..12]),
4187 ]),
4188 "Rust only search should give only Rust files"
4189 );
4190
4191 assert_eq!(
4192 search(
4193 &project,
4194 SearchQuery::text(
4195 search_query,
4196 false,
4197 true,
4198 false,
4199
4200 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4201
4202 Default::default(),
4203 None,
4204 ).unwrap(),
4205 cx
4206 )
4207 .await
4208 .unwrap(),
4209 HashMap::from_iter([
4210 ("dir/one.ts".to_string(), vec![14..18]),
4211 ("dir/two.ts".to_string(), vec![14..18]),
4212 ]),
4213 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4214 );
4215
4216 assert_eq!(
4217 search(
4218 &project,
4219 SearchQuery::text(
4220 search_query,
4221 false,
4222 true,
4223 false,
4224
4225 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4226
4227 Default::default(),
4228 None,
4229 ).unwrap(),
4230 cx
4231 )
4232 .await
4233 .unwrap(),
4234 HashMap::from_iter([
4235 ("dir/two.ts".to_string(), vec![14..18]),
4236 ("dir/one.rs".to_string(), vec![8..12]),
4237 ("dir/one.ts".to_string(), vec![14..18]),
4238 ("dir/two.rs".to_string(), vec![8..12]),
4239 ]),
4240 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4241 );
4242}
4243
4244#[gpui::test]
4245async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4246 init_test(cx);
4247
4248 let search_query = "file";
4249
4250 let fs = FakeFs::new(cx.executor());
4251 fs.insert_tree(
4252 "/dir",
4253 json!({
4254 "one.rs": r#"// Rust file one"#,
4255 "one.ts": r#"// TypeScript file one"#,
4256 "two.rs": r#"// Rust file two"#,
4257 "two.ts": r#"// TypeScript file two"#,
4258 }),
4259 )
4260 .await;
4261 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4262
4263 assert_eq!(
4264 search(
4265 &project,
4266 SearchQuery::text(
4267 search_query,
4268 false,
4269 true,
4270 false,
4271 Default::default(),
4272 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4273 None,
4274 )
4275 .unwrap(),
4276 cx
4277 )
4278 .await
4279 .unwrap(),
4280 HashMap::from_iter([
4281 ("dir/one.rs".to_string(), vec![8..12]),
4282 ("dir/one.ts".to_string(), vec![14..18]),
4283 ("dir/two.rs".to_string(), vec![8..12]),
4284 ("dir/two.ts".to_string(), vec![14..18]),
4285 ]),
4286 "If no exclusions match, all files should be returned"
4287 );
4288
4289 assert_eq!(
4290 search(
4291 &project,
4292 SearchQuery::text(
4293 search_query,
4294 false,
4295 true,
4296 false,
4297 Default::default(),
4298 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4299 None,
4300 )
4301 .unwrap(),
4302 cx
4303 )
4304 .await
4305 .unwrap(),
4306 HashMap::from_iter([
4307 ("dir/one.ts".to_string(), vec![14..18]),
4308 ("dir/two.ts".to_string(), vec![14..18]),
4309 ]),
4310 "Rust exclusion search should give only TypeScript files"
4311 );
4312
4313 assert_eq!(
4314 search(
4315 &project,
4316 SearchQuery::text(
4317 search_query,
4318 false,
4319 true,
4320 false,
4321 Default::default(),
4322 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4323 None,
4324 ).unwrap(),
4325 cx
4326 )
4327 .await
4328 .unwrap(),
4329 HashMap::from_iter([
4330 ("dir/one.rs".to_string(), vec![8..12]),
4331 ("dir/two.rs".to_string(), vec![8..12]),
4332 ]),
4333 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4334 );
4335
4336 assert!(
4337 search(
4338 &project,
4339 SearchQuery::text(
4340 search_query,
4341 false,
4342 true,
4343 false,
4344 Default::default(),
4345
4346 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4347 None,
4348
4349 ).unwrap(),
4350 cx
4351 )
4352 .await
4353 .unwrap().is_empty(),
4354 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4355 );
4356}
4357
4358#[gpui::test]
4359async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4360 init_test(cx);
4361
4362 let search_query = "file";
4363
4364 let fs = FakeFs::new(cx.executor());
4365 fs.insert_tree(
4366 "/dir",
4367 json!({
4368 "one.rs": r#"// Rust file one"#,
4369 "one.ts": r#"// TypeScript file one"#,
4370 "two.rs": r#"// Rust file two"#,
4371 "two.ts": r#"// TypeScript file two"#,
4372 }),
4373 )
4374 .await;
4375 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4376
4377 assert!(
4378 search(
4379 &project,
4380 SearchQuery::text(
4381 search_query,
4382 false,
4383 true,
4384 false,
4385 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4386 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4387 None,
4388 )
4389 .unwrap(),
4390 cx
4391 )
4392 .await
4393 .unwrap()
4394 .is_empty(),
4395 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4396 );
4397
4398 assert!(
4399 search(
4400 &project,
4401 SearchQuery::text(
4402 search_query,
4403 false,
4404 true,
4405 false,
4406 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4407 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4408 None,
4409 ).unwrap(),
4410 cx
4411 )
4412 .await
4413 .unwrap()
4414 .is_empty(),
4415 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4416 );
4417
4418 assert!(
4419 search(
4420 &project,
4421 SearchQuery::text(
4422 search_query,
4423 false,
4424 true,
4425 false,
4426 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4427 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4428 None,
4429 )
4430 .unwrap(),
4431 cx
4432 )
4433 .await
4434 .unwrap()
4435 .is_empty(),
4436 "Non-matching inclusions and exclusions should not change that."
4437 );
4438
4439 assert_eq!(
4440 search(
4441 &project,
4442 SearchQuery::text(
4443 search_query,
4444 false,
4445 true,
4446 false,
4447 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4448 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4449 None,
4450 )
4451 .unwrap(),
4452 cx
4453 )
4454 .await
4455 .unwrap(),
4456 HashMap::from_iter([
4457 ("dir/one.ts".to_string(), vec![14..18]),
4458 ("dir/two.ts".to_string(), vec![14..18]),
4459 ]),
4460 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4461 );
4462}
4463
4464#[gpui::test]
4465async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4466 init_test(cx);
4467
4468 let fs = FakeFs::new(cx.executor());
4469 fs.insert_tree(
4470 "/worktree-a",
4471 json!({
4472 "haystack.rs": r#"// NEEDLE"#,
4473 "haystack.ts": r#"// NEEDLE"#,
4474 }),
4475 )
4476 .await;
4477 fs.insert_tree(
4478 "/worktree-b",
4479 json!({
4480 "haystack.rs": r#"// NEEDLE"#,
4481 "haystack.ts": r#"// NEEDLE"#,
4482 }),
4483 )
4484 .await;
4485
4486 let project = Project::test(
4487 fs.clone(),
4488 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4489 cx,
4490 )
4491 .await;
4492
4493 assert_eq!(
4494 search(
4495 &project,
4496 SearchQuery::text(
4497 "NEEDLE",
4498 false,
4499 true,
4500 false,
4501 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4502 Default::default(),
4503 None,
4504 )
4505 .unwrap(),
4506 cx
4507 )
4508 .await
4509 .unwrap(),
4510 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4511 "should only return results from included worktree"
4512 );
4513 assert_eq!(
4514 search(
4515 &project,
4516 SearchQuery::text(
4517 "NEEDLE",
4518 false,
4519 true,
4520 false,
4521 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4522 Default::default(),
4523 None,
4524 )
4525 .unwrap(),
4526 cx
4527 )
4528 .await
4529 .unwrap(),
4530 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4531 "should only return results from included worktree"
4532 );
4533
4534 assert_eq!(
4535 search(
4536 &project,
4537 SearchQuery::text(
4538 "NEEDLE",
4539 false,
4540 true,
4541 false,
4542 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4543 Default::default(),
4544 None,
4545 )
4546 .unwrap(),
4547 cx
4548 )
4549 .await
4550 .unwrap(),
4551 HashMap::from_iter([
4552 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4553 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4554 ]),
4555 "should return results from both worktrees"
4556 );
4557}
4558
/// Verifies how project text search treats gitignored entries: they are
/// skipped by default, found when the query opts into searching ignored
/// files, and still subject to include/exclude path filters.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A repo where `target/` and `node_modules/` are gitignored, with the
    // query string "key" present both in ignored and non-ignored files.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Default query (4th flag false): ignored directories are not searched.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Same query with include-ignored (4th flag true): matches from the
    // gitignored `target/` and `node_modules/` trees are returned as well.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude path filters apply on top of include-ignored: restrict
    // to the prettier directory, then drop its TS file via the exclusion.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4675
/// Verifies entry creation inside a worktree: odd-but-valid names like `b..`
/// succeed, while any path containing `..` (escaping or not) is rejected,
/// both for creating entries and for opening buffers.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The project root is `three`; `c.rs` lives outside the worktree.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // `b..` is a plain file name (not a parent reference), so it is allowed.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the valid `b..` entry was actually written to the filesystem.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4745
/// Registers four fake language servers for the same language and checks that
/// a hover request fans out only to servers advertising hover capabilities,
/// and that the merged result contains one entry per server that answered
/// with content (the `None` responder contributes nothing).
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Three servers advertise hover support; the last one does not, and must
    // therefore never receive a hover request.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer triggers language server startup for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server, tailored to its role in the test.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two respond with a marker string naming the server.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                // Responds with no hover content; must not appear in results.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // No hover capability was advertised, so this handler must
                // never be invoked.
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue one hover and wait until every capable server saw the request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4896
/// Ensures hover content made up entirely of empty/whitespace-only parts is
/// filtered out, producing no hover blocks at all.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the fake server for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // All hover parts are blank: empty string, spaces, or newlines only.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until the server actually received the hover request.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
4966
/// Checks that requesting code actions with an explicit kind filter returns
/// only actions of that kind, even when the server offers more.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the fake server for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; the request below
    // asks for only one of those kinds.
    let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
        move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        },
    );

    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Wait until the server actually received the code action request.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action should survive the kind filter.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.kind,
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5044
/// Registers four fake language servers for the same language and checks that
/// a code-action request fans out only to servers advertising code-action
/// capabilities, merging the answers from those that return actions.
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];

    // Three servers advertise code-action support; the last one does not and
    // must therefore never receive a code-action request.
    let mut language_server_rxs = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    code_action_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer triggers language server startup for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a code-action handler per server, tailored to its role.
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();

        assert!(
            !servers_with_actions_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.0.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two respond with one action titled after the server.
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Responds with no actions; must not appear in the results.
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoActionsCapabilitiesServer" => {
                // No code-action capability was advertised, so this handler
                // must never be invoked.
                let _never_handled = new_server
                    .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
    });

    // Wait until every capable server saw the code-action request.
    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .unwrap()
            .into_iter()
            .map(|code_action| code_action.lsp_action.title)
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
5201
/// Exercises `Project::move_worktree` across every adjacency case — moving a
/// worktree before/after its neighbors and across the whole list — asserting
/// the visible worktree order after each move.
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Each file is opened as its own single-file worktree, giving three
    // independent worktrees to reorder.
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first
    // [b, c, a] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
5399
/// Verifies that a buffer's unstaged-changes diff is computed against the git
/// index contents and is recomputed when the index changes on disk.
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: one added line and one changed line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_changes = project
        .update(cx, |project, cx| {
            project.open_unstaged_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The diff against the index: an insertion (the comment line) and a
    // modification (the println argument).
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[
                (0..1, "", "// print goodbye\n"),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                ),
            ],
        );
    });

    // Update the index so only the println line differs from the working copy.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    // The diff should now contain a single insertion hunk.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[(2..3, "", " println!(\"goodbye world\");\n")],
        );
    });
}
5491
5492async fn search(
5493 project: &Model<Project>,
5494 query: SearchQuery,
5495 cx: &mut gpui::TestAppContext,
5496) -> Result<HashMap<String, Vec<Range<usize>>>> {
5497 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5498 let mut results = HashMap::default();
5499 while let Some(search_result) = search_rx.next().await {
5500 match search_result {
5501 SearchResult::Buffer { buffer, ranges } => {
5502 results.entry(buffer).or_insert(ranges);
5503 }
5504 SearchResult::LimitReached => {}
5505 }
5506 }
5507 Ok(results
5508 .into_iter()
5509 .map(|(buffer, ranges)| {
5510 buffer.update(cx, |buffer, cx| {
5511 let path = buffer
5512 .file()
5513 .unwrap()
5514 .full_path(cx)
5515 .to_string_lossy()
5516 .to_string();
5517 let ranges = ranges
5518 .into_iter()
5519 .map(|range| range.to_offset(buffer))
5520 .collect::<Vec<_>>();
5521 (path, ranges)
5522 })
5523 })
5524 .collect())
5525}
5526
5527pub fn init_test(cx: &mut gpui::TestAppContext) {
5528 if std::env::var("RUST_LOG").is_ok() {
5529 env_logger::try_init().ok();
5530 }
5531
5532 cx.update(|cx| {
5533 let settings_store = SettingsStore::test(cx);
5534 cx.set_global(settings_store);
5535 release_channel::init(SemanticVersion::default(), cx);
5536 language::init(cx);
5537 Project::init_settings(cx);
5538 });
5539}
5540
5541fn json_lang() -> Arc<Language> {
5542 Arc::new(Language::new(
5543 LanguageConfig {
5544 name: "JSON".into(),
5545 matcher: LanguageMatcher {
5546 path_suffixes: vec!["json".to_string()],
5547 ..Default::default()
5548 },
5549 ..Default::default()
5550 },
5551 None,
5552 ))
5553}
5554
5555fn js_lang() -> Arc<Language> {
5556 Arc::new(Language::new(
5557 LanguageConfig {
5558 name: "JavaScript".into(),
5559 matcher: LanguageMatcher {
5560 path_suffixes: vec!["js".to_string()],
5561 ..Default::default()
5562 },
5563 ..Default::default()
5564 },
5565 None,
5566 ))
5567}
5568
5569fn rust_lang() -> Arc<Language> {
5570 Arc::new(Language::new(
5571 LanguageConfig {
5572 name: "Rust".into(),
5573 matcher: LanguageMatcher {
5574 path_suffixes: vec!["rs".to_string()],
5575 ..Default::default()
5576 },
5577 ..Default::default()
5578 },
5579 Some(tree_sitter_rust::LANGUAGE.into()),
5580 ))
5581}
5582
5583fn typescript_lang() -> Arc<Language> {
5584 Arc::new(Language::new(
5585 LanguageConfig {
5586 name: "TypeScript".into(),
5587 matcher: LanguageMatcher {
5588 path_suffixes: vec!["ts".to_string()],
5589 ..Default::default()
5590 },
5591 ..Default::default()
5592 },
5593 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5594 ))
5595}
5596
5597fn tsx_lang() -> Arc<Language> {
5598 Arc::new(Language::new(
5599 LanguageConfig {
5600 name: "tsx".into(),
5601 matcher: LanguageMatcher {
5602 path_suffixes: vec!["tsx".to_string()],
5603 ..Default::default()
5604 },
5605 ..Default::default()
5606 },
5607 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5608 ))
5609}
5610
5611fn get_all_tasks(
5612 project: &Model<Project>,
5613 worktree_id: Option<WorktreeId>,
5614 task_context: &TaskContext,
5615 cx: &mut AppContext,
5616) -> Vec<(TaskSourceKind, ResolvedTask)> {
5617 let (mut old, new) = project.update(cx, |project, cx| {
5618 project
5619 .task_store
5620 .read(cx)
5621 .task_inventory()
5622 .unwrap()
5623 .read(cx)
5624 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5625 });
5626 old.extend(new);
5627 old
5628}