1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
use std::{mem, path::{Path, PathBuf}, sync::Arc, task::Poll, time::Duration};
17use unindent::Unindent as _;
18use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
19use worktree::WorktreeModelHandle as _;
20
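// With parking allowed, a test can block on a message produced by a real OS thread.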
21#[gpui::test]
22async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
23 cx.executor().allow_parking();
24
25 let (tx, mut rx) = futures::channel::mpsc::unbounded();
26 let _thread = std::thread::spawn(move || {
27 std::fs::metadata("/Users").unwrap();
28 std::thread::sleep(Duration::from_millis(1000));
29 tx.unbounded_send(1).unwrap();
30 });
31 rx.next().await.unwrap();
32}
33
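// With parking allowed, the foreground executor can await work running on smol's blocking thread pool.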
34#[gpui::test]
35async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let io_task = smol::unblock(move || {
39 println!("sleeping on thread {:?}", std::thread::current().id());
40 std::thread::sleep(Duration::from_millis(10));
41 1
42 });
43
44 let task = cx.foreground_executor().spawn(async move {
45 io_task.await;
46 });
47
48 task.await;
49}
50
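// Worktrees follow symlinks: entries reached through a symlinked directory resolve to the same inodes as their targets.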
51#[cfg(not(windows))]
52#[gpui::test]
53async fn test_symlinks(cx: &mut gpui::TestAppContext) {
54 init_test(cx);
55 cx.executor().allow_parking();
56
57 let dir = temp_tree(json!({
58 "root": {
59 "apple": "",
60 "banana": {
61 "carrot": {
62 "date": "",
63 "endive": "",
64 }
65 },
66 "fennel": {
67 "grape": "",
68 }
69 }
70 }));
71
72 let root_link_path = dir.path().join("root_link");
73 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
74 os::unix::fs::symlink(
75 &dir.path().join("root/fennel"),
76 &dir.path().join("root/finnochio"),
77 )
78 .unwrap();
79
80 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
81
82 project.update(cx, |project, cx| {
83 let tree = project.worktrees().next().unwrap().read(cx);
84 assert_eq!(tree.file_count(), 5);
85 assert_eq!(
86 tree.inode_for_path("fennel/grape"),
87 tree.inode_for_path("finnochio/grape")
88 );
89 });
90}
91
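// Per-directory `.zed/settings.json` and `.zed/tasks.json` files apply to their own subtrees, overriding settings and contributing tasks.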
92#[gpui::test]
93async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
94 init_test(cx);
95
96 let fs = FakeFs::new(cx.executor());
97 fs.insert_tree(
98 "/the-root",
99 json!({
100 ".zed": {
101 "settings.json": r#"{ "tab_size": 8 }"#,
102 "tasks.json": r#"[{
103 "label": "cargo check",
104 "command": "cargo",
105 "args": ["check", "--all"]
106 },]"#,
107 },
108 "a": {
109 "a.rs": "fn a() {\n A\n}"
110 },
111 "b": {
112 ".zed": {
113 "settings.json": r#"{ "tab_size": 2 }"#,
114 "tasks.json": r#"[{
115 "label": "cargo check",
116 "command": "cargo",
117 "args": ["check"]
118 },]"#,
119 },
120 "b.rs": "fn b() {\n B\n}"
121 }
122 }),
123 )
124 .await;
125
126 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
127 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
128
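    // Let the worktree scan finish so both `.zed` directories have been loaded.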
129 cx.executor().run_until_parked();
130 cx.update(|cx| {
131 let tree = worktree.read(cx);
132
133 let settings_a = language_settings(
134 None,
135 Some(
136 &(File::for_entry(
137 tree.entry_for_path("a/a.rs").unwrap().clone(),
138 worktree.clone(),
139 ) as _),
140 ),
141 cx,
142 );
143 let settings_b = language_settings(
144 None,
145 Some(
146 &(File::for_entry(
147 tree.entry_for_path("b/b.rs").unwrap().clone(),
148 worktree.clone(),
149 ) as _),
150 ),
151 cx,
152 );
153
154 assert_eq!(settings_a.tab_size.get(), 8);
155 assert_eq!(settings_b.tab_size.get(), 2);
156
        let worktree_id = project.update(cx, |project, cx| {
            project.worktrees().next().unwrap().read(cx).id()
        });
160 let all_tasks = project
161 .update(cx, |project, cx| {
162 project
163 .task_inventory()
164 .update(cx, |inventory, cx| inventory.list_tasks(None, None, cx))
165 })
166 .into_iter()
167 .map(|(source_kind, task)| (source_kind, task.label))
168 .collect::<Vec<_>>();
169 assert_eq!(
170 all_tasks,
171 vec![
172 (
173 TaskSourceKind::Worktree {
                        id: worktree_id,
175 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
176 id_base: "local_tasks_for_worktree",
177 },
178 "cargo check".to_string()
179 ),
180 (
181 TaskSourceKind::Worktree {
                        id: worktree_id,
183 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
184 id_base: "local_tasks_for_worktree",
185 },
186 "cargo check".to_string()
187 ),
188 ]
189 );
190 });
191}
192
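// Exercises the language server lifecycle as buffers are opened, edited, saved, renamed, and closed.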
193#[gpui::test]
194async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
195 init_test(cx);
196
197 let fs = FakeFs::new(cx.executor());
198 fs.insert_tree(
199 "/the-root",
200 json!({
201 "test.rs": "const A: i32 = 1;",
202 "test2.rs": "",
203 "Cargo.toml": "a = 1",
204 "package.json": "{\"a\": 1}",
205 }),
206 )
207 .await;
208
209 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
210 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
211
212 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
213 "Rust",
214 FakeLspAdapter {
215 name: "the-rust-language-server",
216 capabilities: lsp::ServerCapabilities {
217 completion_provider: Some(lsp::CompletionOptions {
218 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
219 ..Default::default()
220 }),
221 ..Default::default()
222 },
223 ..Default::default()
224 },
225 );
226 let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
227 "JSON",
228 FakeLspAdapter {
229 name: "the-json-language-server",
230 capabilities: lsp::ServerCapabilities {
231 completion_provider: Some(lsp::CompletionOptions {
232 trigger_characters: Some(vec![":".to_string()]),
233 ..Default::default()
234 }),
235 ..Default::default()
236 },
237 ..Default::default()
238 },
239 );
240
241 // Open a buffer without an associated language server.
242 let toml_buffer = project
243 .update(cx, |project, cx| {
244 project.open_local_buffer("/the-root/Cargo.toml", cx)
245 })
246 .await
247 .unwrap();
248
249 // Open a buffer with an associated language server before the language for it has been loaded.
250 let rust_buffer = project
251 .update(cx, |project, cx| {
252 project.open_local_buffer("/the-root/test.rs", cx)
253 })
254 .await
255 .unwrap();
256 rust_buffer.update(cx, |buffer, _| {
257 assert_eq!(buffer.language().map(|l| l.name()), None);
258 });
259
260 // Now we add the languages to the project, and ensure they get assigned to all
261 // the relevant open buffers.
262 language_registry.add(json_lang());
263 language_registry.add(rust_lang());
264 cx.executor().run_until_parked();
265 rust_buffer.update(cx, |buffer, _| {
266 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
267 });
268
269 // A server is started up, and it is notified about Rust files.
270 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
271 assert_eq!(
272 fake_rust_server
273 .receive_notification::<lsp::notification::DidOpenTextDocument>()
274 .await
275 .text_document,
276 lsp::TextDocumentItem {
277 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
278 version: 0,
279 text: "const A: i32 = 1;".to_string(),
280 language_id: Default::default()
281 }
282 );
283
284 // The buffer is configured based on the language server's capabilities.
285 rust_buffer.update(cx, |buffer, _| {
286 assert_eq!(
287 buffer.completion_triggers(),
288 &[".".to_string(), "::".to_string()]
289 );
290 });
291 toml_buffer.update(cx, |buffer, _| {
292 assert!(buffer.completion_triggers().is_empty());
293 });
294
295 // Edit a buffer. The changes are reported to the language server.
296 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
297 assert_eq!(
298 fake_rust_server
299 .receive_notification::<lsp::notification::DidChangeTextDocument>()
300 .await
301 .text_document,
302 lsp::VersionedTextDocumentIdentifier::new(
303 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
304 1
305 )
306 );
307
308 // Open a third buffer with a different associated language server.
309 let json_buffer = project
310 .update(cx, |project, cx| {
311 project.open_local_buffer("/the-root/package.json", cx)
312 })
313 .await
314 .unwrap();
315
    // A JSON language server is started up, and it is notified only about the JSON buffer.
317 let mut fake_json_server = fake_json_servers.next().await.unwrap();
318 assert_eq!(
319 fake_json_server
320 .receive_notification::<lsp::notification::DidOpenTextDocument>()
321 .await
322 .text_document,
323 lsp::TextDocumentItem {
324 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
325 version: 0,
326 text: "{\"a\": 1}".to_string(),
327 language_id: Default::default()
328 }
329 );
330
331 // This buffer is configured based on the second language server's
332 // capabilities.
333 json_buffer.update(cx, |buffer, _| {
334 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
335 });
336
337 // When opening another buffer whose language server is already running,
338 // it is also configured based on the existing language server's capabilities.
339 let rust_buffer2 = project
340 .update(cx, |project, cx| {
341 project.open_local_buffer("/the-root/test2.rs", cx)
342 })
343 .await
344 .unwrap();
345 rust_buffer2.update(cx, |buffer, _| {
346 assert_eq!(
347 buffer.completion_triggers(),
348 &[".".to_string(), "::".to_string()]
349 );
350 });
351
352 // Changes are reported only to servers matching the buffer's language.
353 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
354 rust_buffer2.update(cx, |buffer, cx| {
355 buffer.edit([(0..0, "let x = 1;")], None, cx)
356 });
357 assert_eq!(
358 fake_rust_server
359 .receive_notification::<lsp::notification::DidChangeTextDocument>()
360 .await
361 .text_document,
362 lsp::VersionedTextDocumentIdentifier::new(
363 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
364 1
365 )
366 );
367
368 // Save notifications are reported to all servers.
369 project
370 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
371 .await
372 .unwrap();
373 assert_eq!(
374 fake_rust_server
375 .receive_notification::<lsp::notification::DidSaveTextDocument>()
376 .await
377 .text_document,
378 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
379 );
380 assert_eq!(
381 fake_json_server
382 .receive_notification::<lsp::notification::DidSaveTextDocument>()
383 .await
384 .text_document,
385 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
386 );
387
388 // Renames are reported only to servers matching the buffer's language.
389 fs.rename(
390 Path::new("/the-root/test2.rs"),
391 Path::new("/the-root/test3.rs"),
392 Default::default(),
393 )
394 .await
395 .unwrap();
396 assert_eq!(
397 fake_rust_server
398 .receive_notification::<lsp::notification::DidCloseTextDocument>()
399 .await
400 .text_document,
401 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
402 );
403 assert_eq!(
404 fake_rust_server
405 .receive_notification::<lsp::notification::DidOpenTextDocument>()
406 .await
407 .text_document,
408 lsp::TextDocumentItem {
409 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
410 version: 0,
411 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
412 language_id: Default::default()
413 },
414 );
415
416 rust_buffer2.update(cx, |buffer, cx| {
417 buffer.update_diagnostics(
418 LanguageServerId(0),
419 DiagnosticSet::from_sorted_entries(
420 vec![DiagnosticEntry {
421 diagnostic: Default::default(),
422 range: Anchor::MIN..Anchor::MAX,
423 }],
424 &buffer.snapshot(),
425 ),
426 cx,
427 );
428 assert_eq!(
429 buffer
430 .snapshot()
431 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
432 .count(),
433 1
434 );
435 });
436
437 // When the rename changes the extension of the file, the buffer gets closed on the old
438 // language server and gets opened on the new one.
439 fs.rename(
440 Path::new("/the-root/test3.rs"),
441 Path::new("/the-root/test3.json"),
442 Default::default(),
443 )
444 .await
445 .unwrap();
446 assert_eq!(
447 fake_rust_server
448 .receive_notification::<lsp::notification::DidCloseTextDocument>()
449 .await
450 .text_document,
451 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
452 );
453 assert_eq!(
454 fake_json_server
455 .receive_notification::<lsp::notification::DidOpenTextDocument>()
456 .await
457 .text_document,
458 lsp::TextDocumentItem {
459 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
460 version: 0,
461 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
462 language_id: Default::default()
463 },
464 );
465
466 // We clear the diagnostics, since the language has changed.
467 rust_buffer2.update(cx, |buffer, _| {
468 assert_eq!(
469 buffer
470 .snapshot()
471 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
472 .count(),
473 0
474 );
475 });
476
    // The renamed file's version resets after changing language servers.
478 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
479 assert_eq!(
480 fake_json_server
481 .receive_notification::<lsp::notification::DidChangeTextDocument>()
482 .await
483 .text_document,
484 lsp::VersionedTextDocumentIdentifier::new(
485 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
486 1
487 )
488 );
489
490 // Restart language servers
491 project.update(cx, |project, cx| {
492 project.restart_language_servers_for_buffers(
493 vec![rust_buffer.clone(), json_buffer.clone()],
494 cx,
495 );
496 });
497
498 let mut rust_shutdown_requests = fake_rust_server
499 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
500 let mut json_shutdown_requests = fake_json_server
501 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
502 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
503
504 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
505 let mut fake_json_server = fake_json_servers.next().await.unwrap();
506
    // Ensure the Rust document is reopened in the new Rust language server.
508 assert_eq!(
509 fake_rust_server
510 .receive_notification::<lsp::notification::DidOpenTextDocument>()
511 .await
512 .text_document,
513 lsp::TextDocumentItem {
514 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
515 version: 0,
516 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
517 language_id: Default::default()
518 }
519 );
520
    // Ensure the JSON documents are reopened in the new JSON language server.
522 assert_set_eq!(
523 [
524 fake_json_server
525 .receive_notification::<lsp::notification::DidOpenTextDocument>()
526 .await
527 .text_document,
528 fake_json_server
529 .receive_notification::<lsp::notification::DidOpenTextDocument>()
530 .await
531 .text_document,
532 ],
533 [
534 lsp::TextDocumentItem {
535 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
536 version: 0,
537 text: json_buffer.update(cx, |buffer, _| buffer.text()),
538 language_id: Default::default()
539 },
540 lsp::TextDocumentItem {
541 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
542 version: 0,
543 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
544 language_id: Default::default()
545 }
546 ]
547 );
548
549 // Close notifications are reported only to servers matching the buffer's language.
550 cx.update(|_| drop(json_buffer));
551 let close_message = lsp::DidCloseTextDocumentParams {
552 text_document: lsp::TextDocumentIdentifier::new(
553 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
554 ),
555 };
556 assert_eq!(
557 fake_json_server
558 .receive_notification::<lsp::notification::DidCloseTextDocument>()
559 .await,
560 close_message,
561 );
562}
563
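// File system changes are reported to a language server according to the watchers it registers, including watchers inside gitignored directories.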
564#[gpui::test]
565async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
566 init_test(cx);
567
568 let fs = FakeFs::new(cx.executor());
569 fs.insert_tree(
570 "/the-root",
571 json!({
572 ".gitignore": "target\n",
573 "src": {
574 "a.rs": "",
575 "b.rs": "",
576 },
577 "target": {
578 "x": {
579 "out": {
580 "x.rs": ""
581 }
582 },
583 "y": {
584 "out": {
585 "y.rs": "",
586 }
587 },
588 "z": {
589 "out": {
590 "z.rs": ""
591 }
592 }
593 }
594 }),
595 )
596 .await;
597
598 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
599 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
600 language_registry.add(rust_lang());
601 let mut fake_servers = language_registry.register_fake_lsp_adapter(
602 "Rust",
603 FakeLspAdapter {
604 name: "the-language-server",
605 ..Default::default()
606 },
607 );
608
609 cx.executor().run_until_parked();
610
611 // Start the language server by opening a buffer with a compatible file extension.
612 let _buffer = project
613 .update(cx, |project, cx| {
614 project.open_local_buffer("/the-root/src/a.rs", cx)
615 })
616 .await
617 .unwrap();
618
619 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
620 project.update(cx, |project, cx| {
621 let worktree = project.worktrees().next().unwrap();
622 assert_eq!(
623 worktree
624 .read(cx)
625 .snapshot()
626 .entries(true)
627 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
628 .collect::<Vec<_>>(),
629 &[
630 (Path::new(""), false),
631 (Path::new(".gitignore"), false),
632 (Path::new("src"), false),
633 (Path::new("src/a.rs"), false),
634 (Path::new("src/b.rs"), false),
635 (Path::new("target"), true),
636 ]
637 );
638 });
639
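    // Snapshot the read_dir call count so we can verify below how many extra directory scans the watchers trigger.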
640 let prev_read_dir_count = fs.read_dir_call_count();
641
642 // Keep track of the FS events reported to the language server.
643 let fake_server = fake_servers.next().await.unwrap();
644 let file_changes = Arc::new(Mutex::new(Vec::new()));
645 fake_server
646 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
647 registrations: vec![lsp::Registration {
648 id: Default::default(),
649 method: "workspace/didChangeWatchedFiles".to_string(),
650 register_options: serde_json::to_value(
651 lsp::DidChangeWatchedFilesRegistrationOptions {
652 watchers: vec![
653 lsp::FileSystemWatcher {
654 glob_pattern: lsp::GlobPattern::String(
655 "/the-root/Cargo.toml".to_string(),
656 ),
657 kind: None,
658 },
659 lsp::FileSystemWatcher {
660 glob_pattern: lsp::GlobPattern::String(
661 "/the-root/src/*.{rs,c}".to_string(),
662 ),
663 kind: None,
664 },
665 lsp::FileSystemWatcher {
666 glob_pattern: lsp::GlobPattern::String(
667 "/the-root/target/y/**/*.rs".to_string(),
668 ),
669 kind: None,
670 },
671 ],
672 },
673 )
674 .ok(),
675 }],
676 })
677 .await
678 .unwrap();
679 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
680 let file_changes = file_changes.clone();
681 move |params, _| {
682 let mut file_changes = file_changes.lock();
683 file_changes.extend(params.changes);
684 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
685 }
686 });
687
688 cx.executor().run_until_parked();
689 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
690 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
691
692 // Now the language server has asked us to watch an ignored directory path,
693 // so we recursively load it.
694 project.update(cx, |project, cx| {
695 let worktree = project.worktrees().next().unwrap();
696 assert_eq!(
697 worktree
698 .read(cx)
699 .snapshot()
700 .entries(true)
701 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
702 .collect::<Vec<_>>(),
703 &[
704 (Path::new(""), false),
705 (Path::new(".gitignore"), false),
706 (Path::new("src"), false),
707 (Path::new("src/a.rs"), false),
708 (Path::new("src/b.rs"), false),
709 (Path::new("target"), true),
710 (Path::new("target/x"), true),
711 (Path::new("target/y"), true),
712 (Path::new("target/y/out"), true),
713 (Path::new("target/y/out/y.rs"), true),
714 (Path::new("target/z"), true),
715 ]
716 );
717 });
718
    // Perform some file system mutations, three of which match the watched patterns,
    // and two of which do not.
721 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
722 .await
723 .unwrap();
724 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
725 .await
726 .unwrap();
727 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
728 .await
729 .unwrap();
730 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
731 .await
732 .unwrap();
733 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
734 .await
735 .unwrap();
736
737 // The language server receives events for the FS mutations that match its watch patterns.
738 cx.executor().run_until_parked();
739 assert_eq!(
740 &*file_changes.lock(),
741 &[
742 lsp::FileEvent {
743 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
744 typ: lsp::FileChangeType::DELETED,
745 },
746 lsp::FileEvent {
747 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
748 typ: lsp::FileChangeType::CREATED,
749 },
750 lsp::FileEvent {
751 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
752 typ: lsp::FileChangeType::CREATED,
753 },
754 ]
755 );
756}
757
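// Diagnostics published for single-file worktrees are applied to the corresponding buffers.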
758#[gpui::test]
759async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
760 init_test(cx);
761
762 let fs = FakeFs::new(cx.executor());
763 fs.insert_tree(
764 "/dir",
765 json!({
766 "a.rs": "let a = 1;",
767 "b.rs": "let b = 2;"
768 }),
769 )
770 .await;
771
772 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
773
774 let buffer_a = project
775 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
776 .await
777 .unwrap();
778 let buffer_b = project
779 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
780 .await
781 .unwrap();
782
783 project.update(cx, |project, cx| {
784 project
785 .update_diagnostics(
786 LanguageServerId(0),
787 lsp::PublishDiagnosticsParams {
788 uri: Url::from_file_path("/dir/a.rs").unwrap(),
789 version: None,
790 diagnostics: vec![lsp::Diagnostic {
791 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
792 severity: Some(lsp::DiagnosticSeverity::ERROR),
793 message: "error 1".to_string(),
794 ..Default::default()
795 }],
796 },
797 &[],
798 cx,
799 )
800 .unwrap();
801 project
802 .update_diagnostics(
803 LanguageServerId(0),
804 lsp::PublishDiagnosticsParams {
805 uri: Url::from_file_path("/dir/b.rs").unwrap(),
806 version: None,
807 diagnostics: vec![lsp::Diagnostic {
808 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
809 severity: Some(lsp::DiagnosticSeverity::WARNING),
810 message: "error 2".to_string(),
811 ..Default::default()
812 }],
813 },
814 &[],
815 cx,
816 )
817 .unwrap();
818 });
819
820 buffer_a.update(cx, |buffer, _| {
821 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
822 assert_eq!(
823 chunks
824 .iter()
825 .map(|(s, d)| (s.as_str(), *d))
826 .collect::<Vec<_>>(),
827 &[
828 ("let ", None),
829 ("a", Some(DiagnosticSeverity::ERROR)),
830 (" = 1;", None),
831 ]
832 );
833 });
834 buffer_b.update(cx, |buffer, _| {
835 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
836 assert_eq!(
837 chunks
838 .iter()
839 .map(|(s, d)| (s.as_str(), *d))
840 .collect::<Vec<_>>(),
841 &[
842 ("let ", None),
843 ("b", Some(DiagnosticSeverity::WARNING)),
844 (" = 2;", None),
845 ]
846 );
847 });
848}
849
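// Gitignored buffers and buffers from invisible worktrees still get in-buffer diagnostics, but those files are excluded from the default project summaries.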
850#[gpui::test]
851async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
852 init_test(cx);
853
854 let fs = FakeFs::new(cx.executor());
855 fs.insert_tree(
856 "/root",
857 json!({
858 "dir": {
859 ".git": {
860 "HEAD": "ref: refs/heads/main",
861 },
862 ".gitignore": "b.rs",
863 "a.rs": "let a = 1;",
864 "b.rs": "let b = 2;",
865 },
866 "other.rs": "let b = c;"
867 }),
868 )
869 .await;
870
871 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
872 let (worktree, _) = project
873 .update(cx, |project, cx| {
874 project.find_or_create_local_worktree("/root/dir", true, cx)
875 })
876 .await
877 .unwrap();
878 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
879
880 let (worktree, _) = project
881 .update(cx, |project, cx| {
882 project.find_or_create_local_worktree("/root/other.rs", false, cx)
883 })
884 .await
885 .unwrap();
886 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
887
888 let server_id = LanguageServerId(0);
889 project.update(cx, |project, cx| {
890 project
891 .update_diagnostics(
892 server_id,
893 lsp::PublishDiagnosticsParams {
894 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
895 version: None,
896 diagnostics: vec![lsp::Diagnostic {
897 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
898 severity: Some(lsp::DiagnosticSeverity::ERROR),
899 message: "unused variable 'b'".to_string(),
900 ..Default::default()
901 }],
902 },
903 &[],
904 cx,
905 )
906 .unwrap();
907 project
908 .update_diagnostics(
909 server_id,
910 lsp::PublishDiagnosticsParams {
911 uri: Url::from_file_path("/root/other.rs").unwrap(),
912 version: None,
913 diagnostics: vec![lsp::Diagnostic {
914 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
915 severity: Some(lsp::DiagnosticSeverity::ERROR),
916 message: "unknown variable 'c'".to_string(),
917 ..Default::default()
918 }],
919 },
920 &[],
921 cx,
922 )
923 .unwrap();
924 });
925
926 let main_ignored_buffer = project
927 .update(cx, |project, cx| {
928 project.open_buffer((main_worktree_id, "b.rs"), cx)
929 })
930 .await
931 .unwrap();
932 main_ignored_buffer.update(cx, |buffer, _| {
933 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
934 assert_eq!(
935 chunks
936 .iter()
937 .map(|(s, d)| (s.as_str(), *d))
938 .collect::<Vec<_>>(),
939 &[
940 ("let ", None),
941 ("b", Some(DiagnosticSeverity::ERROR)),
942 (" = 2;", None),
943 ],
            "Gitignored buffers should still get in-buffer diagnostics",
945 );
946 });
947 let other_buffer = project
948 .update(cx, |project, cx| {
949 project.open_buffer((other_worktree_id, ""), cx)
950 })
951 .await
952 .unwrap();
953 other_buffer.update(cx, |buffer, _| {
954 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
955 assert_eq!(
956 chunks
957 .iter()
958 .map(|(s, d)| (s.as_str(), *d))
959 .collect::<Vec<_>>(),
960 &[
961 ("let b = ", None),
962 ("c", Some(DiagnosticSeverity::ERROR)),
963 (";", None),
964 ],
965 "Buffers from hidden projects should still get in-buffer diagnostics"
966 );
967 });
968
969 project.update(cx, |project, cx| {
970 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
971 assert_eq!(
972 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
973 vec![(
974 ProjectPath {
975 worktree_id: main_worktree_id,
976 path: Arc::from(Path::new("b.rs")),
977 },
978 server_id,
979 DiagnosticSummary {
980 error_count: 1,
981 warning_count: 0,
982 }
983 )]
984 );
985 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
986 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
987 });
988}
989
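// Progress notifications using the disk-based diagnostics token produce the corresponding project events.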
990#[gpui::test]
991async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
992 init_test(cx);
993
994 let progress_token = "the-progress-token";
995
996 let fs = FakeFs::new(cx.executor());
997 fs.insert_tree(
998 "/dir",
999 json!({
1000 "a.rs": "fn a() { A }",
1001 "b.rs": "const y: i32 = 1",
1002 }),
1003 )
1004 .await;
1005
1006 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1007 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1008
1009 language_registry.add(rust_lang());
1010 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1011 "Rust",
1012 FakeLspAdapter {
1013 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1014 disk_based_diagnostics_sources: vec!["disk".into()],
1015 ..Default::default()
1016 },
1017 );
1018
1019 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1020
    // Cause the worktree to start the fake language server.
1022 let _buffer = project
1023 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1024 .await
1025 .unwrap();
1026
1027 let mut events = cx.events(&project);
1028
1029 let fake_server = fake_servers.next().await.unwrap();
1030 assert_eq!(
1031 events.next().await.unwrap(),
1032 Event::LanguageServerAdded(LanguageServerId(0)),
1033 );
1034
1035 fake_server
1036 .start_progress(format!("{}/0", progress_token))
1037 .await;
1038 assert_eq!(
1039 events.next().await.unwrap(),
1040 Event::DiskBasedDiagnosticsStarted {
1041 language_server_id: LanguageServerId(0),
1042 }
1043 );
1044
1045 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1046 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1047 version: None,
1048 diagnostics: vec![lsp::Diagnostic {
1049 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1050 severity: Some(lsp::DiagnosticSeverity::ERROR),
1051 message: "undefined variable 'A'".to_string(),
1052 ..Default::default()
1053 }],
1054 });
1055 assert_eq!(
1056 events.next().await.unwrap(),
1057 Event::DiagnosticsUpdated {
1058 language_server_id: LanguageServerId(0),
1059 path: (worktree_id, Path::new("a.rs")).into()
1060 }
1061 );
1062
1063 fake_server.end_progress(format!("{}/0", progress_token));
1064 assert_eq!(
1065 events.next().await.unwrap(),
1066 Event::DiskBasedDiagnosticsFinished {
1067 language_server_id: LanguageServerId(0)
1068 }
1069 );
1070
1071 let buffer = project
1072 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1073 .await
1074 .unwrap();
1075
1076 buffer.update(cx, |buffer, _| {
1077 let snapshot = buffer.snapshot();
1078 let diagnostics = snapshot
1079 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1080 .collect::<Vec<_>>();
1081 assert_eq!(
1082 diagnostics,
1083 &[DiagnosticEntry {
1084 range: Point::new(0, 9)..Point::new(0, 10),
1085 diagnostic: Diagnostic {
1086 severity: lsp::DiagnosticSeverity::ERROR,
1087 message: "undefined variable 'A'".to_string(),
1088 group_id: 0,
1089 is_primary: true,
1090 ..Default::default()
1091 }
1092 }]
1093 )
1094 });
1095
1096 // Ensure publishing empty diagnostics twice only results in one update event.
1097 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1098 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1099 version: None,
1100 diagnostics: Default::default(),
1101 });
1102 assert_eq!(
1103 events.next().await.unwrap(),
1104 Event::DiagnosticsUpdated {
1105 language_server_id: LanguageServerId(0),
1106 path: (worktree_id, Path::new("a.rs")).into()
1107 }
1108 );
1109
1110 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1111 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1112 version: None,
1113 diagnostics: Default::default(),
1114 });
1115 cx.executor().run_until_parked();
1116 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1117}
1118
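// Restarting a language server while its disk-based diagnostics are still in progress should not leave the project in the "updating" state.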
1119#[gpui::test]
1120async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1121 init_test(cx);
1122
1123 let progress_token = "the-progress-token";
1124
1125 let fs = FakeFs::new(cx.executor());
1126 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1127
1128 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1129
1130 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1131 language_registry.add(rust_lang());
1132 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1133 "Rust",
1134 FakeLspAdapter {
1135 name: "the-language-server",
1136 disk_based_diagnostics_sources: vec!["disk".into()],
1137 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1138 ..Default::default()
1139 },
1140 );
1141
1142 let buffer = project
1143 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1144 .await
1145 .unwrap();
1146
1147 // Simulate diagnostics starting to update.
1148 let fake_server = fake_servers.next().await.unwrap();
1149 fake_server.start_progress(progress_token).await;
1150
1151 // Restart the server before the diagnostics finish updating.
1152 project.update(cx, |project, cx| {
1153 project.restart_language_servers_for_buffers([buffer], cx);
1154 });
1155 let mut events = cx.events(&project);
1156
1157 // Simulate the newly started server sending more diagnostics.
1158 let fake_server = fake_servers.next().await.unwrap();
1159 assert_eq!(
1160 events.next().await.unwrap(),
1161 Event::LanguageServerAdded(LanguageServerId(1))
1162 );
1163 fake_server.start_progress(progress_token).await;
1164 assert_eq!(
1165 events.next().await.unwrap(),
1166 Event::DiskBasedDiagnosticsStarted {
1167 language_server_id: LanguageServerId(1)
1168 }
1169 );
1170 project.update(cx, |project, _| {
1171 assert_eq!(
1172 project
1173 .language_servers_running_disk_based_diagnostics()
1174 .collect::<Vec<_>>(),
1175 [LanguageServerId(1)]
1176 );
1177 });
1178
1179 // All diagnostics are considered done, despite the old server's diagnostic
1180 // task never completing.
1181 fake_server.end_progress(progress_token);
1182 assert_eq!(
1183 events.next().await.unwrap(),
1184 Event::DiskBasedDiagnosticsFinished {
1185 language_server_id: LanguageServerId(1)
1186 }
1187 );
1188 project.update(cx, |project, _| {
1189 assert_eq!(
1190 project
1191 .language_servers_running_disk_based_diagnostics()
1192 .collect::<Vec<_>>(),
1193 [LanguageServerId(0); 0]
1194 );
1195 });
1196}
1197
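// Restarting a language server clears the diagnostics it previously published.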
1198#[gpui::test]
1199async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1200 init_test(cx);
1201
1202 let fs = FakeFs::new(cx.executor());
1203 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1204
1205 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1206
1207 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1208 language_registry.add(rust_lang());
1209 let mut fake_servers =
1210 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1211
1212 let buffer = project
1213 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1214 .await
1215 .unwrap();
1216
1217 // Publish diagnostics
1218 let fake_server = fake_servers.next().await.unwrap();
1219 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1220 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1221 version: None,
1222 diagnostics: vec![lsp::Diagnostic {
1223 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1224 severity: Some(lsp::DiagnosticSeverity::ERROR),
1225 message: "the message".to_string(),
1226 ..Default::default()
1227 }],
1228 });
1229
1230 cx.executor().run_until_parked();
1231 buffer.update(cx, |buffer, _| {
1232 assert_eq!(
1233 buffer
1234 .snapshot()
1235 .diagnostics_in_range::<_, usize>(0..1, false)
1236 .map(|entry| entry.diagnostic.message.clone())
1237 .collect::<Vec<_>>(),
1238 ["the message".to_string()]
1239 );
1240 });
1241 project.update(cx, |project, cx| {
1242 assert_eq!(
1243 project.diagnostic_summary(false, cx),
1244 DiagnosticSummary {
1245 error_count: 1,
1246 warning_count: 0,
1247 }
1248 );
1249 });
1250
1251 project.update(cx, |project, cx| {
1252 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1253 });
1254
1255 // The diagnostics are cleared.
1256 cx.executor().run_until_parked();
1257 buffer.update(cx, |buffer, _| {
1258 assert_eq!(
1259 buffer
1260 .snapshot()
1261 .diagnostics_in_range::<_, usize>(0..1, false)
1262 .map(|entry| entry.diagnostic.message.clone())
1263 .collect::<Vec<_>>(),
1264 Vec::<String>::new(),
1265 );
1266 });
1267 project.update(cx, |project, cx| {
1268 assert_eq!(
1269 project.diagnostic_summary(false, cx),
1270 DiagnosticSummary {
1271 error_count: 0,
1272 warning_count: 0,
1273 }
1274 );
1275 });
1276}
1277
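// A server that reported diagnostics against an unknown buffer version can still be restarted, and the buffer is reopened at version 0.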
1278#[gpui::test]
1279async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1280 init_test(cx);
1281
1282 let fs = FakeFs::new(cx.executor());
1283 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1284
1285 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1286 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1287
1288 language_registry.add(rust_lang());
1289 let mut fake_servers =
1290 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1291
1292 let buffer = project
1293 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1294 .await
1295 .unwrap();
1296
1297 // Before restarting the server, report diagnostics with an unknown buffer version.
1298 let fake_server = fake_servers.next().await.unwrap();
1299 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1300 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1301 version: Some(10000),
1302 diagnostics: Vec::new(),
1303 });
1304 cx.executor().run_until_parked();
1305
1306 project.update(cx, |project, cx| {
1307 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1308 });
1309 let mut fake_server = fake_servers.next().await.unwrap();
1310 let notification = fake_server
1311 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1312 .await
1313 .text_document;
1314 assert_eq!(notification.version, 0);
1315}
1316
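// Toggling `enable_language_server` per language starts and stops only the affected servers.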
1317#[gpui::test]
1318async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1319 init_test(cx);
1320
1321 let fs = FakeFs::new(cx.executor());
1322 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1323 .await;
1324
1325 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1326 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1327
1328 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
1329 "Rust",
1330 FakeLspAdapter {
1331 name: "rust-lsp",
1332 ..Default::default()
1333 },
1334 );
1335 let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
1336 "JavaScript",
1337 FakeLspAdapter {
1338 name: "js-lsp",
1339 ..Default::default()
1340 },
1341 );
1342 language_registry.add(rust_lang());
1343 language_registry.add(js_lang());
1344
1345 let _rs_buffer = project
1346 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1347 .await
1348 .unwrap();
1349 let _js_buffer = project
1350 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1351 .await
1352 .unwrap();
1353
1354 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1355 assert_eq!(
1356 fake_rust_server_1
1357 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1358 .await
1359 .text_document
1360 .uri
1361 .as_str(),
1362 "file:///dir/a.rs"
1363 );
1364
1365 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1366 assert_eq!(
1367 fake_js_server
1368 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1369 .await
1370 .text_document
1371 .uri
1372 .as_str(),
1373 "file:///dir/b.js"
1374 );
1375
    // Disable the Rust language server, ensuring only that server gets stopped.
1377 cx.update(|cx| {
1378 cx.update_global(|settings: &mut SettingsStore, cx| {
1379 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1380 settings.languages.insert(
1381 Arc::from("Rust"),
1382 LanguageSettingsContent {
1383 enable_language_server: Some(false),
1384 ..Default::default()
1385 },
1386 );
1387 });
1388 })
1389 });
1390 fake_rust_server_1
1391 .receive_notification::<lsp::notification::Exit>()
1392 .await;
1393
1394 // Enable Rust and disable JavaScript language servers, ensuring that the
1395 // former gets started again and that the latter stops.
1396 cx.update(|cx| {
1397 cx.update_global(|settings: &mut SettingsStore, cx| {
1398 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1399 settings.languages.insert(
1400 Arc::from("Rust"),
1401 LanguageSettingsContent {
1402 enable_language_server: Some(true),
1403 ..Default::default()
1404 },
1405 );
1406 settings.languages.insert(
1407 Arc::from("JavaScript"),
1408 LanguageSettingsContent {
1409 enable_language_server: Some(false),
1410 ..Default::default()
1411 },
1412 );
1413 });
1414 })
1415 });
1416 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1417 assert_eq!(
1418 fake_rust_server_2
1419 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1420 .await
1421 .text_document
1422 .uri
1423 .as_str(),
1424 "file:///dir/a.rs"
1425 );
1426 fake_js_server
1427 .receive_notification::<lsp::notification::Exit>()
1428 .await;
1429}
1430
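// Diagnostics reported against older buffer versions are mapped through subsequent edits, and overlapping and out-of-order ranges are handled.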
1431#[gpui::test(iterations = 3)]
1432async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1433 init_test(cx);
1434
1435 let text = "
1436 fn a() { A }
1437 fn b() { BB }
1438 fn c() { CCC }
1439 "
1440 .unindent();
1441
1442 let fs = FakeFs::new(cx.executor());
1443 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1444
1445 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1446 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1447
1448 language_registry.add(rust_lang());
1449 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1450 "Rust",
1451 FakeLspAdapter {
1452 disk_based_diagnostics_sources: vec!["disk".into()],
1453 ..Default::default()
1454 },
1455 );
1456
1457 let buffer = project
1458 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1459 .await
1460 .unwrap();
1461
1462 let mut fake_server = fake_servers.next().await.unwrap();
1463 let open_notification = fake_server
1464 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1465 .await;
1466
1467 // Edit the buffer, moving the content down
1468 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1469 let change_notification_1 = fake_server
1470 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1471 .await;
1472 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1473
1474 // Report some diagnostics for the initial version of the buffer
1475 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1476 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1477 version: Some(open_notification.text_document.version),
1478 diagnostics: vec![
1479 lsp::Diagnostic {
1480 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1481 severity: Some(DiagnosticSeverity::ERROR),
1482 message: "undefined variable 'A'".to_string(),
1483 source: Some("disk".to_string()),
1484 ..Default::default()
1485 },
1486 lsp::Diagnostic {
1487 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1488 severity: Some(DiagnosticSeverity::ERROR),
1489 message: "undefined variable 'BB'".to_string(),
1490 source: Some("disk".to_string()),
1491 ..Default::default()
1492 },
1493 lsp::Diagnostic {
1494 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1495 severity: Some(DiagnosticSeverity::ERROR),
1496 source: Some("disk".to_string()),
1497 message: "undefined variable 'CCC'".to_string(),
1498 ..Default::default()
1499 },
1500 ],
1501 });
1502
1503 // The diagnostics have moved down since they were created.
1504 cx.executor().run_until_parked();
1505 buffer.update(cx, |buffer, _| {
1506 assert_eq!(
1507 buffer
1508 .snapshot()
1509 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1510 .collect::<Vec<_>>(),
1511 &[
1512 DiagnosticEntry {
1513 range: Point::new(3, 9)..Point::new(3, 11),
1514 diagnostic: Diagnostic {
1515 source: Some("disk".into()),
1516 severity: DiagnosticSeverity::ERROR,
1517 message: "undefined variable 'BB'".to_string(),
1518 is_disk_based: true,
1519 group_id: 1,
1520 is_primary: true,
1521 ..Default::default()
1522 },
1523 },
1524 DiagnosticEntry {
1525 range: Point::new(4, 9)..Point::new(4, 12),
1526 diagnostic: Diagnostic {
1527 source: Some("disk".into()),
1528 severity: DiagnosticSeverity::ERROR,
1529 message: "undefined variable 'CCC'".to_string(),
1530 is_disk_based: true,
1531 group_id: 2,
1532 is_primary: true,
1533 ..Default::default()
1534 }
1535 }
1536 ]
1537 );
1538 assert_eq!(
1539 chunks_with_diagnostics(buffer, 0..buffer.len()),
1540 [
1541 ("\n\nfn a() { ".to_string(), None),
1542 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1543 (" }\nfn b() { ".to_string(), None),
1544 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1545 (" }\nfn c() { ".to_string(), None),
1546 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1547 (" }\n".to_string(), None),
1548 ]
1549 );
1550 assert_eq!(
1551 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1552 [
1553 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1554 (" }\nfn c() { ".to_string(), None),
1555 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1556 ]
1557 );
1558 });
1559
1560 // Ensure overlapping diagnostics are highlighted correctly.
1561 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1562 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1563 version: Some(open_notification.text_document.version),
1564 diagnostics: vec![
1565 lsp::Diagnostic {
1566 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1567 severity: Some(DiagnosticSeverity::ERROR),
1568 message: "undefined variable 'A'".to_string(),
1569 source: Some("disk".to_string()),
1570 ..Default::default()
1571 },
1572 lsp::Diagnostic {
1573 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1574 severity: Some(DiagnosticSeverity::WARNING),
1575 message: "unreachable statement".to_string(),
1576 source: Some("disk".to_string()),
1577 ..Default::default()
1578 },
1579 ],
1580 });
1581
1582 cx.executor().run_until_parked();
1583 buffer.update(cx, |buffer, _| {
1584 assert_eq!(
1585 buffer
1586 .snapshot()
1587 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1588 .collect::<Vec<_>>(),
1589 &[
1590 DiagnosticEntry {
1591 range: Point::new(2, 9)..Point::new(2, 12),
1592 diagnostic: Diagnostic {
1593 source: Some("disk".into()),
1594 severity: DiagnosticSeverity::WARNING,
1595 message: "unreachable statement".to_string(),
1596 is_disk_based: true,
1597 group_id: 4,
1598 is_primary: true,
1599 ..Default::default()
1600 }
1601 },
1602 DiagnosticEntry {
1603 range: Point::new(2, 9)..Point::new(2, 10),
1604 diagnostic: Diagnostic {
1605 source: Some("disk".into()),
1606 severity: DiagnosticSeverity::ERROR,
1607 message: "undefined variable 'A'".to_string(),
1608 is_disk_based: true,
1609 group_id: 3,
1610 is_primary: true,
1611 ..Default::default()
1612 },
1613 }
1614 ]
1615 );
1616 assert_eq!(
1617 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1618 [
1619 ("fn a() { ".to_string(), None),
1620 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1621 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1622 ("\n".to_string(), None),
1623 ]
1624 );
1625 assert_eq!(
1626 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1627 [
1628 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1629 ("\n".to_string(), None),
1630 ]
1631 );
1632 });
1633
1634 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1635 // changes since the last save.
1636 buffer.update(cx, |buffer, cx| {
1637 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1638 buffer.edit(
1639 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1640 None,
1641 cx,
1642 );
1643 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1644 });
1645 let change_notification_2 = fake_server
1646 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1647 .await;
1648 assert!(
1649 change_notification_2.text_document.version > change_notification_1.text_document.version
1650 );
1651
1652 // Handle out-of-order diagnostics
1653 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1654 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1655 version: Some(change_notification_2.text_document.version),
1656 diagnostics: vec![
1657 lsp::Diagnostic {
1658 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1659 severity: Some(DiagnosticSeverity::ERROR),
1660 message: "undefined variable 'BB'".to_string(),
1661 source: Some("disk".to_string()),
1662 ..Default::default()
1663 },
1664 lsp::Diagnostic {
1665 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1666 severity: Some(DiagnosticSeverity::WARNING),
1667 message: "undefined variable 'A'".to_string(),
1668 source: Some("disk".to_string()),
1669 ..Default::default()
1670 },
1671 ],
1672 });
1673
1674 cx.executor().run_until_parked();
1675 buffer.update(cx, |buffer, _| {
1676 assert_eq!(
1677 buffer
1678 .snapshot()
1679 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1680 .collect::<Vec<_>>(),
1681 &[
1682 DiagnosticEntry {
1683 range: Point::new(2, 21)..Point::new(2, 22),
1684 diagnostic: Diagnostic {
1685 source: Some("disk".into()),
1686 severity: DiagnosticSeverity::WARNING,
1687 message: "undefined variable 'A'".to_string(),
1688 is_disk_based: true,
1689 group_id: 6,
1690 is_primary: true,
1691 ..Default::default()
1692 }
1693 },
1694 DiagnosticEntry {
1695 range: Point::new(3, 9)..Point::new(3, 14),
1696 diagnostic: Diagnostic {
1697 source: Some("disk".into()),
1698 severity: DiagnosticSeverity::ERROR,
1699 message: "undefined variable 'BB'".to_string(),
1700 is_disk_based: true,
1701 group_id: 5,
1702 is_primary: true,
1703 ..Default::default()
1704 },
1705 }
1706 ]
1707 );
1708 });
1709}
1710
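// Empty diagnostic ranges are extended to cover a neighboring character so they remain visible.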
1711#[gpui::test]
1712async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1713 init_test(cx);
1714
1715 let text = concat!(
1716 "let one = ;\n", //
1717 "let two = \n",
1718 "let three = 3;\n",
1719 );
1720
1721 let fs = FakeFs::new(cx.executor());
1722 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1723
1724 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1725 let buffer = project
1726 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1727 .await
1728 .unwrap();
1729
1730 project.update(cx, |project, cx| {
1731 project
1732 .update_buffer_diagnostics(
1733 &buffer,
1734 LanguageServerId(0),
1735 None,
1736 vec![
1737 DiagnosticEntry {
1738 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1739 diagnostic: Diagnostic {
1740 severity: DiagnosticSeverity::ERROR,
1741 message: "syntax error 1".to_string(),
1742 ..Default::default()
1743 },
1744 },
1745 DiagnosticEntry {
1746 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1747 diagnostic: Diagnostic {
1748 severity: DiagnosticSeverity::ERROR,
1749 message: "syntax error 2".to_string(),
1750 ..Default::default()
1751 },
1752 },
1753 ],
1754 cx,
1755 )
1756 .unwrap();
1757 });
1758
1759 // An empty range is extended forward to include the following character.
1760 // At the end of a line, an empty range is extended backward to include
1761 // the preceding character.
1762 buffer.update(cx, |buffer, _| {
1763 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1764 assert_eq!(
1765 chunks
1766 .iter()
1767 .map(|(s, d)| (s.as_str(), *d))
1768 .collect::<Vec<_>>(),
1769 &[
1770 ("let one = ", None),
1771 (";", Some(DiagnosticSeverity::ERROR)),
1772 ("\nlet two =", None),
1773 (" ", Some(DiagnosticSeverity::ERROR)),
1774 ("\nlet three = 3;\n", None)
1775 ]
1776 );
1777 });
1778}
1779
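// Diagnostics from multiple language servers for the same file are aggregated in the project's summary.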
1780#[gpui::test]
1781async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1782 init_test(cx);
1783
1784 let fs = FakeFs::new(cx.executor());
1785 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1786 .await;
1787
1788 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1789
1790 project.update(cx, |project, cx| {
1791 project
1792 .update_diagnostic_entries(
1793 LanguageServerId(0),
1794 Path::new("/dir/a.rs").to_owned(),
1795 None,
1796 vec![DiagnosticEntry {
1797 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1798 diagnostic: Diagnostic {
1799 severity: DiagnosticSeverity::ERROR,
1800 is_primary: true,
1801 message: "syntax error a1".to_string(),
1802 ..Default::default()
1803 },
1804 }],
1805 cx,
1806 )
1807 .unwrap();
1808 project
1809 .update_diagnostic_entries(
1810 LanguageServerId(1),
1811 Path::new("/dir/a.rs").to_owned(),
1812 None,
1813 vec![DiagnosticEntry {
1814 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1815 diagnostic: Diagnostic {
1816 severity: DiagnosticSeverity::ERROR,
1817 is_primary: true,
1818 message: "syntax error b1".to_string(),
1819 ..Default::default()
1820 },
1821 }],
1822 cx,
1823 )
1824 .unwrap();
1825
1826 assert_eq!(
1827 project.diagnostic_summary(false, cx),
1828 DiagnosticSummary {
1829 error_count: 2,
1830 warning_count: 0,
1831 }
1832 );
1833 });
1834}
1835
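// LSP edits computed against an older document version are adjusted for the edits made since that version.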
1836#[gpui::test]
1837async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1838 init_test(cx);
1839
1840 let text = "
1841 fn a() {
1842 f1();
1843 }
1844 fn b() {
1845 f2();
1846 }
1847 fn c() {
1848 f3();
1849 }
1850 "
1851 .unindent();
1852
1853 let fs = FakeFs::new(cx.executor());
1854 fs.insert_tree(
1855 "/dir",
1856 json!({
1857 "a.rs": text.clone(),
1858 }),
1859 )
1860 .await;
1861
1862 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1863
1864 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1865 language_registry.add(rust_lang());
1866 let mut fake_servers =
1867 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1868
1869 let buffer = project
1870 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1871 .await
1872 .unwrap();
1873
1874 let mut fake_server = fake_servers.next().await.unwrap();
1875 let lsp_document_version = fake_server
1876 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1877 .await
1878 .text_document
1879 .version;
1880
1881 // Simulate editing the buffer after the language server computes some edits.
1882 buffer.update(cx, |buffer, cx| {
1883 buffer.edit(
1884 [(
1885 Point::new(0, 0)..Point::new(0, 0),
1886 "// above first function\n",
1887 )],
1888 None,
1889 cx,
1890 );
1891 buffer.edit(
1892 [(
1893 Point::new(2, 0)..Point::new(2, 0),
1894 " // inside first function\n",
1895 )],
1896 None,
1897 cx,
1898 );
1899 buffer.edit(
1900 [(
1901 Point::new(6, 4)..Point::new(6, 4),
1902 "// inside second function ",
1903 )],
1904 None,
1905 cx,
1906 );
1907
1908 assert_eq!(
1909 buffer.text(),
1910 "
1911 // above first function
1912 fn a() {
1913 // inside first function
1914 f1();
1915 }
1916 fn b() {
1917 // inside second function f2();
1918 }
1919 fn c() {
1920 f3();
1921 }
1922 "
1923 .unindent()
1924 );
1925 });
1926
1927 let edits = project
1928 .update(cx, |project, cx| {
1929 project.edits_from_lsp(
1930 &buffer,
1931 vec![
1932 // replace body of first function
1933 lsp::TextEdit {
1934 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1935 new_text: "
1936 fn a() {
1937 f10();
1938 }
1939 "
1940 .unindent(),
1941 },
1942 // edit inside second function
1943 lsp::TextEdit {
1944 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1945 new_text: "00".into(),
1946 },
1947 // edit inside third function via two distinct edits
1948 lsp::TextEdit {
1949 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1950 new_text: "4000".into(),
1951 },
1952 lsp::TextEdit {
1953 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1954 new_text: "".into(),
1955 },
1956 ],
1957 LanguageServerId(0),
1958 Some(lsp_document_version),
1959 cx,
1960 )
1961 })
1962 .await
1963 .unwrap();
1964
1965 buffer.update(cx, |buffer, cx| {
1966 for (range, new_text) in edits {
1967 buffer.edit([(range, new_text)], None, cx);
1968 }
1969 assert_eq!(
1970 buffer.text(),
1971 "
1972 // above first function
1973 fn a() {
1974 // inside first function
1975 f10();
1976 }
1977 fn b() {
1978 // inside second function f200();
1979 }
1980 fn c() {
1981 f4000();
1982 }
1983 "
1984 .unindent()
1985 );
1986 });
1987}
1988
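// A large diff that encodes a small change (as rust-analyzer emits for merge-imports) collapses into minimal edits.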
1989#[gpui::test]
1990async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1991 init_test(cx);
1992
1993 let text = "
1994 use a::b;
1995 use a::c;
1996
1997 fn f() {
1998 b();
1999 c();
2000 }
2001 "
2002 .unindent();
2003
2004 let fs = FakeFs::new(cx.executor());
2005 fs.insert_tree(
2006 "/dir",
2007 json!({
2008 "a.rs": text.clone(),
2009 }),
2010 )
2011 .await;
2012
2013 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2014 let buffer = project
2015 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2016 .await
2017 .unwrap();
2018
2019 // Simulate the language server sending us a small edit in the form of a very large diff.
2020 // Rust-analyzer does this when performing a merge-imports code action.
2021 let edits = project
2022 .update(cx, |project, cx| {
2023 project.edits_from_lsp(
2024 &buffer,
2025 [
2026 // Replace the first use statement without editing the semicolon.
2027 lsp::TextEdit {
2028 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2029 new_text: "a::{b, c}".into(),
2030 },
2031 // Reinsert the remainder of the file between the semicolon and the final
2032 // newline of the file.
2033 lsp::TextEdit {
2034 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2035 new_text: "\n\n".into(),
2036 },
2037 lsp::TextEdit {
2038 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2039 new_text: "
2040 fn f() {
2041 b();
2042 c();
2043 }"
2044 .unindent(),
2045 },
2046 // Delete everything after the first newline of the file.
2047 lsp::TextEdit {
2048 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2049 new_text: "".into(),
2050 },
2051 ],
2052 LanguageServerId(0),
2053 None,
2054 cx,
2055 )
2056 })
2057 .await
2058 .unwrap();
2059
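    // Resolve the returned anchor ranges to points and verify that the large
    // diff sent by the server was collapsed into two minimal edits.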
2060 buffer.update(cx, |buffer, cx| {
2061 let edits = edits
2062 .into_iter()
2063 .map(|(range, text)| {
2064 (
2065 range.start.to_point(buffer)..range.end.to_point(buffer),
2066 text,
2067 )
2068 })
2069 .collect::<Vec<_>>();
2070
2071 assert_eq!(
2072 edits,
2073 [
2074 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2075 (Point::new(1, 0)..Point::new(2, 0), "".into())
2076 ]
2077 );
2078
2079 for (range, new_text) in edits {
2080 buffer.edit([(range, new_text)], None, cx);
2081 }
2082 assert_eq!(
2083 buffer.text(),
2084 "
2085 use a::{b, c};
2086
2087 fn f() {
2088 b();
2089 c();
2090 }
2091 "
2092 .unindent()
2093 );
2094 });
2095}
2096
2097#[gpui::test]
2098async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2099 init_test(cx);
2100
2101 let text = "
2102 use a::b;
2103 use a::c;
2104
2105 fn f() {
2106 b();
2107 c();
2108 }
2109 "
2110 .unindent();
2111
2112 let fs = FakeFs::new(cx.executor());
2113 fs.insert_tree(
2114 "/dir",
2115 json!({
2116 "a.rs": text.clone(),
2117 }),
2118 )
2119 .await;
2120
2121 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2122 let buffer = project
2123 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2124 .await
2125 .unwrap();
2126
    // Simulate the language server sending us edits out of order, with ranges
    // that are sometimes inverted or that point to invalid locations.
2129 let edits = project
2130 .update(cx, |project, cx| {
2131 project.edits_from_lsp(
2132 &buffer,
2133 [
2134 lsp::TextEdit {
2135 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2136 new_text: "\n\n".into(),
2137 },
2138 lsp::TextEdit {
2139 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2140 new_text: "a::{b, c}".into(),
2141 },
2142 lsp::TextEdit {
2143 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2144 new_text: "".into(),
2145 },
2146 lsp::TextEdit {
2147 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2148 new_text: "
2149 fn f() {
2150 b();
2151 c();
2152 }"
2153 .unindent(),
2154 },
2155 ],
2156 LanguageServerId(0),
2157 None,
2158 cx,
2159 )
2160 })
2161 .await
2162 .unwrap();
2163
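    // The malformed ranges should be normalized, producing the same minimal
    // pair of edits as in the previous test.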
2164 buffer.update(cx, |buffer, cx| {
2165 let edits = edits
2166 .into_iter()
2167 .map(|(range, text)| {
2168 (
2169 range.start.to_point(buffer)..range.end.to_point(buffer),
2170 text,
2171 )
2172 })
2173 .collect::<Vec<_>>();
2174
2175 assert_eq!(
2176 edits,
2177 [
2178 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2179 (Point::new(1, 0)..Point::new(2, 0), "".into())
2180 ]
2181 );
2182
2183 for (range, new_text) in edits {
2184 buffer.edit([(range, new_text)], None, cx);
2185 }
2186 assert_eq!(
2187 buffer.text(),
2188 "
2189 use a::{b, c};
2190
2191 fn f() {
2192 b();
2193 c();
2194 }
2195 "
2196 .unindent()
2197 );
2198 });
2199}
2200
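// Helper that walks the buffer's chunks over `range` and concatenates adjacent
// chunks sharing the same diagnostic severity, returning (text, severity) pairs.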
2201fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2202 buffer: &Buffer,
2203 range: Range<T>,
2204) -> Vec<(String, Option<DiagnosticSeverity>)> {
2205 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2206 for chunk in buffer.snapshot().chunks(range, true) {
2207 if chunks.last().map_or(false, |prev_chunk| {
2208 prev_chunk.1 == chunk.diagnostic_severity
2209 }) {
2210 chunks.last_mut().unwrap().0.push_str(chunk.text);
2211 } else {
2212 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2213 }
2214 }
2215 chunks
2216}
2217
2218#[gpui::test(iterations = 10)]
2219async fn test_definition(cx: &mut gpui::TestAppContext) {
2220 init_test(cx);
2221
2222 let fs = FakeFs::new(cx.executor());
2223 fs.insert_tree(
2224 "/dir",
2225 json!({
2226 "a.rs": "const fn a() { A }",
2227 "b.rs": "const y: i32 = crate::a()",
2228 }),
2229 )
2230 .await;
2231
2232 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2233
2234 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2235 language_registry.add(rust_lang());
2236 let mut fake_servers =
2237 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
2238
2239 let buffer = project
2240 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2241 .await
2242 .unwrap();
2243
2244 let fake_server = fake_servers.next().await.unwrap();
2245 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2246 let params = params.text_document_position_params;
2247 assert_eq!(
2248 params.text_document.uri.to_file_path().unwrap(),
2249 Path::new("/dir/b.rs"),
2250 );
2251 assert_eq!(params.position, lsp::Position::new(0, 22));
2252
2253 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2254 lsp::Location::new(
2255 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2256 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2257 ),
2258 )))
2259 });
2260
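    // Request the definition. The response points into a file that is not part
    // of the project yet, so an invisible worktree is created for it and then
    // released once the definition is dropped.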
2261 let mut definitions = project
2262 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2263 .await
2264 .unwrap();
2265
    // Assert that no new language server was started.
2267 cx.executor().run_until_parked();
2268 assert!(fake_servers.try_next().is_err());
2269
2270 assert_eq!(definitions.len(), 1);
2271 let definition = definitions.pop().unwrap();
2272 cx.update(|cx| {
2273 let target_buffer = definition.target.buffer.read(cx);
2274 assert_eq!(
2275 target_buffer
2276 .file()
2277 .unwrap()
2278 .as_local()
2279 .unwrap()
2280 .abs_path(cx),
2281 Path::new("/dir/a.rs"),
2282 );
2283 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2284 assert_eq!(
2285 list_worktrees(&project, cx),
2286 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2287 );
2288
2289 drop(definition);
2290 });
2291 cx.update(|cx| {
2292 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2293 });
2294
2295 fn list_worktrees<'a>(
2296 project: &'a Model<Project>,
2297 cx: &'a AppContext,
2298 ) -> Vec<(&'a Path, bool)> {
2299 project
2300 .read(cx)
2301 .worktrees()
2302 .map(|worktree| {
2303 let worktree = worktree.read(cx);
2304 (
2305 worktree.as_local().unwrap().abs_path().as_ref(),
2306 worktree.is_visible(),
2307 )
2308 })
2309 .collect::<Vec<_>>()
2310 }
2311}
2312
2313#[gpui::test]
2314async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2315 init_test(cx);
2316
2317 let fs = FakeFs::new(cx.executor());
2318 fs.insert_tree(
2319 "/dir",
2320 json!({
2321 "a.ts": "",
2322 }),
2323 )
2324 .await;
2325
2326 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2327
2328 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2329 language_registry.add(typescript_lang());
2330 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2331 "TypeScript",
2332 FakeLspAdapter {
2333 capabilities: lsp::ServerCapabilities {
2334 completion_provider: Some(lsp::CompletionOptions {
2335 trigger_characters: Some(vec![":".to_string()]),
2336 ..Default::default()
2337 }),
2338 ..Default::default()
2339 },
2340 ..Default::default()
2341 },
2342 );
2343
2344 let buffer = project
2345 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2346 .await
2347 .unwrap();
2348
2349 let fake_server = fake_language_servers.next().await.unwrap();
2350
2351 let text = "let a = b.fqn";
2352 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
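    // Request completions at the end of "b.fqn". The fake server's completion
    // item has no explicit edit range, so the range is inferred from the word
    // under the cursor ("fqn").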
2353 let completions = project.update(cx, |project, cx| {
2354 project.completions(&buffer, text.len(), cx)
2355 });
2356
2357 fake_server
2358 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2359 Ok(Some(lsp::CompletionResponse::Array(vec![
2360 lsp::CompletionItem {
2361 label: "fullyQualifiedName?".into(),
2362 insert_text: Some("fullyQualifiedName".into()),
2363 ..Default::default()
2364 },
2365 ])))
2366 })
2367 .next()
2368 .await;
2369 let completions = completions.await.unwrap();
2370 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2371 assert_eq!(completions.len(), 1);
2372 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2373 assert_eq!(
2374 completions[0].old_range.to_offset(&snapshot),
2375 text.len() - 3..text.len()
2376 );
2377
2378 let text = "let a = \"atoms/cmp\"";
2379 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
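    // Request completions just inside the string literal. With no edit range
    // provided, the inferred range should cover the partial word before the
    // cursor ("cmp"), not the surrounding quotes.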
2380 let completions = project.update(cx, |project, cx| {
2381 project.completions(&buffer, text.len() - 1, cx)
2382 });
2383
2384 fake_server
2385 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2386 Ok(Some(lsp::CompletionResponse::Array(vec![
2387 lsp::CompletionItem {
2388 label: "component".into(),
2389 ..Default::default()
2390 },
2391 ])))
2392 })
2393 .next()
2394 .await;
2395 let completions = completions.await.unwrap();
2396 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2397 assert_eq!(completions.len(), 1);
2398 assert_eq!(completions[0].new_text, "component");
2399 assert_eq!(
2400 completions[0].old_range.to_offset(&snapshot),
2401 text.len() - 4..text.len() - 1
2402 );
2403}
2404
2405#[gpui::test]
2406async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2407 init_test(cx);
2408
2409 let fs = FakeFs::new(cx.executor());
2410 fs.insert_tree(
2411 "/dir",
2412 json!({
2413 "a.ts": "",
2414 }),
2415 )
2416 .await;
2417
2418 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2419
2420 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2421 language_registry.add(typescript_lang());
2422 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2423 "TypeScript",
2424 FakeLspAdapter {
2425 capabilities: lsp::ServerCapabilities {
2426 completion_provider: Some(lsp::CompletionOptions {
2427 trigger_characters: Some(vec![":".to_string()]),
2428 ..Default::default()
2429 }),
2430 ..Default::default()
2431 },
2432 ..Default::default()
2433 },
2434 );
2435
2436 let buffer = project
2437 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2438 .await
2439 .unwrap();
2440
2441 let fake_server = fake_language_servers.next().await.unwrap();
2442
2443 let text = "let a = b.fqn";
2444 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2445 let completions = project.update(cx, |project, cx| {
2446 project.completions(&buffer, text.len(), cx)
2447 });
2448
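    // The completion text returned by the server contains `\r` and `\r\n` line
    // endings; these should be normalized to `\n` before being applied.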
2449 fake_server
2450 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2451 Ok(Some(lsp::CompletionResponse::Array(vec![
2452 lsp::CompletionItem {
2453 label: "fullyQualifiedName?".into(),
2454 insert_text: Some("fully\rQualified\r\nName".into()),
2455 ..Default::default()
2456 },
2457 ])))
2458 })
2459 .next()
2460 .await;
2461 let completions = completions.await.unwrap();
2462 assert_eq!(completions.len(), 1);
2463 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2464}
2465
2466#[gpui::test(iterations = 10)]
2467async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2468 init_test(cx);
2469
2470 let fs = FakeFs::new(cx.executor());
2471 fs.insert_tree(
2472 "/dir",
2473 json!({
2474 "a.ts": "a",
2475 }),
2476 )
2477 .await;
2478
2479 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2480
2481 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2482 language_registry.add(typescript_lang());
2483 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2484 "TypeScript",
2485 FakeLspAdapter {
2486 capabilities: lsp::ServerCapabilities {
2487 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
2488 lsp::CodeActionOptions {
2489 resolve_provider: Some(true),
2490 ..lsp::CodeActionOptions::default()
2491 },
2492 )),
2493 ..lsp::ServerCapabilities::default()
2494 },
2495 ..FakeLspAdapter::default()
2496 },
2497 );
2498
2499 let buffer = project
2500 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2501 .await
2502 .unwrap();
2503
2504 let fake_server = fake_language_servers.next().await.unwrap();
2505
    // The language server returns code actions that contain commands rather than edits.
2507 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2508 fake_server
2509 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2510 Ok(Some(vec![
2511 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2512 title: "The code action".into(),
2513 data: Some(serde_json::json!({
2514 "command": "_the/command",
2515 })),
2516 ..lsp::CodeAction::default()
2517 }),
2518 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2519 title: "two".into(),
2520 ..lsp::CodeAction::default()
2521 }),
2522 ]))
2523 })
2524 .next()
2525 .await;
2526
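    // Apply the first code action. Because it carries no edits, applying it
    // resolves the action and then runs its associated command.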
2527 let action = actions.await[0].clone();
2528 let apply = project.update(cx, |project, cx| {
2529 project.apply_code_action(buffer.clone(), action, true, cx)
2530 });
2531
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2534 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2535 |mut action, _| async move {
2536 if action.data.is_some() {
2537 action.command = Some(lsp::Command {
2538 title: "The command".into(),
2539 command: "_the/command".into(),
2540 arguments: Some(vec![json!("the-argument")]),
2541 });
2542 }
2543 Ok(action)
2544 },
2545 );
2546
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2549 fake_server
2550 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2551 let fake = fake_server.clone();
2552 move |params, _| {
2553 assert_eq!(params.command, "_the/command");
2554 let fake = fake.clone();
2555 async move {
2556 fake.server
2557 .request::<lsp::request::ApplyWorkspaceEdit>(
2558 lsp::ApplyWorkspaceEditParams {
2559 label: None,
2560 edit: lsp::WorkspaceEdit {
2561 changes: Some(
2562 [(
2563 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2564 vec![lsp::TextEdit {
2565 range: lsp::Range::new(
2566 lsp::Position::new(0, 0),
2567 lsp::Position::new(0, 0),
2568 ),
2569 new_text: "X".into(),
2570 }],
2571 )]
2572 .into_iter()
2573 .collect(),
2574 ),
2575 ..Default::default()
2576 },
2577 },
2578 )
2579 .await
2580 .unwrap();
2581 Ok(Some(json!(null)))
2582 }
2583 }
2584 })
2585 .next()
2586 .await;
2587
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2590 let transaction = apply.await.unwrap();
2591 assert!(transaction.0.contains_key(&buffer));
2592 buffer.update(cx, |buffer, cx| {
2593 assert_eq!(buffer.text(), "Xa");
2594 buffer.undo(cx);
2595 assert_eq!(buffer.text(), "a");
2596 });
2597}
2598
2599#[gpui::test(iterations = 10)]
2600async fn test_save_file(cx: &mut gpui::TestAppContext) {
2601 init_test(cx);
2602
2603 let fs = FakeFs::new(cx.executor());
2604 fs.insert_tree(
2605 "/dir",
2606 json!({
2607 "file1": "the old contents",
2608 }),
2609 )
2610 .await;
2611
2612 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2613 let buffer = project
2614 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2615 .await
2616 .unwrap();
2617 buffer.update(cx, |buffer, cx| {
2618 assert_eq!(buffer.text(), "the old contents");
2619 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2620 });
2621
2622 project
2623 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2624 .await
2625 .unwrap();
2626
2627 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2628 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2629}
2630
2631#[gpui::test(iterations = 30)]
2632async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2633 init_test(cx);
2634
2635 let fs = FakeFs::new(cx.executor().clone());
2636 fs.insert_tree(
2637 "/dir",
2638 json!({
2639 "file1": "the original contents",
2640 }),
2641 )
2642 .await;
2643
2644 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2645 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2646 let buffer = project
2647 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2648 .await
2649 .unwrap();
2650
2651 // Simulate buffer diffs being slow, so that they don't complete before
2652 // the next file change occurs.
2653 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2654
2655 // Change the buffer's file on disk, and then wait for the file change
2656 // to be detected by the worktree, so that the buffer starts reloading.
2657 fs.save(
2658 "/dir/file1".as_ref(),
2659 &"the first contents".into(),
2660 Default::default(),
2661 )
2662 .await
2663 .unwrap();
2664 worktree.next_event(cx).await;
2665
2666 // Change the buffer's file again. Depending on the random seed, the
2667 // previous file change may still be in progress.
2668 fs.save(
2669 "/dir/file1".as_ref(),
2670 &"the second contents".into(),
2671 Default::default(),
2672 )
2673 .await
2674 .unwrap();
2675 worktree.next_event(cx).await;
2676
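    // Once everything settles, the buffer should match the file on disk and be
    // neither dirty nor in conflict.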
2677 cx.executor().run_until_parked();
2678 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2679 buffer.read_with(cx, |buffer, _| {
2680 assert_eq!(buffer.text(), on_disk_text);
2681 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not have a conflict");
2683 });
2684}
2685
2686#[gpui::test(iterations = 30)]
2687async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2688 init_test(cx);
2689
2690 let fs = FakeFs::new(cx.executor().clone());
2691 fs.insert_tree(
2692 "/dir",
2693 json!({
2694 "file1": "the original contents",
2695 }),
2696 )
2697 .await;
2698
2699 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2700 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2701 let buffer = project
2702 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2703 .await
2704 .unwrap();
2705
2706 // Simulate buffer diffs being slow, so that they don't complete before
2707 // the next file change occurs.
2708 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2709
2710 // Change the buffer's file on disk, and then wait for the file change
2711 // to be detected by the worktree, so that the buffer starts reloading.
2712 fs.save(
2713 "/dir/file1".as_ref(),
2714 &"the first contents".into(),
2715 Default::default(),
2716 )
2717 .await
2718 .unwrap();
2719 worktree.next_event(cx).await;
2720
2721 cx.executor()
2722 .spawn(cx.executor().simulate_random_delay())
2723 .await;
2724
2725 // Perform a noop edit, causing the buffer's version to increase.
2726 buffer.update(cx, |buffer, cx| {
2727 buffer.edit([(0..0, " ")], None, cx);
2728 buffer.undo(cx);
2729 });
2730
2731 cx.executor().run_until_parked();
2732 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2733 buffer.read_with(cx, |buffer, _| {
2734 let buffer_text = buffer.text();
2735 if buffer_text == on_disk_text {
2736 assert!(
2737 !buffer.is_dirty() && !buffer.has_conflict(),
2738 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2739 );
2740 }
2741 // If the file change occurred while the buffer was processing the first
2742 // change, the buffer will be in a conflicting state.
2743 else {
2744 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2746 }
2747 });
2748}
2749
2750#[gpui::test]
2751async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2752 init_test(cx);
2753
2754 let fs = FakeFs::new(cx.executor());
2755 fs.insert_tree(
2756 "/dir",
2757 json!({
2758 "file1": "the old contents",
2759 }),
2760 )
2761 .await;
2762
2763 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2764 let buffer = project
2765 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2766 .await
2767 .unwrap();
2768 buffer.update(cx, |buffer, cx| {
2769 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2770 });
2771
2772 project
2773 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2774 .await
2775 .unwrap();
2776
2777 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2778 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2779}
2780
2781#[gpui::test]
2782async fn test_save_as(cx: &mut gpui::TestAppContext) {
2783 init_test(cx);
2784
2785 let fs = FakeFs::new(cx.executor());
2786 fs.insert_tree("/dir", json!({})).await;
2787
2788 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2789
2790 let languages = project.update(cx, |project, _| project.languages().clone());
2791 languages.add(rust_lang());
2792
2793 let buffer = project.update(cx, |project, cx| {
2794 project.create_buffer("", None, cx).unwrap()
2795 });
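    // Edit the untitled buffer; it is dirty and uses the Plain Text language
    // until it is saved with a recognized file extension.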
2796 buffer.update(cx, |buffer, cx| {
2797 buffer.edit([(0..0, "abc")], None, cx);
2798 assert!(buffer.is_dirty());
2799 assert!(!buffer.has_conflict());
2800 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2801 });
2802 project
2803 .update(cx, |project, cx| {
2804 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2805 })
2806 .await
2807 .unwrap();
2808 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2809
2810 cx.executor().run_until_parked();
2811 buffer.update(cx, |buffer, cx| {
2812 assert_eq!(
2813 buffer.file().unwrap().full_path(cx),
2814 Path::new("dir/file1.rs")
2815 );
2816 assert!(!buffer.is_dirty());
2817 assert!(!buffer.has_conflict());
2818 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2819 });
2820
2821 let opened_buffer = project
2822 .update(cx, |project, cx| {
2823 project.open_local_buffer("/dir/file1.rs", cx)
2824 })
2825 .await
2826 .unwrap();
2827 assert_eq!(opened_buffer, buffer);
2828}
2829
2830#[gpui::test(retries = 5)]
2831async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2832 init_test(cx);
2833 cx.executor().allow_parking();
2834
2835 let dir = temp_tree(json!({
2836 "a": {
2837 "file1": "",
2838 "file2": "",
2839 "file3": "",
2840 },
2841 "b": {
2842 "c": {
2843 "file4": "",
2844 "file5": "",
2845 }
2846 }
2847 }));
2848
2849 let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;
2850 let rpc = project.update(cx, |p, _| p.client.clone());
2851
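    // Helpers for opening a buffer at a path and for looking up a worktree
    // entry's id by path, so entry identity can be checked across renames.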
2852 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2853 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2854 async move { buffer.await.unwrap() }
2855 };
2856 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2857 project.update(cx, |project, cx| {
2858 let tree = project.worktrees().next().unwrap();
2859 tree.read(cx)
2860 .entry_for_path(path)
2861 .unwrap_or_else(|| panic!("no entry for path {}", path))
2862 .id
2863 })
2864 };
2865
2866 let buffer2 = buffer_for_path("a/file2", cx).await;
2867 let buffer3 = buffer_for_path("a/file3", cx).await;
2868 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2869 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2870
2871 let file2_id = id_for_path("a/file2", cx);
2872 let file3_id = id_for_path("a/file3", cx);
2873 let file4_id = id_for_path("b/c/file4", cx);
2874
2875 // Create a remote copy of this worktree.
2876 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2877
2878 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2879
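    // Record the update batches that the local worktree produces so they can be
    // replayed onto the remote worktree later in this test.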
2880 let updates = Arc::new(Mutex::new(Vec::new()));
2881 tree.update(cx, |tree, cx| {
2882 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2883 let updates = updates.clone();
2884 move |update| {
2885 updates.lock().push(update);
2886 async { true }
2887 }
2888 });
2889 });
2890
2891 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2892
2893 cx.executor().run_until_parked();
2894
2895 cx.update(|cx| {
2896 assert!(!buffer2.read(cx).is_dirty());
2897 assert!(!buffer3.read(cx).is_dirty());
2898 assert!(!buffer4.read(cx).is_dirty());
2899 assert!(!buffer5.read(cx).is_dirty());
2900 });
2901
2902 // Rename and delete files and directories.
2903 tree.flush_fs_events(cx).await;
2904 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2905 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2906 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2907 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2908 tree.flush_fs_events(cx).await;
2909
2910 let expected_paths = vec![
2911 "a",
2912 "a/file1",
2913 "a/file2.new",
2914 "b",
2915 "d",
2916 "d/file3",
2917 "d/file4",
2918 ];
2919
2920 cx.update(|app| {
2921 assert_eq!(
2922 tree.read(app)
2923 .paths()
2924 .map(|p| p.to_str().unwrap())
2925 .collect::<Vec<_>>(),
2926 expected_paths
2927 );
2928 });
2929
2930 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2931 assert_eq!(id_for_path("d/file3", cx), file3_id);
2932 assert_eq!(id_for_path("d/file4", cx), file4_id);
2933
2934 cx.update(|cx| {
2935 assert_eq!(
2936 buffer2.read(cx).file().unwrap().path().as_ref(),
2937 Path::new("a/file2.new")
2938 );
2939 assert_eq!(
2940 buffer3.read(cx).file().unwrap().path().as_ref(),
2941 Path::new("d/file3")
2942 );
2943 assert_eq!(
2944 buffer4.read(cx).file().unwrap().path().as_ref(),
2945 Path::new("d/file4")
2946 );
2947 assert_eq!(
2948 buffer5.read(cx).file().unwrap().path().as_ref(),
2949 Path::new("b/c/file5")
2950 );
2951
2952 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
2953 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
2954 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
2955 assert!(buffer5.read(cx).file().unwrap().is_deleted());
2956 });
2957
2958 // Update the remote worktree. Check that it becomes consistent with the
2959 // local worktree.
2960 cx.executor().run_until_parked();
2961
2962 remote.update(cx, |remote, _| {
2963 for update in updates.lock().drain(..) {
2964 remote.as_remote_mut().unwrap().update_from_remote(update);
2965 }
2966 });
2967 cx.executor().run_until_parked();
2968 remote.update(cx, |remote, _| {
2969 assert_eq!(
2970 remote
2971 .paths()
2972 .map(|p| p.to_str().unwrap())
2973 .collect::<Vec<_>>(),
2974 expected_paths
2975 );
2976 });
2977}
2978
2979#[gpui::test(iterations = 10)]
2980async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
2981 init_test(cx);
2982
2983 let fs = FakeFs::new(cx.executor());
2984 fs.insert_tree(
2985 "/dir",
2986 json!({
2987 "a": {
2988 "file1": "",
2989 }
2990 }),
2991 )
2992 .await;
2993
2994 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2995 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2996 let tree_id = tree.update(cx, |tree, _| tree.id());
2997
2998 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2999 project.update(cx, |project, cx| {
3000 let tree = project.worktrees().next().unwrap();
3001 tree.read(cx)
3002 .entry_for_path(path)
3003 .unwrap_or_else(|| panic!("no entry for path {}", path))
3004 .id
3005 })
3006 };
3007
3008 let dir_id = id_for_path("a", cx);
3009 let file_id = id_for_path("a/file1", cx);
3010 let buffer = project
3011 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3012 .await
3013 .unwrap();
3014 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3015
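    // Rename the parent directory. The entry ids and the open buffer should be
    // preserved across the rename.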
3016 project
3017 .update(cx, |project, cx| {
3018 project.rename_entry(dir_id, Path::new("b"), cx)
3019 })
3020 .unwrap()
3021 .await
3022 .unwrap();
3023 cx.executor().run_until_parked();
3024
3025 assert_eq!(id_for_path("b", cx), dir_id);
3026 assert_eq!(id_for_path("b/file1", cx), file_id);
3027 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3028}
3029
3030#[gpui::test]
3031async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3032 init_test(cx);
3033
3034 let fs = FakeFs::new(cx.executor());
3035 fs.insert_tree(
3036 "/dir",
3037 json!({
3038 "a.txt": "a-contents",
3039 "b.txt": "b-contents",
3040 }),
3041 )
3042 .await;
3043
3044 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3045
3046 // Spawn multiple tasks to open paths, repeating some paths.
3047 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3048 (
3049 p.open_local_buffer("/dir/a.txt", cx),
3050 p.open_local_buffer("/dir/b.txt", cx),
3051 p.open_local_buffer("/dir/a.txt", cx),
3052 )
3053 });
3054
3055 let buffer_a_1 = buffer_a_1.await.unwrap();
3056 let buffer_a_2 = buffer_a_2.await.unwrap();
3057 let buffer_b = buffer_b.await.unwrap();
3058 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3059 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3060
3061 // There is only one buffer per path.
3062 let buffer_a_id = buffer_a_1.entity_id();
3063 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3064
3065 // Open the same path again while it is still open.
3066 drop(buffer_a_1);
3067 let buffer_a_3 = project
3068 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3069 .await
3070 .unwrap();
3071
3072 // There's still only one buffer per path.
3073 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3074}
3075
3076#[gpui::test]
3077async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3078 init_test(cx);
3079
3080 let fs = FakeFs::new(cx.executor());
3081 fs.insert_tree(
3082 "/dir",
3083 json!({
3084 "file1": "abc",
3085 "file2": "def",
3086 "file3": "ghi",
3087 }),
3088 )
3089 .await;
3090
3091 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3092
3093 let buffer1 = project
3094 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3095 .await
3096 .unwrap();
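    // Collect every buffer event except operations, so the emitted sequence of
    // events can be asserted below.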
3097 let events = Arc::new(Mutex::new(Vec::new()));
3098
    // Initially, the buffer isn't dirty.
3100 buffer1.update(cx, |buffer, cx| {
3101 cx.subscribe(&buffer1, {
3102 let events = events.clone();
3103 move |_, _, event, _| match event {
3104 BufferEvent::Operation(_) => {}
3105 _ => events.lock().push(event.clone()),
3106 }
3107 })
3108 .detach();
3109
3110 assert!(!buffer.is_dirty());
3111 assert!(events.lock().is_empty());
3112
3113 buffer.edit([(1..2, "")], None, cx);
3114 });
3115
    // After the first edit, the buffer is dirty and emits a dirtied event.
3117 buffer1.update(cx, |buffer, cx| {
3118 assert!(buffer.text() == "ac");
3119 assert!(buffer.is_dirty());
3120 assert_eq!(
3121 *events.lock(),
3122 &[language::Event::Edited, language::Event::DirtyChanged]
3123 );
3124 events.lock().clear();
3125 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
3126 });
3127
    // After saving, the buffer is not dirty and emits a saved event.
3129 buffer1.update(cx, |buffer, cx| {
3130 assert!(!buffer.is_dirty());
3131 assert_eq!(*events.lock(), &[language::Event::Saved]);
3132 events.lock().clear();
3133
3134 buffer.edit([(1..1, "B")], None, cx);
3135 buffer.edit([(2..2, "D")], None, cx);
3136 });
3137
    // After editing again, the buffer is dirty and emits another dirty event.
3139 buffer1.update(cx, |buffer, cx| {
3140 assert!(buffer.text() == "aBDc");
3141 assert!(buffer.is_dirty());
3142 assert_eq!(
3143 *events.lock(),
3144 &[
3145 language::Event::Edited,
3146 language::Event::DirtyChanged,
3147 language::Event::Edited,
3148 ],
3149 );
3150 events.lock().clear();
3151
3152 // After restoring the buffer to its previously-saved state,
3153 // the buffer is not considered dirty anymore.
3154 buffer.edit([(1..3, "")], None, cx);
3155 assert!(buffer.text() == "ac");
3156 assert!(!buffer.is_dirty());
3157 });
3158
3159 assert_eq!(
3160 *events.lock(),
3161 &[language::Event::Edited, language::Event::DirtyChanged]
3162 );
3163
3164 // When a file is deleted, the buffer is considered dirty.
3165 let events = Arc::new(Mutex::new(Vec::new()));
3166 let buffer2 = project
3167 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3168 .await
3169 .unwrap();
3170 buffer2.update(cx, |_, cx| {
3171 cx.subscribe(&buffer2, {
3172 let events = events.clone();
3173 move |_, _, event, _| events.lock().push(event.clone())
3174 })
3175 .detach();
3176 });
3177
3178 fs.remove_file("/dir/file2".as_ref(), Default::default())
3179 .await
3180 .unwrap();
3181 cx.executor().run_until_parked();
3182 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3183 assert_eq!(
3184 *events.lock(),
3185 &[
3186 language::Event::DirtyChanged,
3187 language::Event::FileHandleChanged
3188 ]
3189 );
3190
3191 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3192 let events = Arc::new(Mutex::new(Vec::new()));
3193 let buffer3 = project
3194 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3195 .await
3196 .unwrap();
3197 buffer3.update(cx, |_, cx| {
3198 cx.subscribe(&buffer3, {
3199 let events = events.clone();
3200 move |_, _, event, _| events.lock().push(event.clone())
3201 })
3202 .detach();
3203 });
3204
3205 buffer3.update(cx, |buffer, cx| {
3206 buffer.edit([(0..0, "x")], None, cx);
3207 });
3208 events.lock().clear();
3209 fs.remove_file("/dir/file3".as_ref(), Default::default())
3210 .await
3211 .unwrap();
3212 cx.executor().run_until_parked();
3213 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3214 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3215}
3216
3217#[gpui::test]
3218async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3219 init_test(cx);
3220
3221 let initial_contents = "aaa\nbbbbb\nc\n";
3222 let fs = FakeFs::new(cx.executor());
3223 fs.insert_tree(
3224 "/dir",
3225 json!({
3226 "the-file": initial_contents,
3227 }),
3228 )
3229 .await;
3230 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3231 let buffer = project
3232 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3233 .await
3234 .unwrap();
3235
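    // Create anchors at column 1 of each of the first three lines, so we can
    // check how they are relocated when the file is reloaded from disk.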
3236 let anchors = (0..3)
3237 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3238 .collect::<Vec<_>>();
3239
3240 // Change the file on disk, adding two new lines of text, and removing
3241 // one line.
3242 buffer.update(cx, |buffer, _| {
3243 assert!(!buffer.is_dirty());
3244 assert!(!buffer.has_conflict());
3245 });
3246 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3247 fs.save(
3248 "/dir/the-file".as_ref(),
3249 &new_contents.into(),
3250 LineEnding::Unix,
3251 )
3252 .await
3253 .unwrap();
3254
3255 // Because the buffer was not modified, it is reloaded from disk. Its
3256 // contents are edited according to the diff between the old and new
3257 // file contents.
3258 cx.executor().run_until_parked();
3259 buffer.update(cx, |buffer, _| {
3260 assert_eq!(buffer.text(), new_contents);
3261 assert!(!buffer.is_dirty());
3262 assert!(!buffer.has_conflict());
3263
3264 let anchor_positions = anchors
3265 .iter()
3266 .map(|anchor| anchor.to_point(&*buffer))
3267 .collect::<Vec<_>>();
3268 assert_eq!(
3269 anchor_positions,
3270 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3271 );
3272 });
3273
3274 // Modify the buffer
3275 buffer.update(cx, |buffer, cx| {
3276 buffer.edit([(0..0, " ")], None, cx);
3277 assert!(buffer.is_dirty());
3278 assert!(!buffer.has_conflict());
3279 });
3280
3281 // Change the file on disk again, adding blank lines to the beginning.
3282 fs.save(
3283 "/dir/the-file".as_ref(),
3284 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3285 LineEnding::Unix,
3286 )
3287 .await
3288 .unwrap();
3289
3290 // Because the buffer is modified, it doesn't reload from disk, but is
3291 // marked as having a conflict.
3292 cx.executor().run_until_parked();
3293 buffer.update(cx, |buffer, _| {
3294 assert!(buffer.has_conflict());
3295 });
3296}
3297
3298#[gpui::test]
3299async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3300 init_test(cx);
3301
3302 let fs = FakeFs::new(cx.executor());
3303 fs.insert_tree(
3304 "/dir",
3305 json!({
3306 "file1": "a\nb\nc\n",
3307 "file2": "one\r\ntwo\r\nthree\r\n",
3308 }),
3309 )
3310 .await;
3311
3312 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3313 let buffer1 = project
3314 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3315 .await
3316 .unwrap();
3317 let buffer2 = project
3318 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3319 .await
3320 .unwrap();
3321
3322 buffer1.update(cx, |buffer, _| {
3323 assert_eq!(buffer.text(), "a\nb\nc\n");
3324 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3325 });
3326 buffer2.update(cx, |buffer, _| {
3327 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3328 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3329 });
3330
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3333 fs.save(
3334 "/dir/file1".as_ref(),
3335 &"aaa\nb\nc\n".into(),
3336 LineEnding::Windows,
3337 )
3338 .await
3339 .unwrap();
3340 cx.executor().run_until_parked();
3341 buffer1.update(cx, |buffer, _| {
3342 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3343 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3344 });
3345
    // Save a file with Windows line endings. The file is written correctly.
3347 buffer2.update(cx, |buffer, cx| {
3348 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3349 });
3350 project
3351 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3352 .await
3353 .unwrap();
3354 assert_eq!(
3355 fs.load("/dir/file2".as_ref()).await.unwrap(),
3356 "one\r\ntwo\r\nthree\r\nfour\r\n",
3357 );
3358}
3359
3360#[gpui::test]
3361async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3362 init_test(cx);
3363
3364 let fs = FakeFs::new(cx.executor());
3365 fs.insert_tree(
3366 "/the-dir",
3367 json!({
3368 "a.rs": "
3369 fn foo(mut v: Vec<usize>) {
3370 for x in &v {
3371 v.push(1);
3372 }
3373 }
3374 "
3375 .unindent(),
3376 }),
3377 )
3378 .await;
3379
3380 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3381 let buffer = project
3382 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3383 .await
3384 .unwrap();
3385
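    // Build a publishDiagnostics message in which hint diagnostics point back at
    // their primary error or warning via relatedInformation, so that the
    // diagnostics can be grouped together.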
3386 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3387 let message = lsp::PublishDiagnosticsParams {
3388 uri: buffer_uri.clone(),
3389 diagnostics: vec![
3390 lsp::Diagnostic {
3391 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3392 severity: Some(DiagnosticSeverity::WARNING),
3393 message: "error 1".to_string(),
3394 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3395 location: lsp::Location {
3396 uri: buffer_uri.clone(),
3397 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3398 },
3399 message: "error 1 hint 1".to_string(),
3400 }]),
3401 ..Default::default()
3402 },
3403 lsp::Diagnostic {
3404 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3405 severity: Some(DiagnosticSeverity::HINT),
3406 message: "error 1 hint 1".to_string(),
3407 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3408 location: lsp::Location {
3409 uri: buffer_uri.clone(),
3410 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3411 },
3412 message: "original diagnostic".to_string(),
3413 }]),
3414 ..Default::default()
3415 },
3416 lsp::Diagnostic {
3417 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3418 severity: Some(DiagnosticSeverity::ERROR),
3419 message: "error 2".to_string(),
3420 related_information: Some(vec![
3421 lsp::DiagnosticRelatedInformation {
3422 location: lsp::Location {
3423 uri: buffer_uri.clone(),
3424 range: lsp::Range::new(
3425 lsp::Position::new(1, 13),
3426 lsp::Position::new(1, 15),
3427 ),
3428 },
3429 message: "error 2 hint 1".to_string(),
3430 },
3431 lsp::DiagnosticRelatedInformation {
3432 location: lsp::Location {
3433 uri: buffer_uri.clone(),
3434 range: lsp::Range::new(
3435 lsp::Position::new(1, 13),
3436 lsp::Position::new(1, 15),
3437 ),
3438 },
3439 message: "error 2 hint 2".to_string(),
3440 },
3441 ]),
3442 ..Default::default()
3443 },
3444 lsp::Diagnostic {
3445 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3446 severity: Some(DiagnosticSeverity::HINT),
3447 message: "error 2 hint 1".to_string(),
3448 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3449 location: lsp::Location {
3450 uri: buffer_uri.clone(),
3451 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3452 },
3453 message: "original diagnostic".to_string(),
3454 }]),
3455 ..Default::default()
3456 },
3457 lsp::Diagnostic {
3458 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3459 severity: Some(DiagnosticSeverity::HINT),
3460 message: "error 2 hint 2".to_string(),
3461 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3462 location: lsp::Location {
3463 uri: buffer_uri,
3464 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3465 },
3466 message: "original diagnostic".to_string(),
3467 }]),
3468 ..Default::default()
3469 },
3470 ],
3471 version: None,
3472 };
3473
3474 project
3475 .update(cx, |p, cx| {
3476 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3477 })
3478 .unwrap();
3479 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3480
3481 assert_eq!(
3482 buffer
3483 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3484 .collect::<Vec<_>>(),
3485 &[
3486 DiagnosticEntry {
3487 range: Point::new(1, 8)..Point::new(1, 9),
3488 diagnostic: Diagnostic {
3489 severity: DiagnosticSeverity::WARNING,
3490 message: "error 1".to_string(),
3491 group_id: 1,
3492 is_primary: true,
3493 ..Default::default()
3494 }
3495 },
3496 DiagnosticEntry {
3497 range: Point::new(1, 8)..Point::new(1, 9),
3498 diagnostic: Diagnostic {
3499 severity: DiagnosticSeverity::HINT,
3500 message: "error 1 hint 1".to_string(),
3501 group_id: 1,
3502 is_primary: false,
3503 ..Default::default()
3504 }
3505 },
3506 DiagnosticEntry {
3507 range: Point::new(1, 13)..Point::new(1, 15),
3508 diagnostic: Diagnostic {
3509 severity: DiagnosticSeverity::HINT,
3510 message: "error 2 hint 1".to_string(),
3511 group_id: 0,
3512 is_primary: false,
3513 ..Default::default()
3514 }
3515 },
3516 DiagnosticEntry {
3517 range: Point::new(1, 13)..Point::new(1, 15),
3518 diagnostic: Diagnostic {
3519 severity: DiagnosticSeverity::HINT,
3520 message: "error 2 hint 2".to_string(),
3521 group_id: 0,
3522 is_primary: false,
3523 ..Default::default()
3524 }
3525 },
3526 DiagnosticEntry {
3527 range: Point::new(2, 8)..Point::new(2, 17),
3528 diagnostic: Diagnostic {
3529 severity: DiagnosticSeverity::ERROR,
3530 message: "error 2".to_string(),
3531 group_id: 0,
3532 is_primary: true,
3533 ..Default::default()
3534 }
3535 }
3536 ]
3537 );
3538
3539 assert_eq!(
3540 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3541 &[
3542 DiagnosticEntry {
3543 range: Point::new(1, 13)..Point::new(1, 15),
3544 diagnostic: Diagnostic {
3545 severity: DiagnosticSeverity::HINT,
3546 message: "error 2 hint 1".to_string(),
3547 group_id: 0,
3548 is_primary: false,
3549 ..Default::default()
3550 }
3551 },
3552 DiagnosticEntry {
3553 range: Point::new(1, 13)..Point::new(1, 15),
3554 diagnostic: Diagnostic {
3555 severity: DiagnosticSeverity::HINT,
3556 message: "error 2 hint 2".to_string(),
3557 group_id: 0,
3558 is_primary: false,
3559 ..Default::default()
3560 }
3561 },
3562 DiagnosticEntry {
3563 range: Point::new(2, 8)..Point::new(2, 17),
3564 diagnostic: Diagnostic {
3565 severity: DiagnosticSeverity::ERROR,
3566 message: "error 2".to_string(),
3567 group_id: 0,
3568 is_primary: true,
3569 ..Default::default()
3570 }
3571 }
3572 ]
3573 );
3574
3575 assert_eq!(
3576 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3577 &[
3578 DiagnosticEntry {
3579 range: Point::new(1, 8)..Point::new(1, 9),
3580 diagnostic: Diagnostic {
3581 severity: DiagnosticSeverity::WARNING,
3582 message: "error 1".to_string(),
3583 group_id: 1,
3584 is_primary: true,
3585 ..Default::default()
3586 }
3587 },
3588 DiagnosticEntry {
3589 range: Point::new(1, 8)..Point::new(1, 9),
3590 diagnostic: Diagnostic {
3591 severity: DiagnosticSeverity::HINT,
3592 message: "error 1 hint 1".to_string(),
3593 group_id: 1,
3594 is_primary: false,
3595 ..Default::default()
3596 }
3597 },
3598 ]
3599 );
3600}
3601
3602#[gpui::test]
3603async fn test_rename(cx: &mut gpui::TestAppContext) {
3604 init_test(cx);
3605
3606 let fs = FakeFs::new(cx.executor());
3607 fs.insert_tree(
3608 "/dir",
3609 json!({
3610 "one.rs": "const ONE: usize = 1;",
3611 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3612 }),
3613 )
3614 .await;
3615
3616 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3617
3618 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3619 language_registry.add(rust_lang());
3620 let mut fake_servers = language_registry.register_fake_lsp_adapter(
3621 "Rust",
3622 FakeLspAdapter {
3623 capabilities: lsp::ServerCapabilities {
3624 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3625 prepare_provider: Some(true),
3626 work_done_progress_options: Default::default(),
3627 })),
3628 ..Default::default()
3629 },
3630 ..Default::default()
3631 },
3632 );
3633
3634 let buffer = project
3635 .update(cx, |project, cx| {
3636 project.open_local_buffer("/dir/one.rs", cx)
3637 })
3638 .await
3639 .unwrap();
3640
3641 let fake_server = fake_servers.next().await.unwrap();
3642
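    // Prepare a rename at an offset inside `ONE`. The fake server responds with
    // the range of the identifier that will be renamed.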
3643 let response = project.update(cx, |project, cx| {
3644 project.prepare_rename(buffer.clone(), 7, cx)
3645 });
3646 fake_server
3647 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3648 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3649 assert_eq!(params.position, lsp::Position::new(0, 7));
3650 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3651 lsp::Position::new(0, 6),
3652 lsp::Position::new(0, 9),
3653 ))))
3654 })
3655 .next()
3656 .await
3657 .unwrap();
3658 let range = response.await.unwrap().unwrap();
3659 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3660 assert_eq!(range, 6..9);
3661
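    // Perform the rename. The server's workspace edit spans both files, so the
    // resulting project transaction should contain two buffers.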
3662 let response = project.update(cx, |project, cx| {
3663 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3664 });
3665 fake_server
3666 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3667 assert_eq!(
3668 params.text_document_position.text_document.uri.as_str(),
3669 "file:///dir/one.rs"
3670 );
3671 assert_eq!(
3672 params.text_document_position.position,
3673 lsp::Position::new(0, 7)
3674 );
3675 assert_eq!(params.new_name, "THREE");
3676 Ok(Some(lsp::WorkspaceEdit {
3677 changes: Some(
3678 [
3679 (
3680 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3681 vec![lsp::TextEdit::new(
3682 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3683 "THREE".to_string(),
3684 )],
3685 ),
3686 (
3687 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3688 vec![
3689 lsp::TextEdit::new(
3690 lsp::Range::new(
3691 lsp::Position::new(0, 24),
3692 lsp::Position::new(0, 27),
3693 ),
3694 "THREE".to_string(),
3695 ),
3696 lsp::TextEdit::new(
3697 lsp::Range::new(
3698 lsp::Position::new(0, 35),
3699 lsp::Position::new(0, 38),
3700 ),
3701 "THREE".to_string(),
3702 ),
3703 ],
3704 ),
3705 ]
3706 .into_iter()
3707 .collect(),
3708 ),
3709 ..Default::default()
3710 }))
3711 })
3712 .next()
3713 .await
3714 .unwrap();
3715 let mut transaction = response.await.unwrap().0;
3716 assert_eq!(transaction.len(), 2);
3717 assert_eq!(
3718 transaction
3719 .remove_entry(&buffer)
3720 .unwrap()
3721 .0
3722 .update(cx, |buffer, _| buffer.text()),
3723 "const THREE: usize = 1;"
3724 );
3725 assert_eq!(
3726 transaction
3727 .into_keys()
3728 .next()
3729 .unwrap()
3730 .update(cx, |buffer, _| buffer.text()),
3731 "const TWO: usize = one::THREE + one::THREE;"
3732 );
3733}
3734
3735#[gpui::test]
3736async fn test_search(cx: &mut gpui::TestAppContext) {
3737 init_test(cx);
3738
3739 let fs = FakeFs::new(cx.executor());
3740 fs.insert_tree(
3741 "/dir",
3742 json!({
3743 "one.rs": "const ONE: usize = 1;",
3744 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3745 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3746 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3747 }),
3748 )
3749 .await;
3750 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3751 assert_eq!(
3752 search(
3753 &project,
3754 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3755 cx
3756 )
3757 .await
3758 .unwrap(),
3759 HashMap::from_iter([
3760 ("dir/two.rs".to_string(), vec![6..9]),
3761 ("dir/three.rs".to_string(), vec![37..40])
3762 ])
3763 );
3764
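    // Open a buffer and edit it without saving; the search should reflect the
    // buffer's in-memory contents, not just what is on disk.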
3765 let buffer_4 = project
3766 .update(cx, |project, cx| {
3767 project.open_local_buffer("/dir/four.rs", cx)
3768 })
3769 .await
3770 .unwrap();
3771 buffer_4.update(cx, |buffer, cx| {
3772 let text = "two::TWO";
3773 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3774 });
3775
3776 assert_eq!(
3777 search(
3778 &project,
3779 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3780 cx
3781 )
3782 .await
3783 .unwrap(),
3784 HashMap::from_iter([
3785 ("dir/two.rs".to_string(), vec![6..9]),
3786 ("dir/three.rs".to_string(), vec![37..40]),
3787 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3788 ])
3789 );
3790}
3791
3792#[gpui::test]
3793async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3794 init_test(cx);
3795
3796 let search_query = "file";
3797
3798 let fs = FakeFs::new(cx.executor());
3799 fs.insert_tree(
3800 "/dir",
3801 json!({
3802 "one.rs": r#"// Rust file one"#,
3803 "one.ts": r#"// TypeScript file one"#,
3804 "two.rs": r#"// Rust file two"#,
3805 "two.ts": r#"// TypeScript file two"#,
3806 }),
3807 )
3808 .await;
3809 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3810
3811 assert!(
3812 search(
3813 &project,
3814 SearchQuery::text(
3815 search_query,
3816 false,
3817 true,
3818 false,
3819 vec![PathMatcher::new("*.odd").unwrap()],
3820 Vec::new()
3821 )
3822 .unwrap(),
3823 cx
3824 )
3825 .await
3826 .unwrap()
3827 .is_empty(),
3828 "If no inclusions match, no files should be returned"
3829 );
3830
3831 assert_eq!(
3832 search(
3833 &project,
3834 SearchQuery::text(
3835 search_query,
3836 false,
3837 true,
3838 false,
3839 vec![PathMatcher::new("*.rs").unwrap()],
3840 Vec::new()
3841 )
3842 .unwrap(),
3843 cx
3844 )
3845 .await
3846 .unwrap(),
3847 HashMap::from_iter([
3848 ("dir/one.rs".to_string(), vec![8..12]),
3849 ("dir/two.rs".to_string(), vec![8..12]),
3850 ]),
3851 "Rust only search should give only Rust files"
3852 );
3853
3854 assert_eq!(
3855 search(
3856 &project,
3857 SearchQuery::text(
3858 search_query,
3859 false,
3860 true,
3861 false,
3862 vec![
3863 PathMatcher::new("*.ts").unwrap(),
3864 PathMatcher::new("*.odd").unwrap(),
3865 ],
3866 Vec::new()
3867 ).unwrap(),
3868 cx
3869 )
3870 .await
3871 .unwrap(),
3872 HashMap::from_iter([
3873 ("dir/one.ts".to_string(), vec![14..18]),
3874 ("dir/two.ts".to_string(), vec![14..18]),
3875 ]),
3876 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3877 );
3878
3879 assert_eq!(
3880 search(
3881 &project,
3882 SearchQuery::text(
3883 search_query,
3884 false,
3885 true,
3886 false,
3887 vec![
3888 PathMatcher::new("*.rs").unwrap(),
3889 PathMatcher::new("*.ts").unwrap(),
3890 PathMatcher::new("*.odd").unwrap(),
3891 ],
3892 Vec::new()
3893 ).unwrap(),
3894 cx
3895 )
3896 .await
3897 .unwrap(),
3898 HashMap::from_iter([
3899 ("dir/two.ts".to_string(), vec![14..18]),
3900 ("dir/one.rs".to_string(), vec![8..12]),
3901 ("dir/one.ts".to_string(), vec![14..18]),
3902 ("dir/two.rs".to_string(), vec![8..12]),
3903 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3905 );
3906}
3907
3908#[gpui::test]
3909async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3910 init_test(cx);
3911
3912 let search_query = "file";
3913
3914 let fs = FakeFs::new(cx.executor());
3915 fs.insert_tree(
3916 "/dir",
3917 json!({
3918 "one.rs": r#"// Rust file one"#,
3919 "one.ts": r#"// TypeScript file one"#,
3920 "two.rs": r#"// Rust file two"#,
3921 "two.ts": r#"// TypeScript file two"#,
3922 }),
3923 )
3924 .await;
3925 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3926
3927 assert_eq!(
3928 search(
3929 &project,
3930 SearchQuery::text(
3931 search_query,
3932 false,
3933 true,
3934 false,
3935 Vec::new(),
3936 vec![PathMatcher::new("*.odd").unwrap()],
3937 )
3938 .unwrap(),
3939 cx
3940 )
3941 .await
3942 .unwrap(),
3943 HashMap::from_iter([
3944 ("dir/one.rs".to_string(), vec![8..12]),
3945 ("dir/one.ts".to_string(), vec![14..18]),
3946 ("dir/two.rs".to_string(), vec![8..12]),
3947 ("dir/two.ts".to_string(), vec![14..18]),
3948 ]),
3949 "If no exclusions match, all files should be returned"
3950 );
3951
3952 assert_eq!(
3953 search(
3954 &project,
3955 SearchQuery::text(
3956 search_query,
3957 false,
3958 true,
3959 false,
3960 Vec::new(),
3961 vec![PathMatcher::new("*.rs").unwrap()],
3962 )
3963 .unwrap(),
3964 cx
3965 )
3966 .await
3967 .unwrap(),
3968 HashMap::from_iter([
3969 ("dir/one.ts".to_string(), vec![14..18]),
3970 ("dir/two.ts".to_string(), vec![14..18]),
3971 ]),
3972 "Rust exclusion search should give only TypeScript files"
3973 );
3974
3975 assert_eq!(
3976 search(
3977 &project,
3978 SearchQuery::text(
3979 search_query,
3980 false,
3981 true,
3982 false,
3983 Vec::new(),
3984 vec![
3985 PathMatcher::new("*.ts").unwrap(),
3986 PathMatcher::new("*.odd").unwrap(),
3987 ],
3988 ).unwrap(),
3989 cx
3990 )
3991 .await
3992 .unwrap(),
3993 HashMap::from_iter([
3994 ("dir/one.rs".to_string(), vec![8..12]),
3995 ("dir/two.rs".to_string(), vec![8..12]),
3996 ]),
3997 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3998 );
3999
4000 assert!(
4001 search(
4002 &project,
4003 SearchQuery::text(
4004 search_query,
4005 false,
4006 true,
4007 false,
4008 Vec::new(),
4009 vec![
4010 PathMatcher::new("*.rs").unwrap(),
4011 PathMatcher::new("*.ts").unwrap(),
4012 PathMatcher::new("*.odd").unwrap(),
4013 ],
4014 ).unwrap(),
4015 cx
4016 )
4017 .await
4018 .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
4020 );
4021}
4022
4023#[gpui::test]
4024async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4025 init_test(cx);
4026
4027 let search_query = "file";
4028
4029 let fs = FakeFs::new(cx.executor());
4030 fs.insert_tree(
4031 "/dir",
4032 json!({
4033 "one.rs": r#"// Rust file one"#,
4034 "one.ts": r#"// TypeScript file one"#,
4035 "two.rs": r#"// Rust file two"#,
4036 "two.ts": r#"// TypeScript file two"#,
4037 }),
4038 )
4039 .await;
4040 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4041
4042 assert!(
4043 search(
4044 &project,
4045 SearchQuery::text(
4046 search_query,
4047 false,
4048 true,
4049 false,
4050 vec![PathMatcher::new("*.odd").unwrap()],
4051 vec![PathMatcher::new("*.odd").unwrap()],
4052 )
4053 .unwrap(),
4054 cx
4055 )
4056 .await
4057 .unwrap()
4058 .is_empty(),
4059 "If neither inclusions nor exclusions match anything, no files should be returned"
4060 );
4061
4062 assert!(
4063 search(
4064 &project,
4065 SearchQuery::text(
4066 search_query,
4067 false,
4068 true,
4069 false,
4070 vec![PathMatcher::new("*.ts").unwrap()],
4071 vec![PathMatcher::new("*.ts").unwrap()],
4072 ).unwrap(),
4073 cx
4074 )
4075 .await
4076 .unwrap()
4077 .is_empty(),
4078 "If both TypeScript exclusions and inclusions match, exclusions should win and return no files."
4079 );
4080
4081 assert!(
4082 search(
4083 &project,
4084 SearchQuery::text(
4085 search_query,
4086 false,
4087 true,
4088 false,
4089 vec![
4090 PathMatcher::new("*.ts").unwrap(),
4091 PathMatcher::new("*.odd").unwrap()
4092 ],
4093 vec![
4094 PathMatcher::new("*.ts").unwrap(),
4095 PathMatcher::new("*.odd").unwrap()
4096 ],
4097 )
4098 .unwrap(),
4099 cx
4100 )
4101 .await
4102 .unwrap()
4103 .is_empty(),
4104 "Adding non-matching inclusions and exclusions should not change that."
4105 );
4106
4107 assert_eq!(
4108 search(
4109 &project,
4110 SearchQuery::text(
4111 search_query,
4112 false,
4113 true,
4114 false,
4115 vec![
4116 PathMatcher::new("*.ts").unwrap(),
4117 PathMatcher::new("*.odd").unwrap()
4118 ],
4119 vec![
4120 PathMatcher::new("*.rs").unwrap(),
4121 PathMatcher::new("*.odd").unwrap()
4122 ],
4123 )
4124 .unwrap(),
4125 cx
4126 )
4127 .await
4128 .unwrap(),
4129 HashMap::from_iter([
4130 ("dir/one.ts".to_string(), vec![14..18]),
4131 ("dir/two.ts".to_string(), vec![14..18]),
4132 ]),
4133 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4134 );
4135}
4136
4137#[gpui::test]
4138async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4139 init_test(cx);
4140
4141 let fs = FakeFs::new(cx.executor());
4142 fs.insert_tree(
4143 "/worktree-a",
4144 json!({
4145 "haystack.rs": r#"// NEEDLE"#,
4146 "haystack.ts": r#"// NEEDLE"#,
4147 }),
4148 )
4149 .await;
4150 fs.insert_tree(
4151 "/worktree-b",
4152 json!({
4153 "haystack.rs": r#"// NEEDLE"#,
4154 "haystack.ts": r#"// NEEDLE"#,
4155 }),
4156 )
4157 .await;
4158
4159 let project = Project::test(
4160 fs.clone(),
4161 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4162 cx,
4163 )
4164 .await;
4165
4166 assert_eq!(
4167 search(
4168 &project,
4169 SearchQuery::text(
4170 "NEEDLE",
4171 false,
4172 true,
4173 false,
4174 vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
4175 Vec::new()
4176 )
4177 .unwrap(),
4178 cx
4179 )
4180 .await
4181 .unwrap(),
4182 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4183 "should only return results from included worktree"
4184 );
4185 assert_eq!(
4186 search(
4187 &project,
4188 SearchQuery::text(
4189 "NEEDLE",
4190 false,
4191 true,
4192 false,
4193 vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
4194 Vec::new()
4195 )
4196 .unwrap(),
4197 cx
4198 )
4199 .await
4200 .unwrap(),
4201 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4202 "should only return results from included worktree"
4203 );
4204
4205 assert_eq!(
4206 search(
4207 &project,
4208 SearchQuery::text(
4209 "NEEDLE",
4210 false,
4211 true,
4212 false,
4213 vec![PathMatcher::new("*.ts").unwrap()],
4214 Vec::new()
4215 )
4216 .unwrap(),
4217 cx
4218 )
4219 .await
4220 .unwrap(),
4221 HashMap::from_iter([
4222 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4223 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4224 ]),
4225 "should return results from both worktrees"
4226 );
4227}
4228
4229#[gpui::test]
4230async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4231 init_test(cx);
4232
4233 let fs = FakeFs::new(cx.background_executor.clone());
4234 fs.insert_tree(
4235 "/dir",
4236 json!({
4237 ".git": {},
4238 ".gitignore": "**/target\n/node_modules\n",
4239 "target": {
4240 "index.txt": "index_key:index_value"
4241 },
4242 "node_modules": {
4243 "eslint": {
4244 "index.ts": "const eslint_key = 'eslint value'",
4245 "package.json": r#"{ "some_key": "some value" }"#,
4246 },
4247 "prettier": {
4248 "index.ts": "const prettier_key = 'prettier value'",
4249 "package.json": r#"{ "other_key": "other value" }"#,
4250 },
4251 },
4252 "package.json": r#"{ "main_key": "main value" }"#,
4253 }),
4254 )
4255 .await;
4256 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4257
4258 let query = "key";
4259 assert_eq!(
4260 search(
4261 &project,
4262 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4263 cx
4264 )
4265 .await
4266 .unwrap(),
4267 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
4268 "Only one non-ignored file should contain the query"
4269 );
4270
4271 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4272 assert_eq!(
4273 search(
4274 &project,
4275 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4276 cx
4277 )
4278 .await
4279 .unwrap(),
4280 HashMap::from_iter([
4281 ("dir/package.json".to_string(), vec![8..11]),
4282 ("dir/target/index.txt".to_string(), vec![6..9]),
4283 (
4284 "dir/node_modules/prettier/package.json".to_string(),
4285 vec![9..12]
4286 ),
4287 (
4288 "dir/node_modules/prettier/index.ts".to_string(),
4289 vec![15..18]
4290 ),
4291 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4292 (
4293 "dir/node_modules/eslint/package.json".to_string(),
4294 vec![8..11]
4295 ),
4296 ]),
4297 "Unrestricted search with ignored directories should find every file containing the query"
4298 );
4299
4300 let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
4301 let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
4302 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4303 assert_eq!(
4304 search(
4305 &project,
4306 SearchQuery::text(
4307 query,
4308 false,
4309 false,
4310 true,
4311 files_to_include,
4312 files_to_exclude,
4313 )
4314 .unwrap(),
4315 cx
4316 )
4317 .await
4318 .unwrap(),
4319 HashMap::from_iter([(
4320 "dir/node_modules/prettier/package.json".to_string(),
4321 vec![9..12]
4322 )]),
4323 "With a search that includes the ignored prettier directory and excludes TS files, only one file should be found"
4324 );
4325}
4326
4327#[test]
4328fn test_glob_literal_prefix() {
4329 assert_eq!(glob_literal_prefix("**/*.js"), "");
4330 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
4331 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
4332 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
4333}
4334
4335#[gpui::test]
4336async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4337 init_test(cx);
4338
4339 let fs = FakeFs::new(cx.executor().clone());
4340 fs.insert_tree(
4341 "/one/two",
4342 json!({
4343 "three": {
4344 "a.txt": "",
4345 "four": {}
4346 },
4347 "c.rs": ""
4348 }),
4349 )
4350 .await;
4351
4352 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4353 project
4354 .update(cx, |project, cx| {
4355 let id = project.worktrees().next().unwrap().read(cx).id();
4356 project.create_entry((id, "b.."), true, cx)
4357 })
4358 .unwrap()
4359 .await
4360 .unwrap();
4361
4362 // Can't create paths outside the project
4363 let result = project
4364 .update(cx, |project, cx| {
4365 let id = project.worktrees().next().unwrap().read(cx).id();
4366 project.create_entry((id, "../../boop"), true, cx)
4367 })
4368 .await;
4369 assert!(result.is_err());
4370
4371 // Can't create paths with '..'
4372 let result = project
4373 .update(cx, |project, cx| {
4374 let id = project.worktrees().next().unwrap().read(cx).id();
4375 project.create_entry((id, "four/../beep"), true, cx)
4376 })
4377 .await;
4378 assert!(result.is_err());
4379
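    // Only the valid `b..` entry should have been created on disk.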
4380 assert_eq!(
4381 fs.paths(true),
4382 vec![
4383 PathBuf::from("/"),
4384 PathBuf::from("/one"),
4385 PathBuf::from("/one/two"),
4386 PathBuf::from("/one/two/c.rs"),
4387 PathBuf::from("/one/two/three"),
4388 PathBuf::from("/one/two/three/a.txt"),
4389 PathBuf::from("/one/two/three/b.."),
4390 PathBuf::from("/one/two/three/four"),
4391 ]
4392 );
4393
4394 // And we cannot open buffers with '..'
4395 let result = project
4396 .update(cx, |project, cx| {
4397 let id = project.worktrees().next().unwrap().read(cx).id();
4398 project.open_buffer((id, "../c.rs"), cx)
4399 })
4400 .await;
4401 assert!(result.is_err())
4402}
4403
4404#[gpui::test]
4405async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4406 init_test(cx);
4407
4408 let fs = FakeFs::new(cx.executor());
4409 fs.insert_tree(
4410 "/dir",
4411 json!({
4412 "a.tsx": "a",
4413 }),
4414 )
4415 .await;
4416
4417 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4418
4419 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4420 language_registry.add(tsx_lang());
4421 let language_server_names = [
4422 "TypeScriptServer",
4423 "TailwindServer",
4424 "ESLintServer",
4425 "NoHoverCapabilitiesServer",
4426 ];
4427 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4428 "tsx",
4429 true,
4430 FakeLspAdapter {
4431 name: &language_server_names[0],
4432 capabilities: lsp::ServerCapabilities {
4433 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4434 ..lsp::ServerCapabilities::default()
4435 },
4436 ..FakeLspAdapter::default()
4437 },
4438 );
4439 let _a = language_registry.register_specific_fake_lsp_adapter(
4440 "tsx",
4441 false,
4442 FakeLspAdapter {
4443 name: &language_server_names[1],
4444 capabilities: lsp::ServerCapabilities {
4445 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4446 ..lsp::ServerCapabilities::default()
4447 },
4448 ..FakeLspAdapter::default()
4449 },
4450 );
4451 let _b = language_registry.register_specific_fake_lsp_adapter(
4452 "tsx",
4453 false,
4454 FakeLspAdapter {
4455 name: &language_server_names[2],
4456 capabilities: lsp::ServerCapabilities {
4457 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4458 ..lsp::ServerCapabilities::default()
4459 },
4460 ..FakeLspAdapter::default()
4461 },
4462 );
4463 let _c = language_registry.register_specific_fake_lsp_adapter(
4464 "tsx",
4465 false,
4466 FakeLspAdapter {
4467 name: &language_server_names[3],
4468 capabilities: lsp::ServerCapabilities {
4469 hover_provider: None,
4470 ..lsp::ServerCapabilities::default()
4471 },
4472 ..FakeLspAdapter::default()
4473 },
4474 );
4475
4476 let buffer = project
4477 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4478 .await
4479 .unwrap();
4480 cx.executor().run_until_parked();
4481
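    // Wire up one hover handler per fake server: TypeScript and Tailwind answer with
    // hover content, ESLint answers with `None`, and the server without hover
    // capabilities must never be queried.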
4482 let mut servers_with_hover_requests = HashMap::default();
4483 for i in 0..language_server_names.len() {
4484 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4485 panic!(
4486 "Failed to get language server #{i} with name {}",
4487 &language_server_names[i]
4488 )
4489 });
4490 let new_server_name = new_server.server.name();
4491 assert!(
4492 !servers_with_hover_requests.contains_key(new_server_name),
4493 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4494 );
4495 let new_server_name = new_server_name.to_string();
4496 match new_server_name.as_str() {
4497 "TailwindServer" | "TypeScriptServer" => {
4498 servers_with_hover_requests.insert(
4499 new_server_name.clone(),
4500 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4501 let name = new_server_name.clone();
4502 async move {
4503 Ok(Some(lsp::Hover {
4504 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4505 format!("{name} hover"),
4506 )),
4507 range: None,
4508 }))
4509 }
4510 }),
4511 );
4512 }
4513 "ESLintServer" => {
4514 servers_with_hover_requests.insert(
4515 new_server_name,
4516 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4517 |_, _| async move { Ok(None) },
4518 ),
4519 );
4520 }
4521 "NoHoverCapabilitiesServer" => {
4522 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4523 |_, _| async move {
4524 panic!(
4525 "Should not call for hovers server with no corresponding capabilities"
4526 )
4527 },
4528 );
4529 }
4530 unexpected => panic!("Unexpected server name: {unexpected}"),
4531 }
4532 }
4533
4534 let hover_task = project.update(cx, |project, cx| {
4535 project.hover(&buffer, Point::new(0, 0), cx)
4536 });
4537 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4538 |mut hover_request| async move {
4539 hover_request
4540 .next()
4541 .await
4542 .expect("All hover requests should have been triggered")
4543 },
4544 ))
4545 .await;
4546 assert_eq!(
4547 vec!["TailwindServer hover", "TypeScriptServer hover"],
4548 hover_task
4549 .await
4550 .into_iter()
4551 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4552 .sorted()
4553 .collect::<Vec<_>>(),
4554 "Should receive hover responses from all related servers with hover capabilities"
4555 );
4556}
4557
4558#[gpui::test]
4559async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4560 init_test(cx);
4561
4562 let fs = FakeFs::new(cx.executor());
4563 fs.insert_tree(
4564 "/dir",
4565 json!({
4566 "a.ts": "a",
4567 }),
4568 )
4569 .await;
4570
4571 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4572
4573 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4574 language_registry.add(typescript_lang());
4575 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4576 "TypeScript",
4577 FakeLspAdapter {
4578 capabilities: lsp::ServerCapabilities {
4579 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4580 ..lsp::ServerCapabilities::default()
4581 },
4582 ..FakeLspAdapter::default()
4583 },
4584 );
4585
4586 let buffer = project
4587 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4588 .await
4589 .unwrap();
4590 cx.executor().run_until_parked();
4591
4592 let fake_server = fake_language_servers
4593 .next()
4594 .await
4595 .expect("failed to get the language server");
4596
4597 let mut request_handled =
4598 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4599 Ok(Some(lsp::Hover {
4600 contents: lsp::HoverContents::Array(vec![
4601 lsp::MarkedString::String("".to_string()),
4602 lsp::MarkedString::String(" ".to_string()),
4603 lsp::MarkedString::String("\n\n\n".to_string()),
4604 ]),
4605 range: None,
4606 }))
4607 });
4608
4609 let hover_task = project.update(cx, |project, cx| {
4610 project.hover(&buffer, Point::new(0, 0), cx)
4611 });
4612 let () = request_handled
4613 .next()
4614 .await
4615 .expect("All hover requests should have been triggered");
4616 assert_eq!(
4617 Vec::<String>::new(),
4618 hover_task
4619 .await
4620 .into_iter()
4621 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4622 .sorted()
4623 .collect::<Vec<_>>(),
4624 "Empty hover parts should be ignored"
4625 );
4626}
4627
4628#[gpui::test]
4629async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4630 init_test(cx);
4631
4632 let fs = FakeFs::new(cx.executor());
4633 fs.insert_tree(
4634 "/dir",
4635 json!({
4636 "a.tsx": "a",
4637 }),
4638 )
4639 .await;
4640
4641 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4642
4643 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4644 language_registry.add(tsx_lang());
4645 let language_server_names = [
4646 "TypeScriptServer",
4647 "TailwindServer",
4648 "ESLintServer",
4649 "NoActionsCapabilitiesServer",
4650 ];
4651 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4652 "tsx",
4653 true,
4654 FakeLspAdapter {
4655 name: &language_server_names[0],
4656 capabilities: lsp::ServerCapabilities {
4657 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4658 ..lsp::ServerCapabilities::default()
4659 },
4660 ..FakeLspAdapter::default()
4661 },
4662 );
4663 let _a = language_registry.register_specific_fake_lsp_adapter(
4664 "tsx",
4665 false,
4666 FakeLspAdapter {
4667 name: &language_server_names[1],
4668 capabilities: lsp::ServerCapabilities {
4669 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4670 ..lsp::ServerCapabilities::default()
4671 },
4672 ..FakeLspAdapter::default()
4673 },
4674 );
4675 let _b = language_registry.register_specific_fake_lsp_adapter(
4676 "tsx",
4677 false,
4678 FakeLspAdapter {
4679 name: &language_server_names[2],
4680 capabilities: lsp::ServerCapabilities {
4681 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4682 ..lsp::ServerCapabilities::default()
4683 },
4684 ..FakeLspAdapter::default()
4685 },
4686 );
4687 let _c = language_registry.register_specific_fake_lsp_adapter(
4688 "tsx",
4689 false,
4690 FakeLspAdapter {
4691 name: &language_server_names[3],
4692 capabilities: lsp::ServerCapabilities {
4693 code_action_provider: None,
4694 ..lsp::ServerCapabilities::default()
4695 },
4696 ..FakeLspAdapter::default()
4697 },
4698 );
4699
4700 let buffer = project
4701 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4702 .await
4703 .unwrap();
4704 cx.executor().run_until_parked();
4705
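    // Same shape as the hover test: TypeScript and Tailwind answer with a code
    // action, ESLint answers with `None`, and the server without code action
    // capabilities must never be queried.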
4706 let mut servers_with_actions_requests = HashMap::default();
4707 for i in 0..language_server_names.len() {
4708 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4709 panic!(
4710 "Failed to get language server #{i} with name {}",
4711 &language_server_names[i]
4712 )
4713 });
4714 let new_server_name = new_server.server.name();
4715 assert!(
4716 !servers_with_actions_requests.contains_key(new_server_name),
4717 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4718 );
4719 let new_server_name = new_server_name.to_string();
4720 match new_server_name.as_str() {
4721 "TailwindServer" | "TypeScriptServer" => {
4722 servers_with_actions_requests.insert(
4723 new_server_name.clone(),
4724 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4725 move |_, _| {
4726 let name = new_server_name.clone();
4727 async move {
4728 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4729 lsp::CodeAction {
4730 title: format!("{name} code action"),
4731 ..lsp::CodeAction::default()
4732 },
4733 )]))
4734 }
4735 },
4736 ),
4737 );
4738 }
4739 "ESLintServer" => {
4740 servers_with_actions_requests.insert(
4741 new_server_name,
4742 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4743 |_, _| async move { Ok(None) },
4744 ),
4745 );
4746 }
4747 "NoActionsCapabilitiesServer" => {
4748 let _never_handled = new_server
4749 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4750 panic!(
4751 "Should not call for code actions server with no corresponding capabilities"
4752 )
4753 });
4754 }
4755 unexpected => panic!("Unexpected server name: {unexpected}"),
4756 }
4757 }
4758
4759 let code_actions_task = project.update(cx, |project, cx| {
4760 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4761 });
4762 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4763 |mut code_actions_request| async move {
4764 code_actions_request
4765 .next()
4766 .await
4767 .expect("All code actions requests should have been triggered")
4768 },
4769 ))
4770 .await;
4771 assert_eq!(
4772 vec!["TailwindServer code action", "TypeScriptServer code action"],
4773 code_actions_task
4774 .await
4775 .into_iter()
4776 .map(|code_action| code_action.lsp_action.title)
4777 .sorted()
4778 .collect::<Vec<_>>(),
4779 "Should receive code action responses from all related servers with code action capabilities"
4780 );
4781}
4782
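/// Runs `query` against the project and collects the results into a map from
/// each matching file's path to the offset ranges of its matches.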
4783async fn search(
4784 project: &Model<Project>,
4785 query: SearchQuery,
4786 cx: &mut gpui::TestAppContext,
4787) -> Result<HashMap<String, Vec<Range<usize>>>> {
4788 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4789 let mut results = HashMap::default();
4790 while let Some(search_result) = search_rx.next().await {
4791 match search_result {
4792 SearchResult::Buffer { buffer, ranges } => {
4793 results.entry(buffer).or_insert(ranges);
4794 }
4795 SearchResult::LimitReached => {}
4796 }
4797 }
4798 Ok(results
4799 .into_iter()
4800 .map(|(buffer, ranges)| {
4801 buffer.update(cx, |buffer, cx| {
4802 let path = buffer
4803 .file()
4804 .unwrap()
4805 .full_path(cx)
4806 .to_string_lossy()
4807 .to_string();
4808 let ranges = ranges
4809 .into_iter()
4810 .map(|range| range.to_offset(buffer))
4811 .collect::<Vec<_>>();
4812 (path, ranges)
4813 })
4814 })
4815 .collect())
4816}
4817
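/// Common test setup: enables logging when `RUST_LOG` is set, then installs the
/// test settings store, release channel, language support, and project settings.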
4818fn init_test(cx: &mut gpui::TestAppContext) {
4819 if std::env::var("RUST_LOG").is_ok() {
4820 env_logger::try_init().ok();
4821 }
4822
4823 cx.update(|cx| {
4824 let settings_store = SettingsStore::test(cx);
4825 cx.set_global(settings_store);
4826 release_channel::init("0.0.0", cx);
4827 language::init(cx);
4828 Project::init_settings(cx);
4829 });
4830}
4831
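// Minimal language definitions for the tests above: each configures only a name,
// a path-suffix matcher, and (where available) a tree-sitter grammar.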
4832fn json_lang() -> Arc<Language> {
4833 Arc::new(Language::new(
4834 LanguageConfig {
4835 name: "JSON".into(),
4836 matcher: LanguageMatcher {
4837 path_suffixes: vec!["json".to_string()],
4838 ..Default::default()
4839 },
4840 ..Default::default()
4841 },
4842 None,
4843 ))
4844}
4845
4846fn js_lang() -> Arc<Language> {
4847 Arc::new(Language::new(
4848 LanguageConfig {
4849 name: Arc::from("JavaScript"),
4850 matcher: LanguageMatcher {
4851 path_suffixes: vec!["js".to_string()],
4852 ..Default::default()
4853 },
4854 ..Default::default()
4855 },
4856 None,
4857 ))
4858}
4859
4860fn rust_lang() -> Arc<Language> {
4861 Arc::new(Language::new(
4862 LanguageConfig {
4863 name: "Rust".into(),
4864 matcher: LanguageMatcher {
4865 path_suffixes: vec!["rs".to_string()],
4866 ..Default::default()
4867 },
4868 ..Default::default()
4869 },
4870 Some(tree_sitter_rust::language()),
4871 ))
4872}
4873
4874fn typescript_lang() -> Arc<Language> {
4875 Arc::new(Language::new(
4876 LanguageConfig {
4877 name: "TypeScript".into(),
4878 matcher: LanguageMatcher {
4879 path_suffixes: vec!["ts".to_string()],
4880 ..Default::default()
4881 },
4882 ..Default::default()
4883 },
4884 Some(tree_sitter_typescript::language_typescript()),
4885 ))
4886}
4887
4888fn tsx_lang() -> Arc<Language> {
4889 Arc::new(Language::new(
4890 LanguageConfig {
4891 name: "tsx".into(),
4892 matcher: LanguageMatcher {
4893 path_suffixes: vec!["tsx".to_string()],
4894 ..Default::default()
4895 },
4896 ..Default::default()
4897 },
4898 Some(tree_sitter_typescript::language_tsx()),
4899 ))
4900}