1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use unindent::Unindent as _;
18use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
19use worktree::WorktreeModelHandle as _;
20
21#[gpui::test]
22async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
23 cx.executor().allow_parking();
24
25 let (tx, mut rx) = futures::channel::mpsc::unbounded();
26 let _thread = std::thread::spawn(move || {
27 std::fs::metadata("/Users").unwrap();
28 std::thread::sleep(Duration::from_millis(1000));
29 tx.unbounded_send(1).unwrap();
30 });
31 rx.next().await.unwrap();
32}
33
34#[gpui::test]
35async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let io_task = smol::unblock(move || {
39 println!("sleeping on thread {:?}", std::thread::current().id());
40 std::thread::sleep(Duration::from_millis(10));
41 1
42 });
43
44 let task = cx.foreground_executor().spawn(async move {
45 io_task.await;
46 });
47
48 task.await;
49}
50
51#[cfg(not(windows))]
52#[gpui::test]
53async fn test_symlinks(cx: &mut gpui::TestAppContext) {
54 init_test(cx);
55 cx.executor().allow_parking();
56
57 let dir = temp_tree(json!({
58 "root": {
59 "apple": "",
60 "banana": {
61 "carrot": {
62 "date": "",
63 "endive": "",
64 }
65 },
66 "fennel": {
67 "grape": "",
68 }
69 }
70 }));
71
72 let root_link_path = dir.path().join("root_link");
73 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
74 os::unix::fs::symlink(
75 &dir.path().join("root/fennel"),
76 &dir.path().join("root/finnochio"),
77 )
78 .unwrap();
79
80 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
81
82 project.update(cx, |project, cx| {
83 let tree = project.worktrees().next().unwrap().read(cx);
84 assert_eq!(tree.file_count(), 5);
85 assert_eq!(
86 tree.inode_for_path("fennel/grape"),
87 tree.inode_for_path("finnochio/grape")
88 );
89 });
90}
91
92#[gpui::test]
93async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
94 init_test(cx);
95
96 let fs = FakeFs::new(cx.executor());
97 fs.insert_tree(
98 "/the-root",
99 json!({
100 ".zed": {
101 "settings.json": r#"{ "tab_size": 8 }"#,
102 "tasks.json": r#"[{
103 "label": "cargo check",
104 "command": "cargo",
105 "args": ["check", "--all"]
106 },]"#,
107 },
108 "a": {
109 "a.rs": "fn a() {\n A\n}"
110 },
111 "b": {
112 ".zed": {
113 "settings.json": r#"{ "tab_size": 2 }"#,
114 "tasks.json": r#"[{
115 "label": "cargo check",
116 "command": "cargo",
117 "args": ["check"]
118 },]"#,
119 },
120 "b.rs": "fn b() {\n B\n}"
121 }
122 }),
123 )
124 .await;
125
126 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
127 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
128
129 cx.executor().run_until_parked();
130 cx.update(|cx| {
131 let tree = worktree.read(cx);
132
133 let settings_a = language_settings(
134 None,
135 Some(
136 &(File::for_entry(
137 tree.entry_for_path("a/a.rs").unwrap().clone(),
138 worktree.clone(),
139 ) as _),
140 ),
141 cx,
142 );
143 let settings_b = language_settings(
144 None,
145 Some(
146 &(File::for_entry(
147 tree.entry_for_path("b/b.rs").unwrap().clone(),
148 worktree.clone(),
149 ) as _),
150 ),
151 cx,
152 );
153
154 assert_eq!(settings_a.tab_size.get(), 8);
155 assert_eq!(settings_b.tab_size.get(), 2);
156
157 let workree_id = project.update(cx, |project, cx| {
158 project.worktrees().next().unwrap().read(cx).id()
159 });
160 let all_tasks = project
161 .update(cx, |project, cx| {
162 project
163 .task_inventory()
164 .update(cx, |inventory, cx| inventory.list_tasks(None, None, cx))
165 })
166 .into_iter()
167 .map(|(source_kind, task)| (source_kind, task.label))
168 .collect::<Vec<_>>();
169 assert_eq!(
170 all_tasks,
171 vec![
172 (
173 TaskSourceKind::Worktree {
174 id: workree_id,
175 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
176 id_base: "local_tasks_for_worktree",
177 },
178 "cargo check".to_string()
179 ),
180 (
181 TaskSourceKind::Worktree {
182 id: workree_id,
183 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
184 id_base: "local_tasks_for_worktree",
185 },
186 "cargo check".to_string()
187 ),
188 ]
189 );
190 });
191}
192
193#[gpui::test]
194async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
195 init_test(cx);
196
197 let fs = FakeFs::new(cx.executor());
198 fs.insert_tree(
199 "/the-root",
200 json!({
201 "test.rs": "const A: i32 = 1;",
202 "test2.rs": "",
203 "Cargo.toml": "a = 1",
204 "package.json": "{\"a\": 1}",
205 }),
206 )
207 .await;
208
209 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
210 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
211
212 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
213 "Rust",
214 FakeLspAdapter {
215 name: "the-rust-language-server",
216 capabilities: lsp::ServerCapabilities {
217 completion_provider: Some(lsp::CompletionOptions {
218 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
219 ..Default::default()
220 }),
221 ..Default::default()
222 },
223 ..Default::default()
224 },
225 );
226 let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
227 "JSON",
228 FakeLspAdapter {
229 name: "the-json-language-server",
230 capabilities: lsp::ServerCapabilities {
231 completion_provider: Some(lsp::CompletionOptions {
232 trigger_characters: Some(vec![":".to_string()]),
233 ..Default::default()
234 }),
235 ..Default::default()
236 },
237 ..Default::default()
238 },
239 );
240
241 // Open a buffer without an associated language server.
242 let toml_buffer = project
243 .update(cx, |project, cx| {
244 project.open_local_buffer("/the-root/Cargo.toml", cx)
245 })
246 .await
247 .unwrap();
248
249 // Open a buffer with an associated language server before the language for it has been loaded.
250 let rust_buffer = project
251 .update(cx, |project, cx| {
252 project.open_local_buffer("/the-root/test.rs", cx)
253 })
254 .await
255 .unwrap();
256 rust_buffer.update(cx, |buffer, _| {
257 assert_eq!(buffer.language().map(|l| l.name()), None);
258 });
259
260 // Now we add the languages to the project, and ensure they get assigned to all
261 // the relevant open buffers.
262 language_registry.add(json_lang());
263 language_registry.add(rust_lang());
264 cx.executor().run_until_parked();
265 rust_buffer.update(cx, |buffer, _| {
266 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
267 });
268
269 // A server is started up, and it is notified about Rust files.
270 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
271 assert_eq!(
272 fake_rust_server
273 .receive_notification::<lsp::notification::DidOpenTextDocument>()
274 .await
275 .text_document,
276 lsp::TextDocumentItem {
277 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
278 version: 0,
279 text: "const A: i32 = 1;".to_string(),
280 language_id: Default::default()
281 }
282 );
283
284 // The buffer is configured based on the language server's capabilities.
285 rust_buffer.update(cx, |buffer, _| {
286 assert_eq!(
287 buffer.completion_triggers(),
288 &[".".to_string(), "::".to_string()]
289 );
290 });
291 toml_buffer.update(cx, |buffer, _| {
292 assert!(buffer.completion_triggers().is_empty());
293 });
294
295 // Edit a buffer. The changes are reported to the language server.
296 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
297 assert_eq!(
298 fake_rust_server
299 .receive_notification::<lsp::notification::DidChangeTextDocument>()
300 .await
301 .text_document,
302 lsp::VersionedTextDocumentIdentifier::new(
303 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
304 1
305 )
306 );
307
308 // Open a third buffer with a different associated language server.
309 let json_buffer = project
310 .update(cx, |project, cx| {
311 project.open_local_buffer("/the-root/package.json", cx)
312 })
313 .await
314 .unwrap();
315
316 // A json language server is started up and is only notified about the json buffer.
317 let mut fake_json_server = fake_json_servers.next().await.unwrap();
318 assert_eq!(
319 fake_json_server
320 .receive_notification::<lsp::notification::DidOpenTextDocument>()
321 .await
322 .text_document,
323 lsp::TextDocumentItem {
324 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
325 version: 0,
326 text: "{\"a\": 1}".to_string(),
327 language_id: Default::default()
328 }
329 );
330
331 // This buffer is configured based on the second language server's
332 // capabilities.
333 json_buffer.update(cx, |buffer, _| {
334 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
335 });
336
337 // When opening another buffer whose language server is already running,
338 // it is also configured based on the existing language server's capabilities.
339 let rust_buffer2 = project
340 .update(cx, |project, cx| {
341 project.open_local_buffer("/the-root/test2.rs", cx)
342 })
343 .await
344 .unwrap();
345 rust_buffer2.update(cx, |buffer, _| {
346 assert_eq!(
347 buffer.completion_triggers(),
348 &[".".to_string(), "::".to_string()]
349 );
350 });
351
352 // Changes are reported only to servers matching the buffer's language.
353 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
354 rust_buffer2.update(cx, |buffer, cx| {
355 buffer.edit([(0..0, "let x = 1;")], None, cx)
356 });
357 assert_eq!(
358 fake_rust_server
359 .receive_notification::<lsp::notification::DidChangeTextDocument>()
360 .await
361 .text_document,
362 lsp::VersionedTextDocumentIdentifier::new(
363 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
364 1
365 )
366 );
367
368 // Save notifications are reported to all servers.
369 project
370 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
371 .await
372 .unwrap();
373 assert_eq!(
374 fake_rust_server
375 .receive_notification::<lsp::notification::DidSaveTextDocument>()
376 .await
377 .text_document,
378 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
379 );
380 assert_eq!(
381 fake_json_server
382 .receive_notification::<lsp::notification::DidSaveTextDocument>()
383 .await
384 .text_document,
385 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
386 );
387
388 // Renames are reported only to servers matching the buffer's language.
389 fs.rename(
390 Path::new("/the-root/test2.rs"),
391 Path::new("/the-root/test3.rs"),
392 Default::default(),
393 )
394 .await
395 .unwrap();
396 assert_eq!(
397 fake_rust_server
398 .receive_notification::<lsp::notification::DidCloseTextDocument>()
399 .await
400 .text_document,
401 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
402 );
403 assert_eq!(
404 fake_rust_server
405 .receive_notification::<lsp::notification::DidOpenTextDocument>()
406 .await
407 .text_document,
408 lsp::TextDocumentItem {
409 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
410 version: 0,
411 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
412 language_id: Default::default()
413 },
414 );
415
416 rust_buffer2.update(cx, |buffer, cx| {
417 buffer.update_diagnostics(
418 LanguageServerId(0),
419 DiagnosticSet::from_sorted_entries(
420 vec![DiagnosticEntry {
421 diagnostic: Default::default(),
422 range: Anchor::MIN..Anchor::MAX,
423 }],
424 &buffer.snapshot(),
425 ),
426 cx,
427 );
428 assert_eq!(
429 buffer
430 .snapshot()
431 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
432 .count(),
433 1
434 );
435 });
436
437 // When the rename changes the extension of the file, the buffer gets closed on the old
438 // language server and gets opened on the new one.
439 fs.rename(
440 Path::new("/the-root/test3.rs"),
441 Path::new("/the-root/test3.json"),
442 Default::default(),
443 )
444 .await
445 .unwrap();
446 assert_eq!(
447 fake_rust_server
448 .receive_notification::<lsp::notification::DidCloseTextDocument>()
449 .await
450 .text_document,
451 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
452 );
453 assert_eq!(
454 fake_json_server
455 .receive_notification::<lsp::notification::DidOpenTextDocument>()
456 .await
457 .text_document,
458 lsp::TextDocumentItem {
459 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
460 version: 0,
461 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
462 language_id: Default::default()
463 },
464 );
465
466 // We clear the diagnostics, since the language has changed.
467 rust_buffer2.update(cx, |buffer, _| {
468 assert_eq!(
469 buffer
470 .snapshot()
471 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
472 .count(),
473 0
474 );
475 });
476
477 // The renamed file's version resets after changing language server.
478 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
479 assert_eq!(
480 fake_json_server
481 .receive_notification::<lsp::notification::DidChangeTextDocument>()
482 .await
483 .text_document,
484 lsp::VersionedTextDocumentIdentifier::new(
485 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
486 1
487 )
488 );
489
490 // Restart language servers
491 project.update(cx, |project, cx| {
492 project.restart_language_servers_for_buffers(
493 vec![rust_buffer.clone(), json_buffer.clone()],
494 cx,
495 );
496 });
497
498 let mut rust_shutdown_requests = fake_rust_server
499 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
500 let mut json_shutdown_requests = fake_json_server
501 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
502 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
503
504 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
505 let mut fake_json_server = fake_json_servers.next().await.unwrap();
506
507 // Ensure rust document is reopened in new rust language server
508 assert_eq!(
509 fake_rust_server
510 .receive_notification::<lsp::notification::DidOpenTextDocument>()
511 .await
512 .text_document,
513 lsp::TextDocumentItem {
514 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
515 version: 0,
516 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
517 language_id: Default::default()
518 }
519 );
520
521 // Ensure json documents are reopened in new json language server
522 assert_set_eq!(
523 [
524 fake_json_server
525 .receive_notification::<lsp::notification::DidOpenTextDocument>()
526 .await
527 .text_document,
528 fake_json_server
529 .receive_notification::<lsp::notification::DidOpenTextDocument>()
530 .await
531 .text_document,
532 ],
533 [
534 lsp::TextDocumentItem {
535 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
536 version: 0,
537 text: json_buffer.update(cx, |buffer, _| buffer.text()),
538 language_id: Default::default()
539 },
540 lsp::TextDocumentItem {
541 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
542 version: 0,
543 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
544 language_id: Default::default()
545 }
546 ]
547 );
548
549 // Close notifications are reported only to servers matching the buffer's language.
550 cx.update(|_| drop(json_buffer));
551 let close_message = lsp::DidCloseTextDocumentParams {
552 text_document: lsp::TextDocumentIdentifier::new(
553 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
554 ),
555 };
556 assert_eq!(
557 fake_json_server
558 .receive_notification::<lsp::notification::DidCloseTextDocument>()
559 .await,
560 close_message,
561 );
562}
563
564#[gpui::test]
565async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
566 init_test(cx);
567
568 let fs = FakeFs::new(cx.executor());
569 fs.insert_tree(
570 "/the-root",
571 json!({
572 ".gitignore": "target\n",
573 "src": {
574 "a.rs": "",
575 "b.rs": "",
576 },
577 "target": {
578 "x": {
579 "out": {
580 "x.rs": ""
581 }
582 },
583 "y": {
584 "out": {
585 "y.rs": "",
586 }
587 },
588 "z": {
589 "out": {
590 "z.rs": ""
591 }
592 }
593 }
594 }),
595 )
596 .await;
597
598 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
599 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
600 language_registry.add(rust_lang());
601 let mut fake_servers = language_registry.register_fake_lsp_adapter(
602 "Rust",
603 FakeLspAdapter {
604 name: "the-language-server",
605 ..Default::default()
606 },
607 );
608
609 cx.executor().run_until_parked();
610
611 // Start the language server by opening a buffer with a compatible file extension.
612 let _buffer = project
613 .update(cx, |project, cx| {
614 project.open_local_buffer("/the-root/src/a.rs", cx)
615 })
616 .await
617 .unwrap();
618
619 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
620 project.update(cx, |project, cx| {
621 let worktree = project.worktrees().next().unwrap();
622 assert_eq!(
623 worktree
624 .read(cx)
625 .snapshot()
626 .entries(true)
627 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
628 .collect::<Vec<_>>(),
629 &[
630 (Path::new(""), false),
631 (Path::new(".gitignore"), false),
632 (Path::new("src"), false),
633 (Path::new("src/a.rs"), false),
634 (Path::new("src/b.rs"), false),
635 (Path::new("target"), true),
636 ]
637 );
638 });
639
640 let prev_read_dir_count = fs.read_dir_call_count();
641
642 // Keep track of the FS events reported to the language server.
643 let fake_server = fake_servers.next().await.unwrap();
644 let file_changes = Arc::new(Mutex::new(Vec::new()));
645 fake_server
646 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
647 registrations: vec![lsp::Registration {
648 id: Default::default(),
649 method: "workspace/didChangeWatchedFiles".to_string(),
650 register_options: serde_json::to_value(
651 lsp::DidChangeWatchedFilesRegistrationOptions {
652 watchers: vec![
653 lsp::FileSystemWatcher {
654 glob_pattern: lsp::GlobPattern::String(
655 "/the-root/Cargo.toml".to_string(),
656 ),
657 kind: None,
658 },
659 lsp::FileSystemWatcher {
660 glob_pattern: lsp::GlobPattern::String(
661 "/the-root/src/*.{rs,c}".to_string(),
662 ),
663 kind: None,
664 },
665 lsp::FileSystemWatcher {
666 glob_pattern: lsp::GlobPattern::String(
667 "/the-root/target/y/**/*.rs".to_string(),
668 ),
669 kind: None,
670 },
671 ],
672 },
673 )
674 .ok(),
675 }],
676 })
677 .await
678 .unwrap();
679 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
680 let file_changes = file_changes.clone();
681 move |params, _| {
682 let mut file_changes = file_changes.lock();
683 file_changes.extend(params.changes);
684 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
685 }
686 });
687
688 cx.executor().run_until_parked();
689 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
690 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
691
692 // Now the language server has asked us to watch an ignored directory path,
693 // so we recursively load it.
694 project.update(cx, |project, cx| {
695 let worktree = project.worktrees().next().unwrap();
696 assert_eq!(
697 worktree
698 .read(cx)
699 .snapshot()
700 .entries(true)
701 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
702 .collect::<Vec<_>>(),
703 &[
704 (Path::new(""), false),
705 (Path::new(".gitignore"), false),
706 (Path::new("src"), false),
707 (Path::new("src/a.rs"), false),
708 (Path::new("src/b.rs"), false),
709 (Path::new("target"), true),
710 (Path::new("target/x"), true),
711 (Path::new("target/y"), true),
712 (Path::new("target/y/out"), true),
713 (Path::new("target/y/out/y.rs"), true),
714 (Path::new("target/z"), true),
715 ]
716 );
717 });
718
719 // Perform some file system mutations, two of which match the watched patterns,
720 // and one of which does not.
721 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
722 .await
723 .unwrap();
724 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
725 .await
726 .unwrap();
727 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
728 .await
729 .unwrap();
730 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
731 .await
732 .unwrap();
733 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
734 .await
735 .unwrap();
736
737 // The language server receives events for the FS mutations that match its watch patterns.
738 cx.executor().run_until_parked();
739 assert_eq!(
740 &*file_changes.lock(),
741 &[
742 lsp::FileEvent {
743 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
744 typ: lsp::FileChangeType::DELETED,
745 },
746 lsp::FileEvent {
747 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
748 typ: lsp::FileChangeType::CREATED,
749 },
750 lsp::FileEvent {
751 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
752 typ: lsp::FileChangeType::CREATED,
753 },
754 ]
755 );
756}
757
758#[gpui::test]
759async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
760 init_test(cx);
761
762 let fs = FakeFs::new(cx.executor());
763 fs.insert_tree(
764 "/dir",
765 json!({
766 "a.rs": "let a = 1;",
767 "b.rs": "let b = 2;"
768 }),
769 )
770 .await;
771
772 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
773
774 let buffer_a = project
775 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
776 .await
777 .unwrap();
778 let buffer_b = project
779 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
780 .await
781 .unwrap();
782
783 project.update(cx, |project, cx| {
784 project
785 .update_diagnostics(
786 LanguageServerId(0),
787 lsp::PublishDiagnosticsParams {
788 uri: Url::from_file_path("/dir/a.rs").unwrap(),
789 version: None,
790 diagnostics: vec![lsp::Diagnostic {
791 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
792 severity: Some(lsp::DiagnosticSeverity::ERROR),
793 message: "error 1".to_string(),
794 ..Default::default()
795 }],
796 },
797 &[],
798 cx,
799 )
800 .unwrap();
801 project
802 .update_diagnostics(
803 LanguageServerId(0),
804 lsp::PublishDiagnosticsParams {
805 uri: Url::from_file_path("/dir/b.rs").unwrap(),
806 version: None,
807 diagnostics: vec![lsp::Diagnostic {
808 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
809 severity: Some(lsp::DiagnosticSeverity::WARNING),
810 message: "error 2".to_string(),
811 ..Default::default()
812 }],
813 },
814 &[],
815 cx,
816 )
817 .unwrap();
818 });
819
820 buffer_a.update(cx, |buffer, _| {
821 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
822 assert_eq!(
823 chunks
824 .iter()
825 .map(|(s, d)| (s.as_str(), *d))
826 .collect::<Vec<_>>(),
827 &[
828 ("let ", None),
829 ("a", Some(DiagnosticSeverity::ERROR)),
830 (" = 1;", None),
831 ]
832 );
833 });
834 buffer_b.update(cx, |buffer, _| {
835 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
836 assert_eq!(
837 chunks
838 .iter()
839 .map(|(s, d)| (s.as_str(), *d))
840 .collect::<Vec<_>>(),
841 &[
842 ("let ", None),
843 ("b", Some(DiagnosticSeverity::WARNING)),
844 (" = 2;", None),
845 ]
846 );
847 });
848}
849
850#[gpui::test]
851async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
852 init_test(cx);
853
854 let fs = FakeFs::new(cx.executor());
855 fs.insert_tree(
856 "/root",
857 json!({
858 "dir": {
859 ".git": {
860 "HEAD": "ref: refs/heads/main",
861 },
862 ".gitignore": "b.rs",
863 "a.rs": "let a = 1;",
864 "b.rs": "let b = 2;",
865 },
866 "other.rs": "let b = c;"
867 }),
868 )
869 .await;
870
871 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
872 let (worktree, _) = project
873 .update(cx, |project, cx| {
874 project.find_or_create_local_worktree("/root/dir", true, cx)
875 })
876 .await
877 .unwrap();
878 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
879
880 let (worktree, _) = project
881 .update(cx, |project, cx| {
882 project.find_or_create_local_worktree("/root/other.rs", false, cx)
883 })
884 .await
885 .unwrap();
886 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
887
888 let server_id = LanguageServerId(0);
889 project.update(cx, |project, cx| {
890 project
891 .update_diagnostics(
892 server_id,
893 lsp::PublishDiagnosticsParams {
894 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
895 version: None,
896 diagnostics: vec![lsp::Diagnostic {
897 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
898 severity: Some(lsp::DiagnosticSeverity::ERROR),
899 message: "unused variable 'b'".to_string(),
900 ..Default::default()
901 }],
902 },
903 &[],
904 cx,
905 )
906 .unwrap();
907 project
908 .update_diagnostics(
909 server_id,
910 lsp::PublishDiagnosticsParams {
911 uri: Url::from_file_path("/root/other.rs").unwrap(),
912 version: None,
913 diagnostics: vec![lsp::Diagnostic {
914 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
915 severity: Some(lsp::DiagnosticSeverity::ERROR),
916 message: "unknown variable 'c'".to_string(),
917 ..Default::default()
918 }],
919 },
920 &[],
921 cx,
922 )
923 .unwrap();
924 });
925
926 let main_ignored_buffer = project
927 .update(cx, |project, cx| {
928 project.open_buffer((main_worktree_id, "b.rs"), cx)
929 })
930 .await
931 .unwrap();
932 main_ignored_buffer.update(cx, |buffer, _| {
933 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
934 assert_eq!(
935 chunks
936 .iter()
937 .map(|(s, d)| (s.as_str(), *d))
938 .collect::<Vec<_>>(),
939 &[
940 ("let ", None),
941 ("b", Some(DiagnosticSeverity::ERROR)),
942 (" = 2;", None),
943 ],
944 "Gigitnored buffers should still get in-buffer diagnostics",
945 );
946 });
947 let other_buffer = project
948 .update(cx, |project, cx| {
949 project.open_buffer((other_worktree_id, ""), cx)
950 })
951 .await
952 .unwrap();
953 other_buffer.update(cx, |buffer, _| {
954 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
955 assert_eq!(
956 chunks
957 .iter()
958 .map(|(s, d)| (s.as_str(), *d))
959 .collect::<Vec<_>>(),
960 &[
961 ("let b = ", None),
962 ("c", Some(DiagnosticSeverity::ERROR)),
963 (";", None),
964 ],
965 "Buffers from hidden projects should still get in-buffer diagnostics"
966 );
967 });
968
969 project.update(cx, |project, cx| {
970 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
971 assert_eq!(
972 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
973 vec![(
974 ProjectPath {
975 worktree_id: main_worktree_id,
976 path: Arc::from(Path::new("b.rs")),
977 },
978 server_id,
979 DiagnosticSummary {
980 error_count: 1,
981 warning_count: 0,
982 }
983 )]
984 );
985 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
986 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
987 });
988}
989
990#[gpui::test]
991async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
992 init_test(cx);
993
994 let progress_token = "the-progress-token";
995
996 let fs = FakeFs::new(cx.executor());
997 fs.insert_tree(
998 "/dir",
999 json!({
1000 "a.rs": "fn a() { A }",
1001 "b.rs": "const y: i32 = 1",
1002 }),
1003 )
1004 .await;
1005
1006 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1007 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1008
1009 language_registry.add(rust_lang());
1010 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1011 "Rust",
1012 FakeLspAdapter {
1013 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1014 disk_based_diagnostics_sources: vec!["disk".into()],
1015 ..Default::default()
1016 },
1017 );
1018
1019 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1020
1021 // Cause worktree to start the fake language server
1022 let _buffer = project
1023 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1024 .await
1025 .unwrap();
1026
1027 let mut events = cx.events(&project);
1028
1029 let fake_server = fake_servers.next().await.unwrap();
1030 assert_eq!(
1031 events.next().await.unwrap(),
1032 Event::LanguageServerAdded(LanguageServerId(0)),
1033 );
1034
1035 fake_server
1036 .start_progress(format!("{}/0", progress_token))
1037 .await;
1038 assert_eq!(
1039 events.next().await.unwrap(),
1040 Event::DiskBasedDiagnosticsStarted {
1041 language_server_id: LanguageServerId(0),
1042 }
1043 );
1044
1045 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1046 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1047 version: None,
1048 diagnostics: vec![lsp::Diagnostic {
1049 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1050 severity: Some(lsp::DiagnosticSeverity::ERROR),
1051 message: "undefined variable 'A'".to_string(),
1052 ..Default::default()
1053 }],
1054 });
1055 assert_eq!(
1056 events.next().await.unwrap(),
1057 Event::DiagnosticsUpdated {
1058 language_server_id: LanguageServerId(0),
1059 path: (worktree_id, Path::new("a.rs")).into()
1060 }
1061 );
1062
1063 fake_server.end_progress(format!("{}/0", progress_token));
1064 assert_eq!(
1065 events.next().await.unwrap(),
1066 Event::DiskBasedDiagnosticsFinished {
1067 language_server_id: LanguageServerId(0)
1068 }
1069 );
1070
1071 let buffer = project
1072 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1073 .await
1074 .unwrap();
1075
1076 buffer.update(cx, |buffer, _| {
1077 let snapshot = buffer.snapshot();
1078 let diagnostics = snapshot
1079 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1080 .collect::<Vec<_>>();
1081 assert_eq!(
1082 diagnostics,
1083 &[DiagnosticEntry {
1084 range: Point::new(0, 9)..Point::new(0, 10),
1085 diagnostic: Diagnostic {
1086 severity: lsp::DiagnosticSeverity::ERROR,
1087 message: "undefined variable 'A'".to_string(),
1088 group_id: 0,
1089 is_primary: true,
1090 ..Default::default()
1091 }
1092 }]
1093 )
1094 });
1095
1096 // Ensure publishing empty diagnostics twice only results in one update event.
1097 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1098 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1099 version: None,
1100 diagnostics: Default::default(),
1101 });
1102 assert_eq!(
1103 events.next().await.unwrap(),
1104 Event::DiagnosticsUpdated {
1105 language_server_id: LanguageServerId(0),
1106 path: (worktree_id, Path::new("a.rs")).into()
1107 }
1108 );
1109
1110 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1111 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1112 version: None,
1113 diagnostics: Default::default(),
1114 });
1115 cx.executor().run_until_parked();
1116 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1117}
1118
1119#[gpui::test]
1120async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1121 init_test(cx);
1122
1123 let progress_token = "the-progress-token";
1124
1125 let fs = FakeFs::new(cx.executor());
1126 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1127
1128 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1129
1130 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1131 language_registry.add(rust_lang());
1132 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1133 "Rust",
1134 FakeLspAdapter {
1135 name: "the-language-server",
1136 disk_based_diagnostics_sources: vec!["disk".into()],
1137 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1138 ..Default::default()
1139 },
1140 );
1141
1142 let buffer = project
1143 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1144 .await
1145 .unwrap();
1146
1147 // Simulate diagnostics starting to update.
1148 let fake_server = fake_servers.next().await.unwrap();
1149 fake_server.start_progress(progress_token).await;
1150
1151 // Restart the server before the diagnostics finish updating.
1152 project.update(cx, |project, cx| {
1153 project.restart_language_servers_for_buffers([buffer], cx);
1154 });
1155 let mut events = cx.events(&project);
1156
1157 // Simulate the newly started server sending more diagnostics.
1158 let fake_server = fake_servers.next().await.unwrap();
1159 assert_eq!(
1160 events.next().await.unwrap(),
1161 Event::LanguageServerAdded(LanguageServerId(1))
1162 );
1163 fake_server.start_progress(progress_token).await;
1164 assert_eq!(
1165 events.next().await.unwrap(),
1166 Event::DiskBasedDiagnosticsStarted {
1167 language_server_id: LanguageServerId(1)
1168 }
1169 );
1170 project.update(cx, |project, _| {
1171 assert_eq!(
1172 project
1173 .language_servers_running_disk_based_diagnostics()
1174 .collect::<Vec<_>>(),
1175 [LanguageServerId(1)]
1176 );
1177 });
1178
1179 // All diagnostics are considered done, despite the old server's diagnostic
1180 // task never completing.
1181 fake_server.end_progress(progress_token);
1182 assert_eq!(
1183 events.next().await.unwrap(),
1184 Event::DiskBasedDiagnosticsFinished {
1185 language_server_id: LanguageServerId(1)
1186 }
1187 );
1188 project.update(cx, |project, _| {
1189 assert_eq!(
1190 project
1191 .language_servers_running_disk_based_diagnostics()
1192 .collect::<Vec<_>>(),
1193 [LanguageServerId(0); 0]
1194 );
1195 });
1196}
1197
1198#[gpui::test]
1199async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1200 init_test(cx);
1201
1202 let fs = FakeFs::new(cx.executor());
1203 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1204
1205 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1206
1207 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1208 language_registry.add(rust_lang());
1209 let mut fake_servers =
1210 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1211
1212 let buffer = project
1213 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1214 .await
1215 .unwrap();
1216
1217 // Publish diagnostics
1218 let fake_server = fake_servers.next().await.unwrap();
1219 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1220 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1221 version: None,
1222 diagnostics: vec![lsp::Diagnostic {
1223 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1224 severity: Some(lsp::DiagnosticSeverity::ERROR),
1225 message: "the message".to_string(),
1226 ..Default::default()
1227 }],
1228 });
1229
1230 cx.executor().run_until_parked();
1231 buffer.update(cx, |buffer, _| {
1232 assert_eq!(
1233 buffer
1234 .snapshot()
1235 .diagnostics_in_range::<_, usize>(0..1, false)
1236 .map(|entry| entry.diagnostic.message.clone())
1237 .collect::<Vec<_>>(),
1238 ["the message".to_string()]
1239 );
1240 });
1241 project.update(cx, |project, cx| {
1242 assert_eq!(
1243 project.diagnostic_summary(false, cx),
1244 DiagnosticSummary {
1245 error_count: 1,
1246 warning_count: 0,
1247 }
1248 );
1249 });
1250
1251 project.update(cx, |project, cx| {
1252 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1253 });
1254
1255 // The diagnostics are cleared.
1256 cx.executor().run_until_parked();
1257 buffer.update(cx, |buffer, _| {
1258 assert_eq!(
1259 buffer
1260 .snapshot()
1261 .diagnostics_in_range::<_, usize>(0..1, false)
1262 .map(|entry| entry.diagnostic.message.clone())
1263 .collect::<Vec<_>>(),
1264 Vec::<String>::new(),
1265 );
1266 });
1267 project.update(cx, |project, cx| {
1268 assert_eq!(
1269 project.diagnostic_summary(false, cx),
1270 DiagnosticSummary {
1271 error_count: 0,
1272 warning_count: 0,
1273 }
1274 );
1275 });
1276}
1277
1278#[gpui::test]
1279async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1280 init_test(cx);
1281
1282 let fs = FakeFs::new(cx.executor());
1283 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1284
1285 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1286 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1287
1288 language_registry.add(rust_lang());
1289 let mut fake_servers =
1290 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1291
1292 let buffer = project
1293 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1294 .await
1295 .unwrap();
1296
1297 // Before restarting the server, report diagnostics with an unknown buffer version.
1298 let fake_server = fake_servers.next().await.unwrap();
1299 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1300 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1301 version: Some(10000),
1302 diagnostics: Vec::new(),
1303 });
1304 cx.executor().run_until_parked();
1305
1306 project.update(cx, |project, cx| {
1307 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1308 });
1309 let mut fake_server = fake_servers.next().await.unwrap();
1310 let notification = fake_server
1311 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1312 .await
1313 .text_document;
1314 assert_eq!(notification.version, 0);
1315}
1316
1317#[gpui::test]
1318async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1319 init_test(cx);
1320
1321 let fs = FakeFs::new(cx.executor());
1322 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1323 .await;
1324
1325 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1326 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1327
1328 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
1329 "Rust",
1330 FakeLspAdapter {
1331 name: "rust-lsp",
1332 ..Default::default()
1333 },
1334 );
1335 let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
1336 "JavaScript",
1337 FakeLspAdapter {
1338 name: "js-lsp",
1339 ..Default::default()
1340 },
1341 );
1342 language_registry.add(rust_lang());
1343 language_registry.add(js_lang());
1344
1345 let _rs_buffer = project
1346 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1347 .await
1348 .unwrap();
1349 let _js_buffer = project
1350 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1351 .await
1352 .unwrap();
1353
1354 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1355 assert_eq!(
1356 fake_rust_server_1
1357 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1358 .await
1359 .text_document
1360 .uri
1361 .as_str(),
1362 "file:///dir/a.rs"
1363 );
1364
1365 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1366 assert_eq!(
1367 fake_js_server
1368 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1369 .await
1370 .text_document
1371 .uri
1372 .as_str(),
1373 "file:///dir/b.js"
1374 );
1375
1376 // Disable Rust language server, ensuring only that server gets stopped.
1377 cx.update(|cx| {
1378 cx.update_global(|settings: &mut SettingsStore, cx| {
1379 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1380 settings.languages.insert(
1381 Arc::from("Rust"),
1382 LanguageSettingsContent {
1383 enable_language_server: Some(false),
1384 ..Default::default()
1385 },
1386 );
1387 });
1388 })
1389 });
1390 fake_rust_server_1
1391 .receive_notification::<lsp::notification::Exit>()
1392 .await;
1393
1394 // Enable Rust and disable JavaScript language servers, ensuring that the
1395 // former gets started again and that the latter stops.
1396 cx.update(|cx| {
1397 cx.update_global(|settings: &mut SettingsStore, cx| {
1398 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1399 settings.languages.insert(
1400 Arc::from("Rust"),
1401 LanguageSettingsContent {
1402 enable_language_server: Some(true),
1403 ..Default::default()
1404 },
1405 );
1406 settings.languages.insert(
1407 Arc::from("JavaScript"),
1408 LanguageSettingsContent {
1409 enable_language_server: Some(false),
1410 ..Default::default()
1411 },
1412 );
1413 });
1414 })
1415 });
1416 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1417 assert_eq!(
1418 fake_rust_server_2
1419 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1420 .await
1421 .text_document
1422 .uri
1423 .as_str(),
1424 "file:///dir/a.rs"
1425 );
1426 fake_js_server
1427 .receive_notification::<lsp::notification::Exit>()
1428 .await;
1429}
1430
1431#[gpui::test(iterations = 3)]
1432async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1433 init_test(cx);
1434
1435 let text = "
1436 fn a() { A }
1437 fn b() { BB }
1438 fn c() { CCC }
1439 "
1440 .unindent();
1441
1442 let fs = FakeFs::new(cx.executor());
1443 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1444
1445 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1446 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1447
1448 language_registry.add(rust_lang());
1449 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1450 "Rust",
1451 FakeLspAdapter {
1452 disk_based_diagnostics_sources: vec!["disk".into()],
1453 ..Default::default()
1454 },
1455 );
1456
1457 let buffer = project
1458 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1459 .await
1460 .unwrap();
1461
1462 let mut fake_server = fake_servers.next().await.unwrap();
1463 let open_notification = fake_server
1464 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1465 .await;
1466
1467 // Edit the buffer, moving the content down
1468 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1469 let change_notification_1 = fake_server
1470 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1471 .await;
1472 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1473
1474 // Report some diagnostics for the initial version of the buffer
1475 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1476 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1477 version: Some(open_notification.text_document.version),
1478 diagnostics: vec![
1479 lsp::Diagnostic {
1480 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1481 severity: Some(DiagnosticSeverity::ERROR),
1482 message: "undefined variable 'A'".to_string(),
1483 source: Some("disk".to_string()),
1484 ..Default::default()
1485 },
1486 lsp::Diagnostic {
1487 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1488 severity: Some(DiagnosticSeverity::ERROR),
1489 message: "undefined variable 'BB'".to_string(),
1490 source: Some("disk".to_string()),
1491 ..Default::default()
1492 },
1493 lsp::Diagnostic {
1494 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1495 severity: Some(DiagnosticSeverity::ERROR),
1496 source: Some("disk".to_string()),
1497 message: "undefined variable 'CCC'".to_string(),
1498 ..Default::default()
1499 },
1500 ],
1501 });
1502
1503 // The diagnostics have moved down since they were created.
1504 cx.executor().run_until_parked();
1505 buffer.update(cx, |buffer, _| {
1506 assert_eq!(
1507 buffer
1508 .snapshot()
1509 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1510 .collect::<Vec<_>>(),
1511 &[
1512 DiagnosticEntry {
1513 range: Point::new(3, 9)..Point::new(3, 11),
1514 diagnostic: Diagnostic {
1515 source: Some("disk".into()),
1516 severity: DiagnosticSeverity::ERROR,
1517 message: "undefined variable 'BB'".to_string(),
1518 is_disk_based: true,
1519 group_id: 1,
1520 is_primary: true,
1521 ..Default::default()
1522 },
1523 },
1524 DiagnosticEntry {
1525 range: Point::new(4, 9)..Point::new(4, 12),
1526 diagnostic: Diagnostic {
1527 source: Some("disk".into()),
1528 severity: DiagnosticSeverity::ERROR,
1529 message: "undefined variable 'CCC'".to_string(),
1530 is_disk_based: true,
1531 group_id: 2,
1532 is_primary: true,
1533 ..Default::default()
1534 }
1535 }
1536 ]
1537 );
1538 assert_eq!(
1539 chunks_with_diagnostics(buffer, 0..buffer.len()),
1540 [
1541 ("\n\nfn a() { ".to_string(), None),
1542 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1543 (" }\nfn b() { ".to_string(), None),
1544 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1545 (" }\nfn c() { ".to_string(), None),
1546 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1547 (" }\n".to_string(), None),
1548 ]
1549 );
1550 assert_eq!(
1551 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1552 [
1553 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1554 (" }\nfn c() { ".to_string(), None),
1555 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1556 ]
1557 );
1558 });
1559
1560 // Ensure overlapping diagnostics are highlighted correctly.
1561 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1562 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1563 version: Some(open_notification.text_document.version),
1564 diagnostics: vec![
1565 lsp::Diagnostic {
1566 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1567 severity: Some(DiagnosticSeverity::ERROR),
1568 message: "undefined variable 'A'".to_string(),
1569 source: Some("disk".to_string()),
1570 ..Default::default()
1571 },
1572 lsp::Diagnostic {
1573 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1574 severity: Some(DiagnosticSeverity::WARNING),
1575 message: "unreachable statement".to_string(),
1576 source: Some("disk".to_string()),
1577 ..Default::default()
1578 },
1579 ],
1580 });
1581
1582 cx.executor().run_until_parked();
1583 buffer.update(cx, |buffer, _| {
1584 assert_eq!(
1585 buffer
1586 .snapshot()
1587 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1588 .collect::<Vec<_>>(),
1589 &[
1590 DiagnosticEntry {
1591 range: Point::new(2, 9)..Point::new(2, 12),
1592 diagnostic: Diagnostic {
1593 source: Some("disk".into()),
1594 severity: DiagnosticSeverity::WARNING,
1595 message: "unreachable statement".to_string(),
1596 is_disk_based: true,
1597 group_id: 4,
1598 is_primary: true,
1599 ..Default::default()
1600 }
1601 },
1602 DiagnosticEntry {
1603 range: Point::new(2, 9)..Point::new(2, 10),
1604 diagnostic: Diagnostic {
1605 source: Some("disk".into()),
1606 severity: DiagnosticSeverity::ERROR,
1607 message: "undefined variable 'A'".to_string(),
1608 is_disk_based: true,
1609 group_id: 3,
1610 is_primary: true,
1611 ..Default::default()
1612 },
1613 }
1614 ]
1615 );
1616 assert_eq!(
1617 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1618 [
1619 ("fn a() { ".to_string(), None),
1620 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1621 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1622 ("\n".to_string(), None),
1623 ]
1624 );
1625 assert_eq!(
1626 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1627 [
1628 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1629 ("\n".to_string(), None),
1630 ]
1631 );
1632 });
1633
1634 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1635 // changes since the last save.
1636 buffer.update(cx, |buffer, cx| {
1637 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1638 buffer.edit(
1639 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1640 None,
1641 cx,
1642 );
1643 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1644 });
1645 let change_notification_2 = fake_server
1646 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1647 .await;
1648 assert!(
1649 change_notification_2.text_document.version > change_notification_1.text_document.version
1650 );
1651
1652 // Handle out-of-order diagnostics
1653 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1654 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1655 version: Some(change_notification_2.text_document.version),
1656 diagnostics: vec![
1657 lsp::Diagnostic {
1658 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1659 severity: Some(DiagnosticSeverity::ERROR),
1660 message: "undefined variable 'BB'".to_string(),
1661 source: Some("disk".to_string()),
1662 ..Default::default()
1663 },
1664 lsp::Diagnostic {
1665 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1666 severity: Some(DiagnosticSeverity::WARNING),
1667 message: "undefined variable 'A'".to_string(),
1668 source: Some("disk".to_string()),
1669 ..Default::default()
1670 },
1671 ],
1672 });
1673
1674 cx.executor().run_until_parked();
1675 buffer.update(cx, |buffer, _| {
1676 assert_eq!(
1677 buffer
1678 .snapshot()
1679 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1680 .collect::<Vec<_>>(),
1681 &[
1682 DiagnosticEntry {
1683 range: Point::new(2, 21)..Point::new(2, 22),
1684 diagnostic: Diagnostic {
1685 source: Some("disk".into()),
1686 severity: DiagnosticSeverity::WARNING,
1687 message: "undefined variable 'A'".to_string(),
1688 is_disk_based: true,
1689 group_id: 6,
1690 is_primary: true,
1691 ..Default::default()
1692 }
1693 },
1694 DiagnosticEntry {
1695 range: Point::new(3, 9)..Point::new(3, 14),
1696 diagnostic: Diagnostic {
1697 source: Some("disk".into()),
1698 severity: DiagnosticSeverity::ERROR,
1699 message: "undefined variable 'BB'".to_string(),
1700 is_disk_based: true,
1701 group_id: 5,
1702 is_primary: true,
1703 ..Default::default()
1704 },
1705 }
1706 ]
1707 );
1708 });
1709}
1710
1711#[gpui::test]
1712async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1713 init_test(cx);
1714
1715 let text = concat!(
1716 "let one = ;\n", //
1717 "let two = \n",
1718 "let three = 3;\n",
1719 );
1720
1721 let fs = FakeFs::new(cx.executor());
1722 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1723
1724 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1725 let buffer = project
1726 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1727 .await
1728 .unwrap();
1729
1730 project.update(cx, |project, cx| {
1731 project
1732 .update_buffer_diagnostics(
1733 &buffer,
1734 LanguageServerId(0),
1735 None,
1736 vec![
1737 DiagnosticEntry {
1738 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1739 diagnostic: Diagnostic {
1740 severity: DiagnosticSeverity::ERROR,
1741 message: "syntax error 1".to_string(),
1742 ..Default::default()
1743 },
1744 },
1745 DiagnosticEntry {
1746 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1747 diagnostic: Diagnostic {
1748 severity: DiagnosticSeverity::ERROR,
1749 message: "syntax error 2".to_string(),
1750 ..Default::default()
1751 },
1752 },
1753 ],
1754 cx,
1755 )
1756 .unwrap();
1757 });
1758
1759 // An empty range is extended forward to include the following character.
1760 // At the end of a line, an empty range is extended backward to include
1761 // the preceding character.
1762 buffer.update(cx, |buffer, _| {
1763 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1764 assert_eq!(
1765 chunks
1766 .iter()
1767 .map(|(s, d)| (s.as_str(), *d))
1768 .collect::<Vec<_>>(),
1769 &[
1770 ("let one = ", None),
1771 (";", Some(DiagnosticSeverity::ERROR)),
1772 ("\nlet two =", None),
1773 (" ", Some(DiagnosticSeverity::ERROR)),
1774 ("\nlet three = 3;\n", None)
1775 ]
1776 );
1777 });
1778}
1779
1780#[gpui::test]
1781async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1782 init_test(cx);
1783
1784 let fs = FakeFs::new(cx.executor());
1785 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1786 .await;
1787
1788 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1789
1790 project.update(cx, |project, cx| {
1791 project
1792 .update_diagnostic_entries(
1793 LanguageServerId(0),
1794 Path::new("/dir/a.rs").to_owned(),
1795 None,
1796 vec![DiagnosticEntry {
1797 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1798 diagnostic: Diagnostic {
1799 severity: DiagnosticSeverity::ERROR,
1800 is_primary: true,
1801 message: "syntax error a1".to_string(),
1802 ..Default::default()
1803 },
1804 }],
1805 cx,
1806 )
1807 .unwrap();
1808 project
1809 .update_diagnostic_entries(
1810 LanguageServerId(1),
1811 Path::new("/dir/a.rs").to_owned(),
1812 None,
1813 vec![DiagnosticEntry {
1814 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1815 diagnostic: Diagnostic {
1816 severity: DiagnosticSeverity::ERROR,
1817 is_primary: true,
1818 message: "syntax error b1".to_string(),
1819 ..Default::default()
1820 },
1821 }],
1822 cx,
1823 )
1824 .unwrap();
1825
1826 assert_eq!(
1827 project.diagnostic_summary(false, cx),
1828 DiagnosticSummary {
1829 error_count: 2,
1830 warning_count: 0,
1831 }
1832 );
1833 });
1834}
1835
1836#[gpui::test]
1837async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1838 init_test(cx);
1839
1840 let text = "
1841 fn a() {
1842 f1();
1843 }
1844 fn b() {
1845 f2();
1846 }
1847 fn c() {
1848 f3();
1849 }
1850 "
1851 .unindent();
1852
1853 let fs = FakeFs::new(cx.executor());
1854 fs.insert_tree(
1855 "/dir",
1856 json!({
1857 "a.rs": text.clone(),
1858 }),
1859 )
1860 .await;
1861
1862 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1863
1864 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1865 language_registry.add(rust_lang());
1866 let mut fake_servers =
1867 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1868
1869 let buffer = project
1870 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1871 .await
1872 .unwrap();
1873
1874 let mut fake_server = fake_servers.next().await.unwrap();
1875 let lsp_document_version = fake_server
1876 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1877 .await
1878 .text_document
1879 .version;
1880
1881 // Simulate editing the buffer after the language server computes some edits.
1882 buffer.update(cx, |buffer, cx| {
1883 buffer.edit(
1884 [(
1885 Point::new(0, 0)..Point::new(0, 0),
1886 "// above first function\n",
1887 )],
1888 None,
1889 cx,
1890 );
1891 buffer.edit(
1892 [(
1893 Point::new(2, 0)..Point::new(2, 0),
1894 " // inside first function\n",
1895 )],
1896 None,
1897 cx,
1898 );
1899 buffer.edit(
1900 [(
1901 Point::new(6, 4)..Point::new(6, 4),
1902 "// inside second function ",
1903 )],
1904 None,
1905 cx,
1906 );
1907
1908 assert_eq!(
1909 buffer.text(),
1910 "
1911 // above first function
1912 fn a() {
1913 // inside first function
1914 f1();
1915 }
1916 fn b() {
1917 // inside second function f2();
1918 }
1919 fn c() {
1920 f3();
1921 }
1922 "
1923 .unindent()
1924 );
1925 });
1926
1927 let edits = project
1928 .update(cx, |project, cx| {
1929 project.edits_from_lsp(
1930 &buffer,
1931 vec![
1932 // replace body of first function
1933 lsp::TextEdit {
1934 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1935 new_text: "
1936 fn a() {
1937 f10();
1938 }
1939 "
1940 .unindent(),
1941 },
1942 // edit inside second function
1943 lsp::TextEdit {
1944 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1945 new_text: "00".into(),
1946 },
1947 // edit inside third function via two distinct edits
1948 lsp::TextEdit {
1949 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1950 new_text: "4000".into(),
1951 },
1952 lsp::TextEdit {
1953 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1954 new_text: "".into(),
1955 },
1956 ],
1957 LanguageServerId(0),
1958 Some(lsp_document_version),
1959 cx,
1960 )
1961 })
1962 .await
1963 .unwrap();
1964
1965 buffer.update(cx, |buffer, cx| {
1966 for (range, new_text) in edits {
1967 buffer.edit([(range, new_text)], None, cx);
1968 }
1969 assert_eq!(
1970 buffer.text(),
1971 "
1972 // above first function
1973 fn a() {
1974 // inside first function
1975 f10();
1976 }
1977 fn b() {
1978 // inside second function f200();
1979 }
1980 fn c() {
1981 f4000();
1982 }
1983 "
1984 .unindent()
1985 );
1986 });
1987}
1988
1989#[gpui::test]
1990async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1991 init_test(cx);
1992
1993 let text = "
1994 use a::b;
1995 use a::c;
1996
1997 fn f() {
1998 b();
1999 c();
2000 }
2001 "
2002 .unindent();
2003
2004 let fs = FakeFs::new(cx.executor());
2005 fs.insert_tree(
2006 "/dir",
2007 json!({
2008 "a.rs": text.clone(),
2009 }),
2010 )
2011 .await;
2012
2013 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2014 let buffer = project
2015 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2016 .await
2017 .unwrap();
2018
2019 // Simulate the language server sending us a small edit in the form of a very large diff.
2020 // Rust-analyzer does this when performing a merge-imports code action.
2021 let edits = project
2022 .update(cx, |project, cx| {
2023 project.edits_from_lsp(
2024 &buffer,
2025 [
2026 // Replace the first use statement without editing the semicolon.
2027 lsp::TextEdit {
2028 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2029 new_text: "a::{b, c}".into(),
2030 },
2031 // Reinsert the remainder of the file between the semicolon and the final
2032 // newline of the file.
2033 lsp::TextEdit {
2034 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2035 new_text: "\n\n".into(),
2036 },
2037 lsp::TextEdit {
2038 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2039 new_text: "
2040 fn f() {
2041 b();
2042 c();
2043 }"
2044 .unindent(),
2045 },
2046 // Delete everything after the first newline of the file.
2047 lsp::TextEdit {
2048 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2049 new_text: "".into(),
2050 },
2051 ],
2052 LanguageServerId(0),
2053 None,
2054 cx,
2055 )
2056 })
2057 .await
2058 .unwrap();
2059
2060 buffer.update(cx, |buffer, cx| {
2061 let edits = edits
2062 .into_iter()
2063 .map(|(range, text)| {
2064 (
2065 range.start.to_point(buffer)..range.end.to_point(buffer),
2066 text,
2067 )
2068 })
2069 .collect::<Vec<_>>();
2070
2071 assert_eq!(
2072 edits,
2073 [
2074 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2075 (Point::new(1, 0)..Point::new(2, 0), "".into())
2076 ]
2077 );
2078
2079 for (range, new_text) in edits {
2080 buffer.edit([(range, new_text)], None, cx);
2081 }
2082 assert_eq!(
2083 buffer.text(),
2084 "
2085 use a::{b, c};
2086
2087 fn f() {
2088 b();
2089 c();
2090 }
2091 "
2092 .unindent()
2093 );
2094 });
2095}
2096
2097#[gpui::test]
2098async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2099 init_test(cx);
2100
2101 let text = "
2102 use a::b;
2103 use a::c;
2104
2105 fn f() {
2106 b();
2107 c();
2108 }
2109 "
2110 .unindent();
2111
2112 let fs = FakeFs::new(cx.executor());
2113 fs.insert_tree(
2114 "/dir",
2115 json!({
2116 "a.rs": text.clone(),
2117 }),
2118 )
2119 .await;
2120
2121 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2122 let buffer = project
2123 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2124 .await
2125 .unwrap();
2126
2127 // Simulate the language server sending us edits in a non-ordered fashion,
2128 // with ranges sometimes being inverted or pointing to invalid locations.
2129 let edits = project
2130 .update(cx, |project, cx| {
2131 project.edits_from_lsp(
2132 &buffer,
2133 [
2134 lsp::TextEdit {
2135 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2136 new_text: "\n\n".into(),
2137 },
2138 lsp::TextEdit {
2139 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2140 new_text: "a::{b, c}".into(),
2141 },
2142 lsp::TextEdit {
2143 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2144 new_text: "".into(),
2145 },
2146 lsp::TextEdit {
2147 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2148 new_text: "
2149 fn f() {
2150 b();
2151 c();
2152 }"
2153 .unindent(),
2154 },
2155 ],
2156 LanguageServerId(0),
2157 None,
2158 cx,
2159 )
2160 })
2161 .await
2162 .unwrap();
2163
2164 buffer.update(cx, |buffer, cx| {
2165 let edits = edits
2166 .into_iter()
2167 .map(|(range, text)| {
2168 (
2169 range.start.to_point(buffer)..range.end.to_point(buffer),
2170 text,
2171 )
2172 })
2173 .collect::<Vec<_>>();
2174
2175 assert_eq!(
2176 edits,
2177 [
2178 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2179 (Point::new(1, 0)..Point::new(2, 0), "".into())
2180 ]
2181 );
2182
2183 for (range, new_text) in edits {
2184 buffer.edit([(range, new_text)], None, cx);
2185 }
2186 assert_eq!(
2187 buffer.text(),
2188 "
2189 use a::{b, c};
2190
2191 fn f() {
2192 b();
2193 c();
2194 }
2195 "
2196 .unindent()
2197 );
2198 });
2199}
2200
2201fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2202 buffer: &Buffer,
2203 range: Range<T>,
2204) -> Vec<(String, Option<DiagnosticSeverity>)> {
2205 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2206 for chunk in buffer.snapshot().chunks(range, true) {
2207 if chunks.last().map_or(false, |prev_chunk| {
2208 prev_chunk.1 == chunk.diagnostic_severity
2209 }) {
2210 chunks.last_mut().unwrap().0.push_str(chunk.text);
2211 } else {
2212 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2213 }
2214 }
2215 chunks
2216}
2217
2218#[gpui::test(iterations = 10)]
2219async fn test_definition(cx: &mut gpui::TestAppContext) {
2220 init_test(cx);
2221
2222 let fs = FakeFs::new(cx.executor());
2223 fs.insert_tree(
2224 "/dir",
2225 json!({
2226 "a.rs": "const fn a() { A }",
2227 "b.rs": "const y: i32 = crate::a()",
2228 }),
2229 )
2230 .await;
2231
2232 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2233
2234 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2235 language_registry.add(rust_lang());
2236 let mut fake_servers =
2237 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
2238
2239 let buffer = project
2240 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2241 .await
2242 .unwrap();
2243
2244 let fake_server = fake_servers.next().await.unwrap();
2245 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2246 let params = params.text_document_position_params;
2247 assert_eq!(
2248 params.text_document.uri.to_file_path().unwrap(),
2249 Path::new("/dir/b.rs"),
2250 );
2251 assert_eq!(params.position, lsp::Position::new(0, 22));
2252
2253 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2254 lsp::Location::new(
2255 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2256 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2257 ),
2258 )))
2259 });
2260
2261 let mut definitions = project
2262 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2263 .await
2264 .unwrap();
2265
2266 // Assert no new language server started
2267 cx.executor().run_until_parked();
2268 assert!(fake_servers.try_next().is_err());
2269
2270 assert_eq!(definitions.len(), 1);
2271 let definition = definitions.pop().unwrap();
2272 cx.update(|cx| {
2273 let target_buffer = definition.target.buffer.read(cx);
2274 assert_eq!(
2275 target_buffer
2276 .file()
2277 .unwrap()
2278 .as_local()
2279 .unwrap()
2280 .abs_path(cx),
2281 Path::new("/dir/a.rs"),
2282 );
2283 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2284 assert_eq!(
2285 list_worktrees(&project, cx),
2286 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2287 );
2288
2289 drop(definition);
2290 });
2291 cx.update(|cx| {
2292 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2293 });
2294
2295 fn list_worktrees<'a>(
2296 project: &'a Model<Project>,
2297 cx: &'a AppContext,
2298 ) -> Vec<(&'a Path, bool)> {
2299 project
2300 .read(cx)
2301 .worktrees()
2302 .map(|worktree| {
2303 let worktree = worktree.read(cx);
2304 (
2305 worktree.as_local().unwrap().abs_path().as_ref(),
2306 worktree.is_visible(),
2307 )
2308 })
2309 .collect::<Vec<_>>()
2310 }
2311}
2312
2313#[gpui::test]
2314async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2315 init_test(cx);
2316
2317 let fs = FakeFs::new(cx.executor());
2318 fs.insert_tree(
2319 "/dir",
2320 json!({
2321 "a.ts": "",
2322 }),
2323 )
2324 .await;
2325
2326 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2327
2328 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2329 language_registry.add(typescript_lang());
2330 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2331 "TypeScript",
2332 FakeLspAdapter {
2333 capabilities: lsp::ServerCapabilities {
2334 completion_provider: Some(lsp::CompletionOptions {
2335 trigger_characters: Some(vec![":".to_string()]),
2336 ..Default::default()
2337 }),
2338 ..Default::default()
2339 },
2340 ..Default::default()
2341 },
2342 );
2343
2344 let buffer = project
2345 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2346 .await
2347 .unwrap();
2348
2349 let fake_server = fake_language_servers.next().await.unwrap();
2350
2351 let text = "let a = b.fqn";
2352 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2353 let completions = project.update(cx, |project, cx| {
2354 project.completions(&buffer, text.len(), cx)
2355 });
2356
2357 fake_server
2358 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2359 Ok(Some(lsp::CompletionResponse::Array(vec![
2360 lsp::CompletionItem {
2361 label: "fullyQualifiedName?".into(),
2362 insert_text: Some("fullyQualifiedName".into()),
2363 ..Default::default()
2364 },
2365 ])))
2366 })
2367 .next()
2368 .await;
2369 let completions = completions.await.unwrap();
2370 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2371 assert_eq!(completions.len(), 1);
2372 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2373 assert_eq!(
2374 completions[0].old_range.to_offset(&snapshot),
2375 text.len() - 3..text.len()
2376 );
2377
2378 let text = "let a = \"atoms/cmp\"";
2379 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2380 let completions = project.update(cx, |project, cx| {
2381 project.completions(&buffer, text.len() - 1, cx)
2382 });
2383
2384 fake_server
2385 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2386 Ok(Some(lsp::CompletionResponse::Array(vec![
2387 lsp::CompletionItem {
2388 label: "component".into(),
2389 ..Default::default()
2390 },
2391 ])))
2392 })
2393 .next()
2394 .await;
2395 let completions = completions.await.unwrap();
2396 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2397 assert_eq!(completions.len(), 1);
2398 assert_eq!(completions[0].new_text, "component");
2399 assert_eq!(
2400 completions[0].old_range.to_offset(&snapshot),
2401 text.len() - 4..text.len() - 1
2402 );
2403}
2404
2405#[gpui::test]
2406async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2407 init_test(cx);
2408
2409 let fs = FakeFs::new(cx.executor());
2410 fs.insert_tree(
2411 "/dir",
2412 json!({
2413 "a.ts": "",
2414 }),
2415 )
2416 .await;
2417
2418 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2419
2420 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2421 language_registry.add(typescript_lang());
2422 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2423 "TypeScript",
2424 FakeLspAdapter {
2425 capabilities: lsp::ServerCapabilities {
2426 completion_provider: Some(lsp::CompletionOptions {
2427 trigger_characters: Some(vec![":".to_string()]),
2428 ..Default::default()
2429 }),
2430 ..Default::default()
2431 },
2432 ..Default::default()
2433 },
2434 );
2435
2436 let buffer = project
2437 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2438 .await
2439 .unwrap();
2440
2441 let fake_server = fake_language_servers.next().await.unwrap();
2442
2443 let text = "let a = b.fqn";
2444 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2445 let completions = project.update(cx, |project, cx| {
2446 project.completions(&buffer, text.len(), cx)
2447 });
2448
2449 fake_server
2450 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2451 Ok(Some(lsp::CompletionResponse::Array(vec![
2452 lsp::CompletionItem {
2453 label: "fullyQualifiedName?".into(),
2454 insert_text: Some("fully\rQualified\r\nName".into()),
2455 ..Default::default()
2456 },
2457 ])))
2458 })
2459 .next()
2460 .await;
2461 let completions = completions.await.unwrap();
2462 assert_eq!(completions.len(), 1);
2463 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2464}
2465
2466#[gpui::test(iterations = 10)]
2467async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2468 init_test(cx);
2469
2470 let fs = FakeFs::new(cx.executor());
2471 fs.insert_tree(
2472 "/dir",
2473 json!({
2474 "a.ts": "a",
2475 }),
2476 )
2477 .await;
2478
2479 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2480
2481 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2482 language_registry.add(typescript_lang());
2483 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2484 "TypeScript",
2485 FakeLspAdapter {
2486 capabilities: lsp::ServerCapabilities {
2487 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
2488 lsp::CodeActionOptions {
2489 resolve_provider: Some(true),
2490 ..lsp::CodeActionOptions::default()
2491 },
2492 )),
2493 ..lsp::ServerCapabilities::default()
2494 },
2495 ..FakeLspAdapter::default()
2496 },
2497 );
2498
2499 let buffer = project
2500 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2501 .await
2502 .unwrap();
2503
2504 let fake_server = fake_language_servers.next().await.unwrap();
2505
2506 // Language server returns code actions that contain commands, and not edits.
2507 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2508 fake_server
2509 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2510 Ok(Some(vec![
2511 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2512 title: "The code action".into(),
2513 data: Some(serde_json::json!({
2514 "command": "_the/command",
2515 })),
2516 ..lsp::CodeAction::default()
2517 }),
2518 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2519 title: "two".into(),
2520 ..lsp::CodeAction::default()
2521 }),
2522 ]))
2523 })
2524 .next()
2525 .await;
2526
2527 let action = actions.await[0].clone();
2528 let apply = project.update(cx, |project, cx| {
2529 project.apply_code_action(buffer.clone(), action, true, cx)
2530 });
2531
2532 // Resolving the code action does not populate its edits. In absence of
2533 // edits, we must execute the given command.
2534 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2535 |mut action, _| async move {
2536 if action.data.is_some() {
2537 action.command = Some(lsp::Command {
2538 title: "The command".into(),
2539 command: "_the/command".into(),
2540 arguments: Some(vec![json!("the-argument")]),
2541 });
2542 }
2543 Ok(action)
2544 },
2545 );
2546
2547 // While executing the command, the language server sends the editor
2548 // a `workspaceEdit` request.
2549 fake_server
2550 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2551 let fake = fake_server.clone();
2552 move |params, _| {
2553 assert_eq!(params.command, "_the/command");
2554 let fake = fake.clone();
2555 async move {
2556 fake.server
2557 .request::<lsp::request::ApplyWorkspaceEdit>(
2558 lsp::ApplyWorkspaceEditParams {
2559 label: None,
2560 edit: lsp::WorkspaceEdit {
2561 changes: Some(
2562 [(
2563 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2564 vec![lsp::TextEdit {
2565 range: lsp::Range::new(
2566 lsp::Position::new(0, 0),
2567 lsp::Position::new(0, 0),
2568 ),
2569 new_text: "X".into(),
2570 }],
2571 )]
2572 .into_iter()
2573 .collect(),
2574 ),
2575 ..Default::default()
2576 },
2577 },
2578 )
2579 .await
2580 .unwrap();
2581 Ok(Some(json!(null)))
2582 }
2583 }
2584 })
2585 .next()
2586 .await;
2587
2588 // Applying the code action returns a project transaction containing the edits
2589 // sent by the language server in its `workspaceEdit` request.
2590 let transaction = apply.await.unwrap();
2591 assert!(transaction.0.contains_key(&buffer));
2592 buffer.update(cx, |buffer, cx| {
2593 assert_eq!(buffer.text(), "Xa");
2594 buffer.undo(cx);
2595 assert_eq!(buffer.text(), "a");
2596 });
2597}
2598
2599#[gpui::test(iterations = 10)]
2600async fn test_save_file(cx: &mut gpui::TestAppContext) {
2601 init_test(cx);
2602
2603 let fs = FakeFs::new(cx.executor());
2604 fs.insert_tree(
2605 "/dir",
2606 json!({
2607 "file1": "the old contents",
2608 }),
2609 )
2610 .await;
2611
2612 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2613 let buffer = project
2614 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2615 .await
2616 .unwrap();
2617 buffer.update(cx, |buffer, cx| {
2618 assert_eq!(buffer.text(), "the old contents");
2619 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2620 });
2621
2622 project
2623 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2624 .await
2625 .unwrap();
2626
2627 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2628 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2629}
2630
2631#[gpui::test(iterations = 30)]
2632async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2633 init_test(cx);
2634
2635 let fs = FakeFs::new(cx.executor().clone());
2636 fs.insert_tree(
2637 "/dir",
2638 json!({
2639 "file1": "the original contents",
2640 }),
2641 )
2642 .await;
2643
2644 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2645 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2646 let buffer = project
2647 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2648 .await
2649 .unwrap();
2650
2651 // Simulate buffer diffs being slow, so that they don't complete before
2652 // the next file change occurs.
2653 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2654
2655 // Change the buffer's file on disk, and then wait for the file change
2656 // to be detected by the worktree, so that the buffer starts reloading.
2657 fs.save(
2658 "/dir/file1".as_ref(),
2659 &"the first contents".into(),
2660 Default::default(),
2661 )
2662 .await
2663 .unwrap();
2664 worktree.next_event(cx);
2665
2666 // Change the buffer's file again. Depending on the random seed, the
2667 // previous file change may still be in progress.
2668 fs.save(
2669 "/dir/file1".as_ref(),
2670 &"the second contents".into(),
2671 Default::default(),
2672 )
2673 .await
2674 .unwrap();
2675 worktree.next_event(cx);
2676
2677 cx.executor().run_until_parked();
2678 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2679 buffer.read_with(cx, |buffer, _| {
2680 assert_eq!(buffer.text(), on_disk_text);
2681 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2682 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2683 });
2684}
2685
2686#[gpui::test(iterations = 30)]
2687async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2688 init_test(cx);
2689
2690 let fs = FakeFs::new(cx.executor().clone());
2691 fs.insert_tree(
2692 "/dir",
2693 json!({
2694 "file1": "the original contents",
2695 }),
2696 )
2697 .await;
2698
2699 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2700 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2701 let buffer = project
2702 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2703 .await
2704 .unwrap();
2705
2706 // Simulate buffer diffs being slow, so that they don't complete before
2707 // the next file change occurs.
2708 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2709
2710 // Change the buffer's file on disk, and then wait for the file change
2711 // to be detected by the worktree, so that the buffer starts reloading.
2712 fs.save(
2713 "/dir/file1".as_ref(),
2714 &"the first contents".into(),
2715 Default::default(),
2716 )
2717 .await
2718 .unwrap();
2719 worktree.next_event(cx);
2720
2721 cx.executor()
2722 .spawn(cx.executor().simulate_random_delay())
2723 .await;
2724
2725 // Perform a noop edit, causing the buffer's version to increase.
2726 buffer.update(cx, |buffer, cx| {
2727 buffer.edit([(0..0, " ")], None, cx);
2728 buffer.undo(cx);
2729 });
2730
2731 cx.executor().run_until_parked();
2732 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2733 buffer.read_with(cx, |buffer, _| {
2734 let buffer_text = buffer.text();
2735 if buffer_text == on_disk_text {
2736 assert!(
2737 !buffer.is_dirty() && !buffer.has_conflict(),
2738 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2739 );
2740 }
2741 // If the file change occurred while the buffer was processing the first
2742 // change, the buffer will be in a conflicting state.
2743 else {
2744 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2745 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2746 }
2747 });
2748}
2749
2750#[gpui::test]
2751async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2752 init_test(cx);
2753
2754 let fs = FakeFs::new(cx.executor());
2755 fs.insert_tree(
2756 "/dir",
2757 json!({
2758 "file1": "the old contents",
2759 }),
2760 )
2761 .await;
2762
2763 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2764 let buffer = project
2765 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2766 .await
2767 .unwrap();
2768 buffer.update(cx, |buffer, cx| {
2769 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2770 });
2771
2772 project
2773 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2774 .await
2775 .unwrap();
2776
2777 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2778 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2779}
2780
2781#[gpui::test]
2782async fn test_save_as(cx: &mut gpui::TestAppContext) {
2783 init_test(cx);
2784
2785 let fs = FakeFs::new(cx.executor());
2786 fs.insert_tree("/dir", json!({})).await;
2787
2788 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2789
2790 let languages = project.update(cx, |project, _| project.languages().clone());
2791 languages.add(rust_lang());
2792
2793 let buffer = project.update(cx, |project, cx| {
2794 project.create_buffer("", None, cx).unwrap()
2795 });
2796 buffer.update(cx, |buffer, cx| {
2797 buffer.edit([(0..0, "abc")], None, cx);
2798 assert!(buffer.is_dirty());
2799 assert!(!buffer.has_conflict());
2800 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2801 });
2802 project
2803 .update(cx, |project, cx| {
2804 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2805 })
2806 .await
2807 .unwrap();
2808 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2809
2810 cx.executor().run_until_parked();
2811 buffer.update(cx, |buffer, cx| {
2812 assert_eq!(
2813 buffer.file().unwrap().full_path(cx),
2814 Path::new("dir/file1.rs")
2815 );
2816 assert!(!buffer.is_dirty());
2817 assert!(!buffer.has_conflict());
2818 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2819 });
2820
2821 let opened_buffer = project
2822 .update(cx, |project, cx| {
2823 project.open_local_buffer("/dir/file1.rs", cx)
2824 })
2825 .await
2826 .unwrap();
2827 assert_eq!(opened_buffer, buffer);
2828}
2829
2830#[gpui::test(retries = 5)]
2831async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2832 init_test(cx);
2833 cx.executor().allow_parking();
2834
2835 let dir = temp_tree(json!({
2836 "a": {
2837 "file1": "",
2838 "file2": "",
2839 "file3": "",
2840 },
2841 "b": {
2842 "c": {
2843 "file4": "",
2844 "file5": "",
2845 }
2846 }
2847 }));
2848
2849 let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;
2850 let rpc = project.update(cx, |p, _| p.client.clone());
2851
2852 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2853 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2854 async move { buffer.await.unwrap() }
2855 };
2856 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2857 project.update(cx, |project, cx| {
2858 let tree = project.worktrees().next().unwrap();
2859 tree.read(cx)
2860 .entry_for_path(path)
2861 .unwrap_or_else(|| panic!("no entry for path {}", path))
2862 .id
2863 })
2864 };
2865
2866 let buffer2 = buffer_for_path("a/file2", cx).await;
2867 let buffer3 = buffer_for_path("a/file3", cx).await;
2868 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2869 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2870
2871 let file2_id = id_for_path("a/file2", cx);
2872 let file3_id = id_for_path("a/file3", cx);
2873 let file4_id = id_for_path("b/c/file4", cx);
2874
2875 // Create a remote copy of this worktree.
2876 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2877
2878 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2879
2880 let updates = Arc::new(Mutex::new(Vec::new()));
2881 tree.update(cx, |tree, cx| {
2882 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2883 let updates = updates.clone();
2884 move |update| {
2885 updates.lock().push(update);
2886 async { true }
2887 }
2888 });
2889 });
2890
2891 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2892
2893 cx.executor().run_until_parked();
2894
2895 cx.update(|cx| {
2896 assert!(!buffer2.read(cx).is_dirty());
2897 assert!(!buffer3.read(cx).is_dirty());
2898 assert!(!buffer4.read(cx).is_dirty());
2899 assert!(!buffer5.read(cx).is_dirty());
2900 });
2901
2902 // Rename and delete files and directories.
2903 tree.flush_fs_events(cx).await;
2904 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2905 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2906 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2907 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2908 tree.flush_fs_events(cx).await;
2909
2910 let expected_paths = vec![
2911 "a",
2912 "a/file1",
2913 "a/file2.new",
2914 "b",
2915 "d",
2916 "d/file3",
2917 "d/file4",
2918 ];
2919
2920 cx.update(|app| {
2921 assert_eq!(
2922 tree.read(app)
2923 .paths()
2924 .map(|p| p.to_str().unwrap())
2925 .collect::<Vec<_>>(),
2926 expected_paths
2927 );
2928 });
2929
2930 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2931 assert_eq!(id_for_path("d/file3", cx), file3_id);
2932 assert_eq!(id_for_path("d/file4", cx), file4_id);
2933
2934 cx.update(|cx| {
2935 assert_eq!(
2936 buffer2.read(cx).file().unwrap().path().as_ref(),
2937 Path::new("a/file2.new")
2938 );
2939 assert_eq!(
2940 buffer3.read(cx).file().unwrap().path().as_ref(),
2941 Path::new("d/file3")
2942 );
2943 assert_eq!(
2944 buffer4.read(cx).file().unwrap().path().as_ref(),
2945 Path::new("d/file4")
2946 );
2947 assert_eq!(
2948 buffer5.read(cx).file().unwrap().path().as_ref(),
2949 Path::new("b/c/file5")
2950 );
2951
2952 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
2953 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
2954 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
2955 assert!(buffer5.read(cx).file().unwrap().is_deleted());
2956 });
2957
2958 // Update the remote worktree. Check that it becomes consistent with the
2959 // local worktree.
2960 cx.executor().run_until_parked();
2961
2962 remote.update(cx, |remote, _| {
2963 for update in updates.lock().drain(..) {
2964 remote.as_remote_mut().unwrap().update_from_remote(update);
2965 }
2966 });
2967 cx.executor().run_until_parked();
2968 remote.update(cx, |remote, _| {
2969 assert_eq!(
2970 remote
2971 .paths()
2972 .map(|p| p.to_str().unwrap())
2973 .collect::<Vec<_>>(),
2974 expected_paths
2975 );
2976 });
2977}
2978
2979#[gpui::test(iterations = 10)]
2980async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
2981 init_test(cx);
2982
2983 let fs = FakeFs::new(cx.executor());
2984 fs.insert_tree(
2985 "/dir",
2986 json!({
2987 "a": {
2988 "file1": "",
2989 }
2990 }),
2991 )
2992 .await;
2993
2994 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2995 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2996 let tree_id = tree.update(cx, |tree, _| tree.id());
2997
2998 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2999 project.update(cx, |project, cx| {
3000 let tree = project.worktrees().next().unwrap();
3001 tree.read(cx)
3002 .entry_for_path(path)
3003 .unwrap_or_else(|| panic!("no entry for path {}", path))
3004 .id
3005 })
3006 };
3007
3008 let dir_id = id_for_path("a", cx);
3009 let file_id = id_for_path("a/file1", cx);
3010 let buffer = project
3011 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3012 .await
3013 .unwrap();
3014 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3015
3016 project
3017 .update(cx, |project, cx| {
3018 project.rename_entry(dir_id, Path::new("b"), cx)
3019 })
3020 .unwrap()
3021 .await
3022 .unwrap();
3023 cx.executor().run_until_parked();
3024
3025 assert_eq!(id_for_path("b", cx), dir_id);
3026 assert_eq!(id_for_path("b/file1", cx), file_id);
3027 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3028}
3029
3030#[gpui::test]
3031async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3032 init_test(cx);
3033
3034 let fs = FakeFs::new(cx.executor());
3035 fs.insert_tree(
3036 "/dir",
3037 json!({
3038 "a.txt": "a-contents",
3039 "b.txt": "b-contents",
3040 }),
3041 )
3042 .await;
3043
3044 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3045
3046 // Spawn multiple tasks to open paths, repeating some paths.
3047 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3048 (
3049 p.open_local_buffer("/dir/a.txt", cx),
3050 p.open_local_buffer("/dir/b.txt", cx),
3051 p.open_local_buffer("/dir/a.txt", cx),
3052 )
3053 });
3054
3055 let buffer_a_1 = buffer_a_1.await.unwrap();
3056 let buffer_a_2 = buffer_a_2.await.unwrap();
3057 let buffer_b = buffer_b.await.unwrap();
3058 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3059 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3060
3061 // There is only one buffer per path.
3062 let buffer_a_id = buffer_a_1.entity_id();
3063 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3064
3065 // Open the same path again while it is still open.
3066 drop(buffer_a_1);
3067 let buffer_a_3 = project
3068 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3069 .await
3070 .unwrap();
3071
3072 // There's still only one buffer per path.
3073 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3074}
3075
3076#[gpui::test]
3077async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3078 init_test(cx);
3079
3080 let fs = FakeFs::new(cx.executor());
3081 fs.insert_tree(
3082 "/dir",
3083 json!({
3084 "file1": "abc",
3085 "file2": "def",
3086 "file3": "ghi",
3087 }),
3088 )
3089 .await;
3090
3091 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3092
3093 let buffer1 = project
3094 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3095 .await
3096 .unwrap();
3097 let events = Arc::new(Mutex::new(Vec::new()));
3098
3099 // initially, the buffer isn't dirty.
3100 buffer1.update(cx, |buffer, cx| {
3101 cx.subscribe(&buffer1, {
3102 let events = events.clone();
3103 move |_, _, event, _| match event {
3104 BufferEvent::Operation(_) => {}
3105 _ => events.lock().push(event.clone()),
3106 }
3107 })
3108 .detach();
3109
3110 assert!(!buffer.is_dirty());
3111 assert!(events.lock().is_empty());
3112
3113 buffer.edit([(1..2, "")], None, cx);
3114 });
3115
3116 // after the first edit, the buffer is dirty, and emits a dirtied event.
3117 buffer1.update(cx, |buffer, cx| {
3118 assert!(buffer.text() == "ac");
3119 assert!(buffer.is_dirty());
3120 assert_eq!(
3121 *events.lock(),
3122 &[language::Event::Edited, language::Event::DirtyChanged]
3123 );
3124 events.lock().clear();
3125 buffer.did_save(
3126 buffer.version(),
3127 buffer.as_rope().fingerprint(),
3128 buffer.file().unwrap().mtime(),
3129 cx,
3130 );
3131 });
3132
3133 // after saving, the buffer is not dirty, and emits a saved event.
3134 buffer1.update(cx, |buffer, cx| {
3135 assert!(!buffer.is_dirty());
3136 assert_eq!(*events.lock(), &[language::Event::Saved]);
3137 events.lock().clear();
3138
3139 buffer.edit([(1..1, "B")], None, cx);
3140 buffer.edit([(2..2, "D")], None, cx);
3141 });
3142
3143 // after editing again, the buffer is dirty, and emits another dirty event.
3144 buffer1.update(cx, |buffer, cx| {
3145 assert!(buffer.text() == "aBDc");
3146 assert!(buffer.is_dirty());
3147 assert_eq!(
3148 *events.lock(),
3149 &[
3150 language::Event::Edited,
3151 language::Event::DirtyChanged,
3152 language::Event::Edited,
3153 ],
3154 );
3155 events.lock().clear();
3156
3157 // After restoring the buffer to its previously-saved state,
3158 // the buffer is not considered dirty anymore.
3159 buffer.edit([(1..3, "")], None, cx);
3160 assert!(buffer.text() == "ac");
3161 assert!(!buffer.is_dirty());
3162 });
3163
3164 assert_eq!(
3165 *events.lock(),
3166 &[language::Event::Edited, language::Event::DirtyChanged]
3167 );
3168
3169 // When a file is deleted, the buffer is considered dirty.
3170 let events = Arc::new(Mutex::new(Vec::new()));
3171 let buffer2 = project
3172 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3173 .await
3174 .unwrap();
3175 buffer2.update(cx, |_, cx| {
3176 cx.subscribe(&buffer2, {
3177 let events = events.clone();
3178 move |_, _, event, _| events.lock().push(event.clone())
3179 })
3180 .detach();
3181 });
3182
3183 fs.remove_file("/dir/file2".as_ref(), Default::default())
3184 .await
3185 .unwrap();
3186 cx.executor().run_until_parked();
3187 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3188 assert_eq!(
3189 *events.lock(),
3190 &[
3191 language::Event::DirtyChanged,
3192 language::Event::FileHandleChanged
3193 ]
3194 );
3195
3196 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3197 let events = Arc::new(Mutex::new(Vec::new()));
3198 let buffer3 = project
3199 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3200 .await
3201 .unwrap();
3202 buffer3.update(cx, |_, cx| {
3203 cx.subscribe(&buffer3, {
3204 let events = events.clone();
3205 move |_, _, event, _| events.lock().push(event.clone())
3206 })
3207 .detach();
3208 });
3209
3210 buffer3.update(cx, |buffer, cx| {
3211 buffer.edit([(0..0, "x")], None, cx);
3212 });
3213 events.lock().clear();
3214 fs.remove_file("/dir/file3".as_ref(), Default::default())
3215 .await
3216 .unwrap();
3217 cx.executor().run_until_parked();
3218 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3219 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3220}
3221
3222#[gpui::test]
3223async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3224 init_test(cx);
3225
3226 let initial_contents = "aaa\nbbbbb\nc\n";
3227 let fs = FakeFs::new(cx.executor());
3228 fs.insert_tree(
3229 "/dir",
3230 json!({
3231 "the-file": initial_contents,
3232 }),
3233 )
3234 .await;
3235 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3236 let buffer = project
3237 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3238 .await
3239 .unwrap();
3240
3241 let anchors = (0..3)
3242 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3243 .collect::<Vec<_>>();
3244
3245 // Change the file on disk, adding two new lines of text, and removing
3246 // one line.
3247 buffer.update(cx, |buffer, _| {
3248 assert!(!buffer.is_dirty());
3249 assert!(!buffer.has_conflict());
3250 });
3251 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3252 fs.save(
3253 "/dir/the-file".as_ref(),
3254 &new_contents.into(),
3255 LineEnding::Unix,
3256 )
3257 .await
3258 .unwrap();
3259
3260 // Because the buffer was not modified, it is reloaded from disk. Its
3261 // contents are edited according to the diff between the old and new
3262 // file contents.
3263 cx.executor().run_until_parked();
3264 buffer.update(cx, |buffer, _| {
3265 assert_eq!(buffer.text(), new_contents);
3266 assert!(!buffer.is_dirty());
3267 assert!(!buffer.has_conflict());
3268
3269 let anchor_positions = anchors
3270 .iter()
3271 .map(|anchor| anchor.to_point(&*buffer))
3272 .collect::<Vec<_>>();
3273 assert_eq!(
3274 anchor_positions,
3275 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3276 );
3277 });
3278
3279 // Modify the buffer
3280 buffer.update(cx, |buffer, cx| {
3281 buffer.edit([(0..0, " ")], None, cx);
3282 assert!(buffer.is_dirty());
3283 assert!(!buffer.has_conflict());
3284 });
3285
3286 // Change the file on disk again, adding blank lines to the beginning.
3287 fs.save(
3288 "/dir/the-file".as_ref(),
3289 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3290 LineEnding::Unix,
3291 )
3292 .await
3293 .unwrap();
3294
3295 // Because the buffer is modified, it doesn't reload from disk, but is
3296 // marked as having a conflict.
3297 cx.executor().run_until_parked();
3298 buffer.update(cx, |buffer, _| {
3299 assert!(buffer.has_conflict());
3300 });
3301}
3302
3303#[gpui::test]
3304async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3305 init_test(cx);
3306
3307 let fs = FakeFs::new(cx.executor());
3308 fs.insert_tree(
3309 "/dir",
3310 json!({
3311 "file1": "a\nb\nc\n",
3312 "file2": "one\r\ntwo\r\nthree\r\n",
3313 }),
3314 )
3315 .await;
3316
3317 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3318 let buffer1 = project
3319 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3320 .await
3321 .unwrap();
3322 let buffer2 = project
3323 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3324 .await
3325 .unwrap();
3326
3327 buffer1.update(cx, |buffer, _| {
3328 assert_eq!(buffer.text(), "a\nb\nc\n");
3329 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3330 });
3331 buffer2.update(cx, |buffer, _| {
3332 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3333 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3334 });
3335
3336 // Change a file's line endings on disk from unix to windows. The buffer's
3337 // state updates correctly.
3338 fs.save(
3339 "/dir/file1".as_ref(),
3340 &"aaa\nb\nc\n".into(),
3341 LineEnding::Windows,
3342 )
3343 .await
3344 .unwrap();
3345 cx.executor().run_until_parked();
3346 buffer1.update(cx, |buffer, _| {
3347 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3348 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3349 });
3350
3351 // Save a file with windows line endings. The file is written correctly.
3352 buffer2.update(cx, |buffer, cx| {
3353 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3354 });
3355 project
3356 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3357 .await
3358 .unwrap();
3359 assert_eq!(
3360 fs.load("/dir/file2".as_ref()).await.unwrap(),
3361 "one\r\ntwo\r\nthree\r\nfour\r\n",
3362 );
3363}
3364
3365#[gpui::test]
3366async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3367 init_test(cx);
3368
3369 let fs = FakeFs::new(cx.executor());
3370 fs.insert_tree(
3371 "/the-dir",
3372 json!({
3373 "a.rs": "
3374 fn foo(mut v: Vec<usize>) {
3375 for x in &v {
3376 v.push(1);
3377 }
3378 }
3379 "
3380 .unindent(),
3381 }),
3382 )
3383 .await;
3384
3385 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3386 let buffer = project
3387 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3388 .await
3389 .unwrap();
3390
3391 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3392 let message = lsp::PublishDiagnosticsParams {
3393 uri: buffer_uri.clone(),
3394 diagnostics: vec![
3395 lsp::Diagnostic {
3396 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3397 severity: Some(DiagnosticSeverity::WARNING),
3398 message: "error 1".to_string(),
3399 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3400 location: lsp::Location {
3401 uri: buffer_uri.clone(),
3402 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3403 },
3404 message: "error 1 hint 1".to_string(),
3405 }]),
3406 ..Default::default()
3407 },
3408 lsp::Diagnostic {
3409 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3410 severity: Some(DiagnosticSeverity::HINT),
3411 message: "error 1 hint 1".to_string(),
3412 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3413 location: lsp::Location {
3414 uri: buffer_uri.clone(),
3415 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3416 },
3417 message: "original diagnostic".to_string(),
3418 }]),
3419 ..Default::default()
3420 },
3421 lsp::Diagnostic {
3422 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3423 severity: Some(DiagnosticSeverity::ERROR),
3424 message: "error 2".to_string(),
3425 related_information: Some(vec![
3426 lsp::DiagnosticRelatedInformation {
3427 location: lsp::Location {
3428 uri: buffer_uri.clone(),
3429 range: lsp::Range::new(
3430 lsp::Position::new(1, 13),
3431 lsp::Position::new(1, 15),
3432 ),
3433 },
3434 message: "error 2 hint 1".to_string(),
3435 },
3436 lsp::DiagnosticRelatedInformation {
3437 location: lsp::Location {
3438 uri: buffer_uri.clone(),
3439 range: lsp::Range::new(
3440 lsp::Position::new(1, 13),
3441 lsp::Position::new(1, 15),
3442 ),
3443 },
3444 message: "error 2 hint 2".to_string(),
3445 },
3446 ]),
3447 ..Default::default()
3448 },
3449 lsp::Diagnostic {
3450 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3451 severity: Some(DiagnosticSeverity::HINT),
3452 message: "error 2 hint 1".to_string(),
3453 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3454 location: lsp::Location {
3455 uri: buffer_uri.clone(),
3456 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3457 },
3458 message: "original diagnostic".to_string(),
3459 }]),
3460 ..Default::default()
3461 },
3462 lsp::Diagnostic {
3463 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3464 severity: Some(DiagnosticSeverity::HINT),
3465 message: "error 2 hint 2".to_string(),
3466 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3467 location: lsp::Location {
3468 uri: buffer_uri,
3469 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3470 },
3471 message: "original diagnostic".to_string(),
3472 }]),
3473 ..Default::default()
3474 },
3475 ],
3476 version: None,
3477 };
3478
3479 project
3480 .update(cx, |p, cx| {
3481 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3482 })
3483 .unwrap();
3484 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3485
3486 assert_eq!(
3487 buffer
3488 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3489 .collect::<Vec<_>>(),
3490 &[
3491 DiagnosticEntry {
3492 range: Point::new(1, 8)..Point::new(1, 9),
3493 diagnostic: Diagnostic {
3494 severity: DiagnosticSeverity::WARNING,
3495 message: "error 1".to_string(),
3496 group_id: 1,
3497 is_primary: true,
3498 ..Default::default()
3499 }
3500 },
3501 DiagnosticEntry {
3502 range: Point::new(1, 8)..Point::new(1, 9),
3503 diagnostic: Diagnostic {
3504 severity: DiagnosticSeverity::HINT,
3505 message: "error 1 hint 1".to_string(),
3506 group_id: 1,
3507 is_primary: false,
3508 ..Default::default()
3509 }
3510 },
3511 DiagnosticEntry {
3512 range: Point::new(1, 13)..Point::new(1, 15),
3513 diagnostic: Diagnostic {
3514 severity: DiagnosticSeverity::HINT,
3515 message: "error 2 hint 1".to_string(),
3516 group_id: 0,
3517 is_primary: false,
3518 ..Default::default()
3519 }
3520 },
3521 DiagnosticEntry {
3522 range: Point::new(1, 13)..Point::new(1, 15),
3523 diagnostic: Diagnostic {
3524 severity: DiagnosticSeverity::HINT,
3525 message: "error 2 hint 2".to_string(),
3526 group_id: 0,
3527 is_primary: false,
3528 ..Default::default()
3529 }
3530 },
3531 DiagnosticEntry {
3532 range: Point::new(2, 8)..Point::new(2, 17),
3533 diagnostic: Diagnostic {
3534 severity: DiagnosticSeverity::ERROR,
3535 message: "error 2".to_string(),
3536 group_id: 0,
3537 is_primary: true,
3538 ..Default::default()
3539 }
3540 }
3541 ]
3542 );
3543
3544 assert_eq!(
3545 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3546 &[
3547 DiagnosticEntry {
3548 range: Point::new(1, 13)..Point::new(1, 15),
3549 diagnostic: Diagnostic {
3550 severity: DiagnosticSeverity::HINT,
3551 message: "error 2 hint 1".to_string(),
3552 group_id: 0,
3553 is_primary: false,
3554 ..Default::default()
3555 }
3556 },
3557 DiagnosticEntry {
3558 range: Point::new(1, 13)..Point::new(1, 15),
3559 diagnostic: Diagnostic {
3560 severity: DiagnosticSeverity::HINT,
3561 message: "error 2 hint 2".to_string(),
3562 group_id: 0,
3563 is_primary: false,
3564 ..Default::default()
3565 }
3566 },
3567 DiagnosticEntry {
3568 range: Point::new(2, 8)..Point::new(2, 17),
3569 diagnostic: Diagnostic {
3570 severity: DiagnosticSeverity::ERROR,
3571 message: "error 2".to_string(),
3572 group_id: 0,
3573 is_primary: true,
3574 ..Default::default()
3575 }
3576 }
3577 ]
3578 );
3579
3580 assert_eq!(
3581 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3582 &[
3583 DiagnosticEntry {
3584 range: Point::new(1, 8)..Point::new(1, 9),
3585 diagnostic: Diagnostic {
3586 severity: DiagnosticSeverity::WARNING,
3587 message: "error 1".to_string(),
3588 group_id: 1,
3589 is_primary: true,
3590 ..Default::default()
3591 }
3592 },
3593 DiagnosticEntry {
3594 range: Point::new(1, 8)..Point::new(1, 9),
3595 diagnostic: Diagnostic {
3596 severity: DiagnosticSeverity::HINT,
3597 message: "error 1 hint 1".to_string(),
3598 group_id: 1,
3599 is_primary: false,
3600 ..Default::default()
3601 }
3602 },
3603 ]
3604 );
3605}
3606
3607#[gpui::test]
3608async fn test_rename(cx: &mut gpui::TestAppContext) {
3609 init_test(cx);
3610
3611 let fs = FakeFs::new(cx.executor());
3612 fs.insert_tree(
3613 "/dir",
3614 json!({
3615 "one.rs": "const ONE: usize = 1;",
3616 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3617 }),
3618 )
3619 .await;
3620
3621 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3622
3623 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3624 language_registry.add(rust_lang());
3625 let mut fake_servers = language_registry.register_fake_lsp_adapter(
3626 "Rust",
3627 FakeLspAdapter {
3628 capabilities: lsp::ServerCapabilities {
3629 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3630 prepare_provider: Some(true),
3631 work_done_progress_options: Default::default(),
3632 })),
3633 ..Default::default()
3634 },
3635 ..Default::default()
3636 },
3637 );
3638
3639 let buffer = project
3640 .update(cx, |project, cx| {
3641 project.open_local_buffer("/dir/one.rs", cx)
3642 })
3643 .await
3644 .unwrap();
3645
3646 let fake_server = fake_servers.next().await.unwrap();
3647
3648 let response = project.update(cx, |project, cx| {
3649 project.prepare_rename(buffer.clone(), 7, cx)
3650 });
3651 fake_server
3652 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3653 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3654 assert_eq!(params.position, lsp::Position::new(0, 7));
3655 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3656 lsp::Position::new(0, 6),
3657 lsp::Position::new(0, 9),
3658 ))))
3659 })
3660 .next()
3661 .await
3662 .unwrap();
3663 let range = response.await.unwrap().unwrap();
3664 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3665 assert_eq!(range, 6..9);
3666
3667 let response = project.update(cx, |project, cx| {
3668 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3669 });
3670 fake_server
3671 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3672 assert_eq!(
3673 params.text_document_position.text_document.uri.as_str(),
3674 "file:///dir/one.rs"
3675 );
3676 assert_eq!(
3677 params.text_document_position.position,
3678 lsp::Position::new(0, 7)
3679 );
3680 assert_eq!(params.new_name, "THREE");
3681 Ok(Some(lsp::WorkspaceEdit {
3682 changes: Some(
3683 [
3684 (
3685 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3686 vec![lsp::TextEdit::new(
3687 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3688 "THREE".to_string(),
3689 )],
3690 ),
3691 (
3692 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3693 vec![
3694 lsp::TextEdit::new(
3695 lsp::Range::new(
3696 lsp::Position::new(0, 24),
3697 lsp::Position::new(0, 27),
3698 ),
3699 "THREE".to_string(),
3700 ),
3701 lsp::TextEdit::new(
3702 lsp::Range::new(
3703 lsp::Position::new(0, 35),
3704 lsp::Position::new(0, 38),
3705 ),
3706 "THREE".to_string(),
3707 ),
3708 ],
3709 ),
3710 ]
3711 .into_iter()
3712 .collect(),
3713 ),
3714 ..Default::default()
3715 }))
3716 })
3717 .next()
3718 .await
3719 .unwrap();
3720 let mut transaction = response.await.unwrap().0;
3721 assert_eq!(transaction.len(), 2);
3722 assert_eq!(
3723 transaction
3724 .remove_entry(&buffer)
3725 .unwrap()
3726 .0
3727 .update(cx, |buffer, _| buffer.text()),
3728 "const THREE: usize = 1;"
3729 );
3730 assert_eq!(
3731 transaction
3732 .into_keys()
3733 .next()
3734 .unwrap()
3735 .update(cx, |buffer, _| buffer.text()),
3736 "const TWO: usize = one::THREE + one::THREE;"
3737 );
3738}
3739
3740#[gpui::test]
3741async fn test_search(cx: &mut gpui::TestAppContext) {
3742 init_test(cx);
3743
3744 let fs = FakeFs::new(cx.executor());
3745 fs.insert_tree(
3746 "/dir",
3747 json!({
3748 "one.rs": "const ONE: usize = 1;",
3749 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3750 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3751 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3752 }),
3753 )
3754 .await;
3755 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3756 assert_eq!(
3757 search(
3758 &project,
3759 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3760 cx
3761 )
3762 .await
3763 .unwrap(),
3764 HashMap::from_iter([
3765 ("dir/two.rs".to_string(), vec![6..9]),
3766 ("dir/three.rs".to_string(), vec![37..40])
3767 ])
3768 );
3769
3770 let buffer_4 = project
3771 .update(cx, |project, cx| {
3772 project.open_local_buffer("/dir/four.rs", cx)
3773 })
3774 .await
3775 .unwrap();
3776 buffer_4.update(cx, |buffer, cx| {
3777 let text = "two::TWO";
3778 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3779 });
3780
3781 assert_eq!(
3782 search(
3783 &project,
3784 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3785 cx
3786 )
3787 .await
3788 .unwrap(),
3789 HashMap::from_iter([
3790 ("dir/two.rs".to_string(), vec![6..9]),
3791 ("dir/three.rs".to_string(), vec![37..40]),
3792 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3793 ])
3794 );
3795}
3796
3797#[gpui::test]
3798async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3799 init_test(cx);
3800
3801 let search_query = "file";
3802
3803 let fs = FakeFs::new(cx.executor());
3804 fs.insert_tree(
3805 "/dir",
3806 json!({
3807 "one.rs": r#"// Rust file one"#,
3808 "one.ts": r#"// TypeScript file one"#,
3809 "two.rs": r#"// Rust file two"#,
3810 "two.ts": r#"// TypeScript file two"#,
3811 }),
3812 )
3813 .await;
3814 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3815
3816 assert!(
3817 search(
3818 &project,
3819 SearchQuery::text(
3820 search_query,
3821 false,
3822 true,
3823 false,
3824 vec![PathMatcher::new("*.odd").unwrap()],
3825 Vec::new()
3826 )
3827 .unwrap(),
3828 cx
3829 )
3830 .await
3831 .unwrap()
3832 .is_empty(),
3833 "If no inclusions match, no files should be returned"
3834 );
3835
3836 assert_eq!(
3837 search(
3838 &project,
3839 SearchQuery::text(
3840 search_query,
3841 false,
3842 true,
3843 false,
3844 vec![PathMatcher::new("*.rs").unwrap()],
3845 Vec::new()
3846 )
3847 .unwrap(),
3848 cx
3849 )
3850 .await
3851 .unwrap(),
3852 HashMap::from_iter([
3853 ("dir/one.rs".to_string(), vec![8..12]),
3854 ("dir/two.rs".to_string(), vec![8..12]),
3855 ]),
3856 "Rust only search should give only Rust files"
3857 );
3858
3859 assert_eq!(
3860 search(
3861 &project,
3862 SearchQuery::text(
3863 search_query,
3864 false,
3865 true,
3866 false,
3867 vec![
3868 PathMatcher::new("*.ts").unwrap(),
3869 PathMatcher::new("*.odd").unwrap(),
3870 ],
3871 Vec::new()
3872 ).unwrap(),
3873 cx
3874 )
3875 .await
3876 .unwrap(),
3877 HashMap::from_iter([
3878 ("dir/one.ts".to_string(), vec![14..18]),
3879 ("dir/two.ts".to_string(), vec![14..18]),
3880 ]),
3881 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3882 );
3883
3884 assert_eq!(
3885 search(
3886 &project,
3887 SearchQuery::text(
3888 search_query,
3889 false,
3890 true,
3891 false,
3892 vec![
3893 PathMatcher::new("*.rs").unwrap(),
3894 PathMatcher::new("*.ts").unwrap(),
3895 PathMatcher::new("*.odd").unwrap(),
3896 ],
3897 Vec::new()
3898 ).unwrap(),
3899 cx
3900 )
3901 .await
3902 .unwrap(),
3903 HashMap::from_iter([
3904 ("dir/two.ts".to_string(), vec![14..18]),
3905 ("dir/one.rs".to_string(), vec![8..12]),
3906 ("dir/one.ts".to_string(), vec![14..18]),
3907 ("dir/two.rs".to_string(), vec![8..12]),
3908 ]),
3909 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3910 );
3911}
3912
3913#[gpui::test]
3914async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3915 init_test(cx);
3916
3917 let search_query = "file";
3918
3919 let fs = FakeFs::new(cx.executor());
3920 fs.insert_tree(
3921 "/dir",
3922 json!({
3923 "one.rs": r#"// Rust file one"#,
3924 "one.ts": r#"// TypeScript file one"#,
3925 "two.rs": r#"// Rust file two"#,
3926 "two.ts": r#"// TypeScript file two"#,
3927 }),
3928 )
3929 .await;
3930 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3931
3932 assert_eq!(
3933 search(
3934 &project,
3935 SearchQuery::text(
3936 search_query,
3937 false,
3938 true,
3939 false,
3940 Vec::new(),
3941 vec![PathMatcher::new("*.odd").unwrap()],
3942 )
3943 .unwrap(),
3944 cx
3945 )
3946 .await
3947 .unwrap(),
3948 HashMap::from_iter([
3949 ("dir/one.rs".to_string(), vec![8..12]),
3950 ("dir/one.ts".to_string(), vec![14..18]),
3951 ("dir/two.rs".to_string(), vec![8..12]),
3952 ("dir/two.ts".to_string(), vec![14..18]),
3953 ]),
3954 "If no exclusions match, all files should be returned"
3955 );
3956
3957 assert_eq!(
3958 search(
3959 &project,
3960 SearchQuery::text(
3961 search_query,
3962 false,
3963 true,
3964 false,
3965 Vec::new(),
3966 vec![PathMatcher::new("*.rs").unwrap()],
3967 )
3968 .unwrap(),
3969 cx
3970 )
3971 .await
3972 .unwrap(),
3973 HashMap::from_iter([
3974 ("dir/one.ts".to_string(), vec![14..18]),
3975 ("dir/two.ts".to_string(), vec![14..18]),
3976 ]),
3977 "Rust exclusion search should give only TypeScript files"
3978 );
3979
3980 assert_eq!(
3981 search(
3982 &project,
3983 SearchQuery::text(
3984 search_query,
3985 false,
3986 true,
3987 false,
3988 Vec::new(),
3989 vec![
3990 PathMatcher::new("*.ts").unwrap(),
3991 PathMatcher::new("*.odd").unwrap(),
3992 ],
3993 ).unwrap(),
3994 cx
3995 )
3996 .await
3997 .unwrap(),
3998 HashMap::from_iter([
3999 ("dir/one.rs".to_string(), vec![8..12]),
4000 ("dir/two.rs".to_string(), vec![8..12]),
4001 ]),
4002 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4003 );
4004
4005 assert!(
4006 search(
4007 &project,
4008 SearchQuery::text(
4009 search_query,
4010 false,
4011 true,
4012 false,
4013 Vec::new(),
4014 vec![
4015 PathMatcher::new("*.rs").unwrap(),
4016 PathMatcher::new("*.ts").unwrap(),
4017 PathMatcher::new("*.odd").unwrap(),
4018 ],
4019 ).unwrap(),
4020 cx
4021 )
4022 .await
4023 .unwrap().is_empty(),
4024 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4025 );
4026}
4027
4028#[gpui::test]
4029async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4030 init_test(cx);
4031
4032 let search_query = "file";
4033
4034 let fs = FakeFs::new(cx.executor());
4035 fs.insert_tree(
4036 "/dir",
4037 json!({
4038 "one.rs": r#"// Rust file one"#,
4039 "one.ts": r#"// TypeScript file one"#,
4040 "two.rs": r#"// Rust file two"#,
4041 "two.ts": r#"// TypeScript file two"#,
4042 }),
4043 )
4044 .await;
4045 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4046
4047 assert!(
4048 search(
4049 &project,
4050 SearchQuery::text(
4051 search_query,
4052 false,
4053 true,
4054 false,
4055 vec![PathMatcher::new("*.odd").unwrap()],
4056 vec![PathMatcher::new("*.odd").unwrap()],
4057 )
4058 .unwrap(),
4059 cx
4060 )
4061 .await
4062 .unwrap()
4063 .is_empty(),
4064 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4065 );
4066
4067 assert!(
4068 search(
4069 &project,
4070 SearchQuery::text(
4071 search_query,
4072 false,
4073 true,
4074 false,
4075 vec![PathMatcher::new("*.ts").unwrap()],
4076 vec![PathMatcher::new("*.ts").unwrap()],
4077 ).unwrap(),
4078 cx
4079 )
4080 .await
4081 .unwrap()
4082 .is_empty(),
4083 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4084 );
4085
4086 assert!(
4087 search(
4088 &project,
4089 SearchQuery::text(
4090 search_query,
4091 false,
4092 true,
4093 false,
4094 vec![
4095 PathMatcher::new("*.ts").unwrap(),
4096 PathMatcher::new("*.odd").unwrap()
4097 ],
4098 vec![
4099 PathMatcher::new("*.ts").unwrap(),
4100 PathMatcher::new("*.odd").unwrap()
4101 ],
4102 )
4103 .unwrap(),
4104 cx
4105 )
4106 .await
4107 .unwrap()
4108 .is_empty(),
4109 "Non-matching inclusions and exclusions should not change that."
4110 );
4111
4112 assert_eq!(
4113 search(
4114 &project,
4115 SearchQuery::text(
4116 search_query,
4117 false,
4118 true,
4119 false,
4120 vec![
4121 PathMatcher::new("*.ts").unwrap(),
4122 PathMatcher::new("*.odd").unwrap()
4123 ],
4124 vec![
4125 PathMatcher::new("*.rs").unwrap(),
4126 PathMatcher::new("*.odd").unwrap()
4127 ],
4128 )
4129 .unwrap(),
4130 cx
4131 )
4132 .await
4133 .unwrap(),
4134 HashMap::from_iter([
4135 ("dir/one.ts".to_string(), vec![14..18]),
4136 ("dir/two.ts".to_string(), vec![14..18]),
4137 ]),
4138 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4139 );
4140}
4141
4142#[gpui::test]
4143async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4144 init_test(cx);
4145
4146 let fs = FakeFs::new(cx.executor());
4147 fs.insert_tree(
4148 "/worktree-a",
4149 json!({
4150 "haystack.rs": r#"// NEEDLE"#,
4151 "haystack.ts": r#"// NEEDLE"#,
4152 }),
4153 )
4154 .await;
4155 fs.insert_tree(
4156 "/worktree-b",
4157 json!({
4158 "haystack.rs": r#"// NEEDLE"#,
4159 "haystack.ts": r#"// NEEDLE"#,
4160 }),
4161 )
4162 .await;
4163
4164 let project = Project::test(
4165 fs.clone(),
4166 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4167 cx,
4168 )
4169 .await;
4170
4171 assert_eq!(
4172 search(
4173 &project,
4174 SearchQuery::text(
4175 "NEEDLE",
4176 false,
4177 true,
4178 false,
4179 vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
4180 Vec::new()
4181 )
4182 .unwrap(),
4183 cx
4184 )
4185 .await
4186 .unwrap(),
4187 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4188 "should only return results from included worktree"
4189 );
4190 assert_eq!(
4191 search(
4192 &project,
4193 SearchQuery::text(
4194 "NEEDLE",
4195 false,
4196 true,
4197 false,
4198 vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
4199 Vec::new()
4200 )
4201 .unwrap(),
4202 cx
4203 )
4204 .await
4205 .unwrap(),
4206 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4207 "should only return results from included worktree"
4208 );
4209
4210 assert_eq!(
4211 search(
4212 &project,
4213 SearchQuery::text(
4214 "NEEDLE",
4215 false,
4216 true,
4217 false,
4218 vec![PathMatcher::new("*.ts").unwrap()],
4219 Vec::new()
4220 )
4221 .unwrap(),
4222 cx
4223 )
4224 .await
4225 .unwrap(),
4226 HashMap::from_iter([
4227 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4228 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4229 ]),
4230 "should return results from both worktrees"
4231 );
4232}
4233
4234#[gpui::test]
4235async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4236 init_test(cx);
4237
4238 let fs = FakeFs::new(cx.background_executor.clone());
4239 fs.insert_tree(
4240 "/dir",
4241 json!({
4242 ".git": {},
4243 ".gitignore": "**/target\n/node_modules\n",
4244 "target": {
4245 "index.txt": "index_key:index_value"
4246 },
4247 "node_modules": {
4248 "eslint": {
4249 "index.ts": "const eslint_key = 'eslint value'",
4250 "package.json": r#"{ "some_key": "some value" }"#,
4251 },
4252 "prettier": {
4253 "index.ts": "const prettier_key = 'prettier value'",
4254 "package.json": r#"{ "other_key": "other value" }"#,
4255 },
4256 },
4257 "package.json": r#"{ "main_key": "main value" }"#,
4258 }),
4259 )
4260 .await;
4261 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4262
4263 let query = "key";
4264 assert_eq!(
4265 search(
4266 &project,
4267 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4268 cx
4269 )
4270 .await
4271 .unwrap(),
4272 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
4273 "Only one non-ignored file should have the query"
4274 );
4275
4276 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4277 assert_eq!(
4278 search(
4279 &project,
4280 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4281 cx
4282 )
4283 .await
4284 .unwrap(),
4285 HashMap::from_iter([
4286 ("dir/package.json".to_string(), vec![8..11]),
4287 ("dir/target/index.txt".to_string(), vec![6..9]),
4288 (
4289 "dir/node_modules/prettier/package.json".to_string(),
4290 vec![9..12]
4291 ),
4292 (
4293 "dir/node_modules/prettier/index.ts".to_string(),
4294 vec![15..18]
4295 ),
4296 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4297 (
4298 "dir/node_modules/eslint/package.json".to_string(),
4299 vec![8..11]
4300 ),
4301 ]),
4302 "Unrestricted search with ignored directories should find every file with the query"
4303 );
4304
4305 let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
4306 let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
4307 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4308 assert_eq!(
4309 search(
4310 &project,
4311 SearchQuery::text(
4312 query,
4313 false,
4314 false,
4315 true,
4316 files_to_include,
4317 files_to_exclude,
4318 )
4319 .unwrap(),
4320 cx
4321 )
4322 .await
4323 .unwrap(),
4324 HashMap::from_iter([(
4325 "dir/node_modules/prettier/package.json".to_string(),
4326 vec![9..12]
4327 )]),
4328 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4329 );
4330}
4331
4332#[test]
4333fn test_glob_literal_prefix() {
4334 assert_eq!(glob_literal_prefix("**/*.js"), "");
4335 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
4336 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
4337 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
4338}
4339
4340#[gpui::test]
4341async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4342 init_test(cx);
4343
4344 let fs = FakeFs::new(cx.executor().clone());
4345 fs.insert_tree(
4346 "/one/two",
4347 json!({
4348 "three": {
4349 "a.txt": "",
4350 "four": {}
4351 },
4352 "c.rs": ""
4353 }),
4354 )
4355 .await;
4356
4357 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4358 project
4359 .update(cx, |project, cx| {
4360 let id = project.worktrees().next().unwrap().read(cx).id();
4361 project.create_entry((id, "b.."), true, cx)
4362 })
4363 .unwrap()
4364 .await
4365 .unwrap();
4366
4367 // Can't create paths outside the project
4368 let result = project
4369 .update(cx, |project, cx| {
4370 let id = project.worktrees().next().unwrap().read(cx).id();
4371 project.create_entry((id, "../../boop"), true, cx)
4372 })
4373 .await;
4374 assert!(result.is_err());
4375
4376 // Can't create paths with '..'
4377 let result = project
4378 .update(cx, |project, cx| {
4379 let id = project.worktrees().next().unwrap().read(cx).id();
4380 project.create_entry((id, "four/../beep"), true, cx)
4381 })
4382 .await;
4383 assert!(result.is_err());
4384
4385 assert_eq!(
4386 fs.paths(true),
4387 vec![
4388 PathBuf::from("/"),
4389 PathBuf::from("/one"),
4390 PathBuf::from("/one/two"),
4391 PathBuf::from("/one/two/c.rs"),
4392 PathBuf::from("/one/two/three"),
4393 PathBuf::from("/one/two/three/a.txt"),
4394 PathBuf::from("/one/two/three/b.."),
4395 PathBuf::from("/one/two/three/four"),
4396 ]
4397 );
4398
4399 // And we cannot open buffers with '..'
4400 let result = project
4401 .update(cx, |project, cx| {
4402 let id = project.worktrees().next().unwrap().read(cx).id();
4403 project.open_buffer((id, "../c.rs"), cx)
4404 })
4405 .await;
4406 assert!(result.is_err())
4407}
4408
4409#[gpui::test]
4410async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4411 init_test(cx);
4412
4413 let fs = FakeFs::new(cx.executor());
4414 fs.insert_tree(
4415 "/dir",
4416 json!({
4417 "a.tsx": "a",
4418 }),
4419 )
4420 .await;
4421
4422 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4423
4424 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4425 language_registry.add(tsx_lang());
4426 let language_server_names = [
4427 "TypeScriptServer",
4428 "TailwindServer",
4429 "ESLintServer",
4430 "NoHoverCapabilitiesServer",
4431 ];
4432 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4433 "tsx",
4434 true,
4435 FakeLspAdapter {
4436 name: &language_server_names[0],
4437 capabilities: lsp::ServerCapabilities {
4438 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4439 ..lsp::ServerCapabilities::default()
4440 },
4441 ..FakeLspAdapter::default()
4442 },
4443 );
4444 let _a = language_registry.register_specific_fake_lsp_adapter(
4445 "tsx",
4446 false,
4447 FakeLspAdapter {
4448 name: &language_server_names[1],
4449 capabilities: lsp::ServerCapabilities {
4450 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4451 ..lsp::ServerCapabilities::default()
4452 },
4453 ..FakeLspAdapter::default()
4454 },
4455 );
4456 let _b = language_registry.register_specific_fake_lsp_adapter(
4457 "tsx",
4458 false,
4459 FakeLspAdapter {
4460 name: &language_server_names[2],
4461 capabilities: lsp::ServerCapabilities {
4462 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4463 ..lsp::ServerCapabilities::default()
4464 },
4465 ..FakeLspAdapter::default()
4466 },
4467 );
4468 let _c = language_registry.register_specific_fake_lsp_adapter(
4469 "tsx",
4470 false,
4471 FakeLspAdapter {
4472 name: &language_server_names[3],
4473 capabilities: lsp::ServerCapabilities {
4474 hover_provider: None,
4475 ..lsp::ServerCapabilities::default()
4476 },
4477 ..FakeLspAdapter::default()
4478 },
4479 );
4480
4481 let buffer = project
4482 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4483 .await
4484 .unwrap();
4485 cx.executor().run_until_parked();
4486
4487 let mut servers_with_hover_requests = HashMap::default();
4488 for i in 0..language_server_names.len() {
4489 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4490 panic!(
4491 "Failed to get language server #{i} with name {}",
4492 &language_server_names[i]
4493 )
4494 });
4495 let new_server_name = new_server.server.name();
4496 assert!(
4497 !servers_with_hover_requests.contains_key(new_server_name),
4498 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4499 );
4500 let new_server_name = new_server_name.to_string();
4501 match new_server_name.as_str() {
4502 "TailwindServer" | "TypeScriptServer" => {
4503 servers_with_hover_requests.insert(
4504 new_server_name.clone(),
4505 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4506 let name = new_server_name.clone();
4507 async move {
4508 Ok(Some(lsp::Hover {
4509 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4510 format!("{name} hover"),
4511 )),
4512 range: None,
4513 }))
4514 }
4515 }),
4516 );
4517 }
4518 "ESLintServer" => {
4519 servers_with_hover_requests.insert(
4520 new_server_name,
4521 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4522 |_, _| async move { Ok(None) },
4523 ),
4524 );
4525 }
4526 "NoHoverCapabilitiesServer" => {
4527 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4528 |_, _| async move {
4529 panic!(
4530 "Should not call for hovers server with no corresponding capabilities"
4531 )
4532 },
4533 );
4534 }
4535 unexpected => panic!("Unexpected server name: {unexpected}"),
4536 }
4537 }
4538
4539 let hover_task = project.update(cx, |project, cx| {
4540 project.hover(&buffer, Point::new(0, 0), cx)
4541 });
4542 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4543 |mut hover_request| async move {
4544 hover_request
4545 .next()
4546 .await
4547 .expect("All hover requests should have been triggered")
4548 },
4549 ))
4550 .await;
4551 assert_eq!(
4552 vec!["TailwindServer hover", "TypeScriptServer hover"],
4553 hover_task
4554 .await
4555 .into_iter()
4556 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4557 .sorted()
4558 .collect::<Vec<_>>(),
4559 "Should receive hover responses from all related servers with hover capabilities"
4560 );
4561}
4562
4563#[gpui::test]
4564async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4565 init_test(cx);
4566
4567 let fs = FakeFs::new(cx.executor());
4568 fs.insert_tree(
4569 "/dir",
4570 json!({
4571 "a.ts": "a",
4572 }),
4573 )
4574 .await;
4575
4576 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4577
4578 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4579 language_registry.add(typescript_lang());
4580 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4581 "TypeScript",
4582 FakeLspAdapter {
4583 capabilities: lsp::ServerCapabilities {
4584 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4585 ..lsp::ServerCapabilities::default()
4586 },
4587 ..FakeLspAdapter::default()
4588 },
4589 );
4590
4591 let buffer = project
4592 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4593 .await
4594 .unwrap();
4595 cx.executor().run_until_parked();
4596
4597 let fake_server = fake_language_servers
4598 .next()
4599 .await
4600 .expect("failed to get the language server");
4601
4602 let mut request_handled =
4603 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4604 Ok(Some(lsp::Hover {
4605 contents: lsp::HoverContents::Array(vec![
4606 lsp::MarkedString::String("".to_string()),
4607 lsp::MarkedString::String(" ".to_string()),
4608 lsp::MarkedString::String("\n\n\n".to_string()),
4609 ]),
4610 range: None,
4611 }))
4612 });
4613
4614 let hover_task = project.update(cx, |project, cx| {
4615 project.hover(&buffer, Point::new(0, 0), cx)
4616 });
4617 let () = request_handled
4618 .next()
4619 .await
4620 .expect("All hover requests should have been triggered");
4621 assert_eq!(
4622 Vec::<String>::new(),
4623 hover_task
4624 .await
4625 .into_iter()
4626 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4627 .sorted()
4628 .collect::<Vec<_>>(),
4629 "Empty hover parts should be ignored"
4630 );
4631}
4632
4633#[gpui::test]
4634async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4635 init_test(cx);
4636
4637 let fs = FakeFs::new(cx.executor());
4638 fs.insert_tree(
4639 "/dir",
4640 json!({
4641 "a.tsx": "a",
4642 }),
4643 )
4644 .await;
4645
4646 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4647
4648 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4649 language_registry.add(tsx_lang());
4650 let language_server_names = [
4651 "TypeScriptServer",
4652 "TailwindServer",
4653 "ESLintServer",
4654 "NoActionsCapabilitiesServer",
4655 ];
4656 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4657 "tsx",
4658 true,
4659 FakeLspAdapter {
4660 name: &language_server_names[0],
4661 capabilities: lsp::ServerCapabilities {
4662 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4663 ..lsp::ServerCapabilities::default()
4664 },
4665 ..FakeLspAdapter::default()
4666 },
4667 );
4668 let _a = language_registry.register_specific_fake_lsp_adapter(
4669 "tsx",
4670 false,
4671 FakeLspAdapter {
4672 name: &language_server_names[1],
4673 capabilities: lsp::ServerCapabilities {
4674 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4675 ..lsp::ServerCapabilities::default()
4676 },
4677 ..FakeLspAdapter::default()
4678 },
4679 );
4680 let _b = language_registry.register_specific_fake_lsp_adapter(
4681 "tsx",
4682 false,
4683 FakeLspAdapter {
4684 name: &language_server_names[2],
4685 capabilities: lsp::ServerCapabilities {
4686 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4687 ..lsp::ServerCapabilities::default()
4688 },
4689 ..FakeLspAdapter::default()
4690 },
4691 );
4692 let _c = language_registry.register_specific_fake_lsp_adapter(
4693 "tsx",
4694 false,
4695 FakeLspAdapter {
4696 name: &language_server_names[3],
4697 capabilities: lsp::ServerCapabilities {
4698 code_action_provider: None,
4699 ..lsp::ServerCapabilities::default()
4700 },
4701 ..FakeLspAdapter::default()
4702 },
4703 );
4704
4705 let buffer = project
4706 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4707 .await
4708 .unwrap();
4709 cx.executor().run_until_parked();
4710
4711 let mut servers_with_actions_requests = HashMap::default();
4712 for i in 0..language_server_names.len() {
4713 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4714 panic!(
4715 "Failed to get language server #{i} with name {}",
4716 &language_server_names[i]
4717 )
4718 });
4719 let new_server_name = new_server.server.name();
4720 assert!(
4721 !servers_with_actions_requests.contains_key(new_server_name),
4722 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4723 );
4724 let new_server_name = new_server_name.to_string();
4725 match new_server_name.as_str() {
4726 "TailwindServer" | "TypeScriptServer" => {
4727 servers_with_actions_requests.insert(
4728 new_server_name.clone(),
4729 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4730 move |_, _| {
4731 let name = new_server_name.clone();
4732 async move {
4733 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4734 lsp::CodeAction {
4735 title: format!("{name} code action"),
4736 ..lsp::CodeAction::default()
4737 },
4738 )]))
4739 }
4740 },
4741 ),
4742 );
4743 }
4744 "ESLintServer" => {
4745 servers_with_actions_requests.insert(
4746 new_server_name,
4747 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4748 |_, _| async move { Ok(None) },
4749 ),
4750 );
4751 }
4752 "NoActionsCapabilitiesServer" => {
4753 let _never_handled = new_server
4754 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4755 panic!(
4756 "Should not call for code actions server with no corresponding capabilities"
4757 )
4758 });
4759 }
4760 unexpected => panic!("Unexpected server name: {unexpected}"),
4761 }
4762 }
4763
4764 let code_actions_task = project.update(cx, |project, cx| {
4765 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4766 });
4767 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4768 |mut code_actions_request| async move {
4769 code_actions_request
4770 .next()
4771 .await
4772 .expect("All code actions requests should have been triggered")
4773 },
4774 ))
4775 .await;
4776 assert_eq!(
4777 vec!["TailwindServer code action", "TypeScriptServer code action"],
4778 code_actions_task
4779 .await
4780 .into_iter()
4781 .map(|code_action| code_action.lsp_action.title)
4782 .sorted()
4783 .collect::<Vec<_>>(),
4784 "Should receive code actions responses from all related servers with hover capabilities"
4785 );
4786}
4787
4788async fn search(
4789 project: &Model<Project>,
4790 query: SearchQuery,
4791 cx: &mut gpui::TestAppContext,
4792) -> Result<HashMap<String, Vec<Range<usize>>>> {
4793 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4794 let mut results = HashMap::default();
4795 while let Some(search_result) = search_rx.next().await {
4796 match search_result {
4797 SearchResult::Buffer { buffer, ranges } => {
4798 results.entry(buffer).or_insert(ranges);
4799 }
4800 SearchResult::LimitReached => {}
4801 }
4802 }
4803 Ok(results
4804 .into_iter()
4805 .map(|(buffer, ranges)| {
4806 buffer.update(cx, |buffer, cx| {
4807 let path = buffer
4808 .file()
4809 .unwrap()
4810 .full_path(cx)
4811 .to_string_lossy()
4812 .to_string();
4813 let ranges = ranges
4814 .into_iter()
4815 .map(|range| range.to_offset(buffer))
4816 .collect::<Vec<_>>();
4817 (path, ranges)
4818 })
4819 })
4820 .collect())
4821}
4822
4823fn init_test(cx: &mut gpui::TestAppContext) {
4824 if std::env::var("RUST_LOG").is_ok() {
4825 env_logger::try_init().ok();
4826 }
4827
4828 cx.update(|cx| {
4829 let settings_store = SettingsStore::test(cx);
4830 cx.set_global(settings_store);
4831 release_channel::init("0.0.0", cx);
4832 language::init(cx);
4833 Project::init_settings(cx);
4834 });
4835}
4836
4837fn json_lang() -> Arc<Language> {
4838 Arc::new(Language::new(
4839 LanguageConfig {
4840 name: "JSON".into(),
4841 matcher: LanguageMatcher {
4842 path_suffixes: vec!["json".to_string()],
4843 ..Default::default()
4844 },
4845 ..Default::default()
4846 },
4847 None,
4848 ))
4849}
4850
4851fn js_lang() -> Arc<Language> {
4852 Arc::new(Language::new(
4853 LanguageConfig {
4854 name: Arc::from("JavaScript"),
4855 matcher: LanguageMatcher {
4856 path_suffixes: vec!["js".to_string()],
4857 ..Default::default()
4858 },
4859 ..Default::default()
4860 },
4861 None,
4862 ))
4863}
4864
4865fn rust_lang() -> Arc<Language> {
4866 Arc::new(Language::new(
4867 LanguageConfig {
4868 name: "Rust".into(),
4869 matcher: LanguageMatcher {
4870 path_suffixes: vec!["rs".to_string()],
4871 ..Default::default()
4872 },
4873 ..Default::default()
4874 },
4875 Some(tree_sitter_rust::language()),
4876 ))
4877}
4878
4879fn typescript_lang() -> Arc<Language> {
4880 Arc::new(Language::new(
4881 LanguageConfig {
4882 name: "TypeScript".into(),
4883 matcher: LanguageMatcher {
4884 path_suffixes: vec!["ts".to_string()],
4885 ..Default::default()
4886 },
4887 ..Default::default()
4888 },
4889 Some(tree_sitter_typescript::language_typescript()),
4890 ))
4891}
4892
4893fn tsx_lang() -> Arc<Language> {
4894 Arc::new(Language::new(
4895 LanguageConfig {
4896 name: "tsx".into(),
4897 matcher: LanguageMatcher {
4898 path_suffixes: vec!["tsx".to_string()],
4899 ..Default::default()
4900 },
4901 ..Default::default()
4902 },
4903 Some(tree_sitter_typescript::language_tsx()),
4904 ))
4905}