1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{os, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
18#[gpui::test]
19async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
20 cx.executor().allow_parking();
21
22 let (tx, mut rx) = futures::channel::mpsc::unbounded();
23 let _thread = std::thread::spawn(move || {
24 std::fs::metadata("/Users").unwrap();
25 std::thread::sleep(Duration::from_millis(1000));
26 tx.unbounded_send(1).unwrap();
27 });
28 rx.next().await.unwrap();
29}
30
31#[gpui::test]
32async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
33 cx.executor().allow_parking();
34
35 let io_task = smol::unblock(move || {
36 println!("sleeping on thread {:?}", std::thread::current().id());
37 std::thread::sleep(Duration::from_millis(10));
38 1
39 });
40
41 let task = cx.foreground_executor().spawn(async move {
42 io_task.await;
43 });
44
45 task.await;
46}
47
48#[gpui::test]
49async fn test_symlinks(cx: &mut gpui::TestAppContext) {
50 init_test(cx);
51 cx.executor().allow_parking();
52
53 let dir = temp_tree(json!({
54 "root": {
55 "apple": "",
56 "banana": {
57 "carrot": {
58 "date": "",
59 "endive": "",
60 }
61 },
62 "fennel": {
63 "grape": "",
64 }
65 }
66 }));
67
68 let root_link_path = dir.path().join("root_link");
69 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
70 os::unix::fs::symlink(
71 &dir.path().join("root/fennel"),
72 &dir.path().join("root/finnochio"),
73 )
74 .unwrap();
75
76 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
77
78 project.update(cx, |project, cx| {
79 let tree = project.worktrees().next().unwrap().read(cx);
80 assert_eq!(tree.file_count(), 5);
81 assert_eq!(
82 tree.inode_for_path("fennel/grape"),
83 tree.inode_for_path("finnochio/grape")
84 );
85 });
86}
87
88#[gpui::test]
89async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
90 init_test(cx);
91
92 let fs = FakeFs::new(cx.executor());
93 fs.insert_tree(
94 "/the-root",
95 json!({
96 ".zed": {
97 "settings.json": r#"{ "tab_size": 8 }"#
98 },
99 "a": {
100 "a.rs": "fn a() {\n A\n}"
101 },
102 "b": {
103 ".zed": {
104 "settings.json": r#"{ "tab_size": 2 }"#
105 },
106 "b.rs": "fn b() {\n B\n}"
107 }
108 }),
109 )
110 .await;
111
112 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
113 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
114
115 cx.executor().run_until_parked();
116 cx.update(|cx| {
117 let tree = worktree.read(cx);
118
119 let settings_a = language_settings(
120 None,
121 Some(
122 &(File::for_entry(
123 tree.entry_for_path("a/a.rs").unwrap().clone(),
124 worktree.clone(),
125 ) as _),
126 ),
127 cx,
128 );
129 let settings_b = language_settings(
130 None,
131 Some(
132 &(File::for_entry(
133 tree.entry_for_path("b/b.rs").unwrap().clone(),
134 worktree.clone(),
135 ) as _),
136 ),
137 cx,
138 );
139
140 assert_eq!(settings_a.tab_size.get(), 8);
141 assert_eq!(settings_b.tab_size.get(), 2);
142 });
143}
144
145#[gpui::test]
146async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
147 init_test(cx);
148
149 let mut rust_language = Language::new(
150 LanguageConfig {
151 name: "Rust".into(),
152 path_suffixes: vec!["rs".to_string()],
153 ..Default::default()
154 },
155 Some(tree_sitter_rust::language()),
156 );
157 let mut json_language = Language::new(
158 LanguageConfig {
159 name: "JSON".into(),
160 path_suffixes: vec!["json".to_string()],
161 ..Default::default()
162 },
163 None,
164 );
165 let mut fake_rust_servers = rust_language
166 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
167 name: "the-rust-language-server",
168 capabilities: lsp::ServerCapabilities {
169 completion_provider: Some(lsp::CompletionOptions {
170 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
171 ..Default::default()
172 }),
173 ..Default::default()
174 },
175 ..Default::default()
176 }))
177 .await;
178 let mut fake_json_servers = json_language
179 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
180 name: "the-json-language-server",
181 capabilities: lsp::ServerCapabilities {
182 completion_provider: Some(lsp::CompletionOptions {
183 trigger_characters: Some(vec![":".to_string()]),
184 ..Default::default()
185 }),
186 ..Default::default()
187 },
188 ..Default::default()
189 }))
190 .await;
191
192 let fs = FakeFs::new(cx.executor());
193 fs.insert_tree(
194 "/the-root",
195 json!({
196 "test.rs": "const A: i32 = 1;",
197 "test2.rs": "",
198 "Cargo.toml": "a = 1",
199 "package.json": "{\"a\": 1}",
200 }),
201 )
202 .await;
203
204 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
205
206 // Open a buffer without an associated language server.
207 let toml_buffer = project
208 .update(cx, |project, cx| {
209 project.open_local_buffer("/the-root/Cargo.toml", cx)
210 })
211 .await
212 .unwrap();
213
214 // Open a buffer with an associated language server before the language for it has been loaded.
215 let rust_buffer = project
216 .update(cx, |project, cx| {
217 project.open_local_buffer("/the-root/test.rs", cx)
218 })
219 .await
220 .unwrap();
221 rust_buffer.update(cx, |buffer, _| {
222 assert_eq!(buffer.language().map(|l| l.name()), None);
223 });
224
225 // Now we add the languages to the project, and ensure they get assigned to all
226 // the relevant open buffers.
227 project.update(cx, |project, _| {
228 project.languages.add(Arc::new(json_language));
229 project.languages.add(Arc::new(rust_language));
230 });
231 cx.executor().run_until_parked();
232 rust_buffer.update(cx, |buffer, _| {
233 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
234 });
235
236 // A server is started up, and it is notified about Rust files.
237 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
238 assert_eq!(
239 fake_rust_server
240 .receive_notification::<lsp::notification::DidOpenTextDocument>()
241 .await
242 .text_document,
243 lsp::TextDocumentItem {
244 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
245 version: 0,
246 text: "const A: i32 = 1;".to_string(),
247 language_id: Default::default()
248 }
249 );
250
251 // The buffer is configured based on the language server's capabilities.
252 rust_buffer.update(cx, |buffer, _| {
253 assert_eq!(
254 buffer.completion_triggers(),
255 &[".".to_string(), "::".to_string()]
256 );
257 });
258 toml_buffer.update(cx, |buffer, _| {
259 assert!(buffer.completion_triggers().is_empty());
260 });
261
262 // Edit a buffer. The changes are reported to the language server.
263 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
264 assert_eq!(
265 fake_rust_server
266 .receive_notification::<lsp::notification::DidChangeTextDocument>()
267 .await
268 .text_document,
269 lsp::VersionedTextDocumentIdentifier::new(
270 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
271 1
272 )
273 );
274
275 // Open a third buffer with a different associated language server.
276 let json_buffer = project
277 .update(cx, |project, cx| {
278 project.open_local_buffer("/the-root/package.json", cx)
279 })
280 .await
281 .unwrap();
282
283 // A json language server is started up and is only notified about the json buffer.
284 let mut fake_json_server = fake_json_servers.next().await.unwrap();
285 assert_eq!(
286 fake_json_server
287 .receive_notification::<lsp::notification::DidOpenTextDocument>()
288 .await
289 .text_document,
290 lsp::TextDocumentItem {
291 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
292 version: 0,
293 text: "{\"a\": 1}".to_string(),
294 language_id: Default::default()
295 }
296 );
297
298 // This buffer is configured based on the second language server's
299 // capabilities.
300 json_buffer.update(cx, |buffer, _| {
301 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
302 });
303
304 // When opening another buffer whose language server is already running,
305 // it is also configured based on the existing language server's capabilities.
306 let rust_buffer2 = project
307 .update(cx, |project, cx| {
308 project.open_local_buffer("/the-root/test2.rs", cx)
309 })
310 .await
311 .unwrap();
312 rust_buffer2.update(cx, |buffer, _| {
313 assert_eq!(
314 buffer.completion_triggers(),
315 &[".".to_string(), "::".to_string()]
316 );
317 });
318
319 // Changes are reported only to servers matching the buffer's language.
320 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
321 rust_buffer2.update(cx, |buffer, cx| {
322 buffer.edit([(0..0, "let x = 1;")], None, cx)
323 });
324 assert_eq!(
325 fake_rust_server
326 .receive_notification::<lsp::notification::DidChangeTextDocument>()
327 .await
328 .text_document,
329 lsp::VersionedTextDocumentIdentifier::new(
330 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
331 1
332 )
333 );
334
335 // Save notifications are reported to all servers.
336 project
337 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
338 .await
339 .unwrap();
340 assert_eq!(
341 fake_rust_server
342 .receive_notification::<lsp::notification::DidSaveTextDocument>()
343 .await
344 .text_document,
345 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
346 );
347 assert_eq!(
348 fake_json_server
349 .receive_notification::<lsp::notification::DidSaveTextDocument>()
350 .await
351 .text_document,
352 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
353 );
354
355 // Renames are reported only to servers matching the buffer's language.
356 fs.rename(
357 Path::new("/the-root/test2.rs"),
358 Path::new("/the-root/test3.rs"),
359 Default::default(),
360 )
361 .await
362 .unwrap();
363 assert_eq!(
364 fake_rust_server
365 .receive_notification::<lsp::notification::DidCloseTextDocument>()
366 .await
367 .text_document,
368 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
369 );
370 assert_eq!(
371 fake_rust_server
372 .receive_notification::<lsp::notification::DidOpenTextDocument>()
373 .await
374 .text_document,
375 lsp::TextDocumentItem {
376 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
377 version: 0,
378 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
379 language_id: Default::default()
380 },
381 );
382
383 rust_buffer2.update(cx, |buffer, cx| {
384 buffer.update_diagnostics(
385 LanguageServerId(0),
386 DiagnosticSet::from_sorted_entries(
387 vec![DiagnosticEntry {
388 diagnostic: Default::default(),
389 range: Anchor::MIN..Anchor::MAX,
390 }],
391 &buffer.snapshot(),
392 ),
393 cx,
394 );
395 assert_eq!(
396 buffer
397 .snapshot()
398 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
399 .count(),
400 1
401 );
402 });
403
404 // When the rename changes the extension of the file, the buffer gets closed on the old
405 // language server and gets opened on the new one.
406 fs.rename(
407 Path::new("/the-root/test3.rs"),
408 Path::new("/the-root/test3.json"),
409 Default::default(),
410 )
411 .await
412 .unwrap();
413 assert_eq!(
414 fake_rust_server
415 .receive_notification::<lsp::notification::DidCloseTextDocument>()
416 .await
417 .text_document,
418 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
419 );
420 assert_eq!(
421 fake_json_server
422 .receive_notification::<lsp::notification::DidOpenTextDocument>()
423 .await
424 .text_document,
425 lsp::TextDocumentItem {
426 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
427 version: 0,
428 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
429 language_id: Default::default()
430 },
431 );
432
433 // We clear the diagnostics, since the language has changed.
434 rust_buffer2.update(cx, |buffer, _| {
435 assert_eq!(
436 buffer
437 .snapshot()
438 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
439 .count(),
440 0
441 );
442 });
443
444 // The renamed file's version resets after changing language server.
445 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
446 assert_eq!(
447 fake_json_server
448 .receive_notification::<lsp::notification::DidChangeTextDocument>()
449 .await
450 .text_document,
451 lsp::VersionedTextDocumentIdentifier::new(
452 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
453 1
454 )
455 );
456
457 // Restart language servers
458 project.update(cx, |project, cx| {
459 project.restart_language_servers_for_buffers(
460 vec![rust_buffer.clone(), json_buffer.clone()],
461 cx,
462 );
463 });
464
465 let mut rust_shutdown_requests = fake_rust_server
466 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
467 let mut json_shutdown_requests = fake_json_server
468 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
469 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
470
471 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
472 let mut fake_json_server = fake_json_servers.next().await.unwrap();
473
474 // Ensure rust document is reopened in new rust language server
475 assert_eq!(
476 fake_rust_server
477 .receive_notification::<lsp::notification::DidOpenTextDocument>()
478 .await
479 .text_document,
480 lsp::TextDocumentItem {
481 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
482 version: 0,
483 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
484 language_id: Default::default()
485 }
486 );
487
488 // Ensure json documents are reopened in new json language server
489 assert_set_eq!(
490 [
491 fake_json_server
492 .receive_notification::<lsp::notification::DidOpenTextDocument>()
493 .await
494 .text_document,
495 fake_json_server
496 .receive_notification::<lsp::notification::DidOpenTextDocument>()
497 .await
498 .text_document,
499 ],
500 [
501 lsp::TextDocumentItem {
502 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
503 version: 0,
504 text: json_buffer.update(cx, |buffer, _| buffer.text()),
505 language_id: Default::default()
506 },
507 lsp::TextDocumentItem {
508 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
509 version: 0,
510 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
511 language_id: Default::default()
512 }
513 ]
514 );
515
516 // Close notifications are reported only to servers matching the buffer's language.
517 cx.update(|_| drop(json_buffer));
518 let close_message = lsp::DidCloseTextDocumentParams {
519 text_document: lsp::TextDocumentIdentifier::new(
520 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
521 ),
522 };
523 assert_eq!(
524 fake_json_server
525 .receive_notification::<lsp::notification::DidCloseTextDocument>()
526 .await,
527 close_message,
528 );
529}
530
531#[gpui::test]
532async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
533 init_test(cx);
534
535 let mut language = Language::new(
536 LanguageConfig {
537 name: "Rust".into(),
538 path_suffixes: vec!["rs".to_string()],
539 ..Default::default()
540 },
541 Some(tree_sitter_rust::language()),
542 );
543 let mut fake_servers = language
544 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
545 name: "the-language-server",
546 ..Default::default()
547 }))
548 .await;
549
550 let fs = FakeFs::new(cx.executor());
551 fs.insert_tree(
552 "/the-root",
553 json!({
554 ".gitignore": "target\n",
555 "src": {
556 "a.rs": "",
557 "b.rs": "",
558 },
559 "target": {
560 "x": {
561 "out": {
562 "x.rs": ""
563 }
564 },
565 "y": {
566 "out": {
567 "y.rs": "",
568 }
569 },
570 "z": {
571 "out": {
572 "z.rs": ""
573 }
574 }
575 }
576 }),
577 )
578 .await;
579
580 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
581 project.update(cx, |project, _| {
582 project.languages.add(Arc::new(language));
583 });
584 cx.executor().run_until_parked();
585
586 // Start the language server by opening a buffer with a compatible file extension.
587 let _buffer = project
588 .update(cx, |project, cx| {
589 project.open_local_buffer("/the-root/src/a.rs", cx)
590 })
591 .await
592 .unwrap();
593
594 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
595 project.update(cx, |project, cx| {
596 let worktree = project.worktrees().next().unwrap();
597 assert_eq!(
598 worktree
599 .read(cx)
600 .snapshot()
601 .entries(true)
602 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
603 .collect::<Vec<_>>(),
604 &[
605 (Path::new(""), false),
606 (Path::new(".gitignore"), false),
607 (Path::new("src"), false),
608 (Path::new("src/a.rs"), false),
609 (Path::new("src/b.rs"), false),
610 (Path::new("target"), true),
611 ]
612 );
613 });
614
615 let prev_read_dir_count = fs.read_dir_call_count();
616
617 // Keep track of the FS events reported to the language server.
618 let fake_server = fake_servers.next().await.unwrap();
619 let file_changes = Arc::new(Mutex::new(Vec::new()));
620 fake_server
621 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
622 registrations: vec![lsp::Registration {
623 id: Default::default(),
624 method: "workspace/didChangeWatchedFiles".to_string(),
625 register_options: serde_json::to_value(
626 lsp::DidChangeWatchedFilesRegistrationOptions {
627 watchers: vec![
628 lsp::FileSystemWatcher {
629 glob_pattern: lsp::GlobPattern::String(
630 "/the-root/Cargo.toml".to_string(),
631 ),
632 kind: None,
633 },
634 lsp::FileSystemWatcher {
635 glob_pattern: lsp::GlobPattern::String(
636 "/the-root/src/*.{rs,c}".to_string(),
637 ),
638 kind: None,
639 },
640 lsp::FileSystemWatcher {
641 glob_pattern: lsp::GlobPattern::String(
642 "/the-root/target/y/**/*.rs".to_string(),
643 ),
644 kind: None,
645 },
646 ],
647 },
648 )
649 .ok(),
650 }],
651 })
652 .await
653 .unwrap();
654 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
655 let file_changes = file_changes.clone();
656 move |params, _| {
657 let mut file_changes = file_changes.lock();
658 file_changes.extend(params.changes);
659 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
660 }
661 });
662
663 cx.executor().run_until_parked();
664 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
665 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
666
667 // Now the language server has asked us to watch an ignored directory path,
668 // so we recursively load it.
669 project.update(cx, |project, cx| {
670 let worktree = project.worktrees().next().unwrap();
671 assert_eq!(
672 worktree
673 .read(cx)
674 .snapshot()
675 .entries(true)
676 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
677 .collect::<Vec<_>>(),
678 &[
679 (Path::new(""), false),
680 (Path::new(".gitignore"), false),
681 (Path::new("src"), false),
682 (Path::new("src/a.rs"), false),
683 (Path::new("src/b.rs"), false),
684 (Path::new("target"), true),
685 (Path::new("target/x"), true),
686 (Path::new("target/y"), true),
687 (Path::new("target/y/out"), true),
688 (Path::new("target/y/out/y.rs"), true),
689 (Path::new("target/z"), true),
690 ]
691 );
692 });
693
694 // Perform some file system mutations, two of which match the watched patterns,
695 // and one of which does not.
696 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
697 .await
698 .unwrap();
699 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
700 .await
701 .unwrap();
702 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
703 .await
704 .unwrap();
705 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
706 .await
707 .unwrap();
708 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
709 .await
710 .unwrap();
711
712 // The language server receives events for the FS mutations that match its watch patterns.
713 cx.executor().run_until_parked();
714 assert_eq!(
715 &*file_changes.lock(),
716 &[
717 lsp::FileEvent {
718 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
719 typ: lsp::FileChangeType::DELETED,
720 },
721 lsp::FileEvent {
722 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
723 typ: lsp::FileChangeType::CREATED,
724 },
725 lsp::FileEvent {
726 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
727 typ: lsp::FileChangeType::CREATED,
728 },
729 ]
730 );
731}
732
733#[gpui::test]
734async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
735 init_test(cx);
736
737 let fs = FakeFs::new(cx.executor());
738 fs.insert_tree(
739 "/dir",
740 json!({
741 "a.rs": "let a = 1;",
742 "b.rs": "let b = 2;"
743 }),
744 )
745 .await;
746
747 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
748
749 let buffer_a = project
750 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
751 .await
752 .unwrap();
753 let buffer_b = project
754 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
755 .await
756 .unwrap();
757
758 project.update(cx, |project, cx| {
759 project
760 .update_diagnostics(
761 LanguageServerId(0),
762 lsp::PublishDiagnosticsParams {
763 uri: Url::from_file_path("/dir/a.rs").unwrap(),
764 version: None,
765 diagnostics: vec![lsp::Diagnostic {
766 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
767 severity: Some(lsp::DiagnosticSeverity::ERROR),
768 message: "error 1".to_string(),
769 ..Default::default()
770 }],
771 },
772 &[],
773 cx,
774 )
775 .unwrap();
776 project
777 .update_diagnostics(
778 LanguageServerId(0),
779 lsp::PublishDiagnosticsParams {
780 uri: Url::from_file_path("/dir/b.rs").unwrap(),
781 version: None,
782 diagnostics: vec![lsp::Diagnostic {
783 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
784 severity: Some(lsp::DiagnosticSeverity::WARNING),
785 message: "error 2".to_string(),
786 ..Default::default()
787 }],
788 },
789 &[],
790 cx,
791 )
792 .unwrap();
793 });
794
795 buffer_a.update(cx, |buffer, _| {
796 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
797 assert_eq!(
798 chunks
799 .iter()
800 .map(|(s, d)| (s.as_str(), *d))
801 .collect::<Vec<_>>(),
802 &[
803 ("let ", None),
804 ("a", Some(DiagnosticSeverity::ERROR)),
805 (" = 1;", None),
806 ]
807 );
808 });
809 buffer_b.update(cx, |buffer, _| {
810 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
811 assert_eq!(
812 chunks
813 .iter()
814 .map(|(s, d)| (s.as_str(), *d))
815 .collect::<Vec<_>>(),
816 &[
817 ("let ", None),
818 ("b", Some(DiagnosticSeverity::WARNING)),
819 (" = 2;", None),
820 ]
821 );
822 });
823}
824
825#[gpui::test]
826async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
827 init_test(cx);
828
829 let fs = FakeFs::new(cx.executor());
830 fs.insert_tree(
831 "/root",
832 json!({
833 "dir": {
834 ".git": {
835 "HEAD": "ref: refs/heads/main",
836 },
837 ".gitignore": "b.rs",
838 "a.rs": "let a = 1;",
839 "b.rs": "let b = 2;",
840 },
841 "other.rs": "let b = c;"
842 }),
843 )
844 .await;
845
846 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
847 let (worktree, _) = project
848 .update(cx, |project, cx| {
849 project.find_or_create_local_worktree("/root/dir", true, cx)
850 })
851 .await
852 .unwrap();
853 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
854
855 let (worktree, _) = project
856 .update(cx, |project, cx| {
857 project.find_or_create_local_worktree("/root/other.rs", false, cx)
858 })
859 .await
860 .unwrap();
861 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
862
863 let server_id = LanguageServerId(0);
864 project.update(cx, |project, cx| {
865 project
866 .update_diagnostics(
867 server_id,
868 lsp::PublishDiagnosticsParams {
869 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
870 version: None,
871 diagnostics: vec![lsp::Diagnostic {
872 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
873 severity: Some(lsp::DiagnosticSeverity::ERROR),
874 message: "unused variable 'b'".to_string(),
875 ..Default::default()
876 }],
877 },
878 &[],
879 cx,
880 )
881 .unwrap();
882 project
883 .update_diagnostics(
884 server_id,
885 lsp::PublishDiagnosticsParams {
886 uri: Url::from_file_path("/root/other.rs").unwrap(),
887 version: None,
888 diagnostics: vec![lsp::Diagnostic {
889 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
890 severity: Some(lsp::DiagnosticSeverity::ERROR),
891 message: "unknown variable 'c'".to_string(),
892 ..Default::default()
893 }],
894 },
895 &[],
896 cx,
897 )
898 .unwrap();
899 });
900
901 let main_ignored_buffer = project
902 .update(cx, |project, cx| {
903 project.open_buffer((main_worktree_id, "b.rs"), cx)
904 })
905 .await
906 .unwrap();
907 main_ignored_buffer.update(cx, |buffer, _| {
908 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
909 assert_eq!(
910 chunks
911 .iter()
912 .map(|(s, d)| (s.as_str(), *d))
913 .collect::<Vec<_>>(),
914 &[
915 ("let ", None),
916 ("b", Some(DiagnosticSeverity::ERROR)),
917 (" = 2;", None),
918 ],
919 "Gigitnored buffers should still get in-buffer diagnostics",
920 );
921 });
922 let other_buffer = project
923 .update(cx, |project, cx| {
924 project.open_buffer((other_worktree_id, ""), cx)
925 })
926 .await
927 .unwrap();
928 other_buffer.update(cx, |buffer, _| {
929 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
930 assert_eq!(
931 chunks
932 .iter()
933 .map(|(s, d)| (s.as_str(), *d))
934 .collect::<Vec<_>>(),
935 &[
936 ("let b = ", None),
937 ("c", Some(DiagnosticSeverity::ERROR)),
938 (";", None),
939 ],
940 "Buffers from hidden projects should still get in-buffer diagnostics"
941 );
942 });
943
944 project.update(cx, |project, cx| {
945 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
946 assert_eq!(
947 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
948 vec![(
949 ProjectPath {
950 worktree_id: main_worktree_id,
951 path: Arc::from(Path::new("b.rs")),
952 },
953 server_id,
954 DiagnosticSummary {
955 error_count: 1,
956 warning_count: 0,
957 }
958 )]
959 );
960 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
961 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
962 });
963}
964
965#[gpui::test]
966async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
967 init_test(cx);
968
969 let progress_token = "the-progress-token";
970 let mut language = Language::new(
971 LanguageConfig {
972 name: "Rust".into(),
973 path_suffixes: vec!["rs".to_string()],
974 ..Default::default()
975 },
976 Some(tree_sitter_rust::language()),
977 );
978 let mut fake_servers = language
979 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
980 disk_based_diagnostics_progress_token: Some(progress_token.into()),
981 disk_based_diagnostics_sources: vec!["disk".into()],
982 ..Default::default()
983 }))
984 .await;
985
986 let fs = FakeFs::new(cx.executor());
987 fs.insert_tree(
988 "/dir",
989 json!({
990 "a.rs": "fn a() { A }",
991 "b.rs": "const y: i32 = 1",
992 }),
993 )
994 .await;
995
996 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
997 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
998 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
999
1000 // Cause worktree to start the fake language server
1001 let _buffer = project
1002 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1003 .await
1004 .unwrap();
1005
1006 let mut events = cx.events(&project);
1007
1008 let fake_server = fake_servers.next().await.unwrap();
1009 assert_eq!(
1010 events.next().await.unwrap(),
1011 Event::LanguageServerAdded(LanguageServerId(0)),
1012 );
1013
1014 fake_server
1015 .start_progress(format!("{}/0", progress_token))
1016 .await;
1017 assert_eq!(
1018 events.next().await.unwrap(),
1019 Event::DiskBasedDiagnosticsStarted {
1020 language_server_id: LanguageServerId(0),
1021 }
1022 );
1023
1024 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1025 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1026 version: None,
1027 diagnostics: vec![lsp::Diagnostic {
1028 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1029 severity: Some(lsp::DiagnosticSeverity::ERROR),
1030 message: "undefined variable 'A'".to_string(),
1031 ..Default::default()
1032 }],
1033 });
1034 assert_eq!(
1035 events.next().await.unwrap(),
1036 Event::DiagnosticsUpdated {
1037 language_server_id: LanguageServerId(0),
1038 path: (worktree_id, Path::new("a.rs")).into()
1039 }
1040 );
1041
1042 fake_server.end_progress(format!("{}/0", progress_token));
1043 assert_eq!(
1044 events.next().await.unwrap(),
1045 Event::DiskBasedDiagnosticsFinished {
1046 language_server_id: LanguageServerId(0)
1047 }
1048 );
1049
1050 let buffer = project
1051 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1052 .await
1053 .unwrap();
1054
1055 buffer.update(cx, |buffer, _| {
1056 let snapshot = buffer.snapshot();
1057 let diagnostics = snapshot
1058 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1059 .collect::<Vec<_>>();
1060 assert_eq!(
1061 diagnostics,
1062 &[DiagnosticEntry {
1063 range: Point::new(0, 9)..Point::new(0, 10),
1064 diagnostic: Diagnostic {
1065 severity: lsp::DiagnosticSeverity::ERROR,
1066 message: "undefined variable 'A'".to_string(),
1067 group_id: 0,
1068 is_primary: true,
1069 ..Default::default()
1070 }
1071 }]
1072 )
1073 });
1074
1075 // Ensure publishing empty diagnostics twice only results in one update event.
1076 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1077 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1078 version: None,
1079 diagnostics: Default::default(),
1080 });
1081 assert_eq!(
1082 events.next().await.unwrap(),
1083 Event::DiagnosticsUpdated {
1084 language_server_id: LanguageServerId(0),
1085 path: (worktree_id, Path::new("a.rs")).into()
1086 }
1087 );
1088
1089 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1090 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1091 version: None,
1092 diagnostics: Default::default(),
1093 });
1094 cx.executor().run_until_parked();
1095 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1096}
1097
1098#[gpui::test]
1099async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1100 init_test(cx);
1101
1102 let progress_token = "the-progress-token";
1103 let mut language = Language::new(
1104 LanguageConfig {
1105 path_suffixes: vec!["rs".to_string()],
1106 ..Default::default()
1107 },
1108 None,
1109 );
1110 let mut fake_servers = language
1111 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1112 disk_based_diagnostics_sources: vec!["disk".into()],
1113 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1114 ..Default::default()
1115 }))
1116 .await;
1117
1118 let fs = FakeFs::new(cx.executor());
1119 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1120
1121 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1122 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1123
1124 let buffer = project
1125 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1126 .await
1127 .unwrap();
1128
1129 // Simulate diagnostics starting to update.
1130 let fake_server = fake_servers.next().await.unwrap();
1131 fake_server.start_progress(progress_token).await;
1132
1133 // Restart the server before the diagnostics finish updating.
1134 project.update(cx, |project, cx| {
1135 project.restart_language_servers_for_buffers([buffer], cx);
1136 });
1137 let mut events = cx.events(&project);
1138
1139 // Simulate the newly started server sending more diagnostics.
1140 let fake_server = fake_servers.next().await.unwrap();
1141 assert_eq!(
1142 events.next().await.unwrap(),
1143 Event::LanguageServerAdded(LanguageServerId(1))
1144 );
1145 fake_server.start_progress(progress_token).await;
1146 assert_eq!(
1147 events.next().await.unwrap(),
1148 Event::DiskBasedDiagnosticsStarted {
1149 language_server_id: LanguageServerId(1)
1150 }
1151 );
1152 project.update(cx, |project, _| {
1153 assert_eq!(
1154 project
1155 .language_servers_running_disk_based_diagnostics()
1156 .collect::<Vec<_>>(),
1157 [LanguageServerId(1)]
1158 );
1159 });
1160
1161 // All diagnostics are considered done, despite the old server's diagnostic
1162 // task never completing.
1163 fake_server.end_progress(progress_token);
1164 assert_eq!(
1165 events.next().await.unwrap(),
1166 Event::DiskBasedDiagnosticsFinished {
1167 language_server_id: LanguageServerId(1)
1168 }
1169 );
1170 project.update(cx, |project, _| {
1171 assert_eq!(
1172 project
1173 .language_servers_running_disk_based_diagnostics()
1174 .collect::<Vec<_>>(),
1175 [LanguageServerId(0); 0]
1176 );
1177 });
1178}
1179
1180#[gpui::test]
1181async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1182 init_test(cx);
1183
1184 let mut language = Language::new(
1185 LanguageConfig {
1186 path_suffixes: vec!["rs".to_string()],
1187 ..Default::default()
1188 },
1189 None,
1190 );
1191 let mut fake_servers = language
1192 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1193 ..Default::default()
1194 }))
1195 .await;
1196
1197 let fs = FakeFs::new(cx.executor());
1198 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1199
1200 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1201 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1202
1203 let buffer = project
1204 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1205 .await
1206 .unwrap();
1207
1208 // Publish diagnostics
1209 let fake_server = fake_servers.next().await.unwrap();
1210 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1211 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1212 version: None,
1213 diagnostics: vec![lsp::Diagnostic {
1214 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1215 severity: Some(lsp::DiagnosticSeverity::ERROR),
1216 message: "the message".to_string(),
1217 ..Default::default()
1218 }],
1219 });
1220
1221 cx.executor().run_until_parked();
1222 buffer.update(cx, |buffer, _| {
1223 assert_eq!(
1224 buffer
1225 .snapshot()
1226 .diagnostics_in_range::<_, usize>(0..1, false)
1227 .map(|entry| entry.diagnostic.message.clone())
1228 .collect::<Vec<_>>(),
1229 ["the message".to_string()]
1230 );
1231 });
1232 project.update(cx, |project, cx| {
1233 assert_eq!(
1234 project.diagnostic_summary(false, cx),
1235 DiagnosticSummary {
1236 error_count: 1,
1237 warning_count: 0,
1238 }
1239 );
1240 });
1241
1242 project.update(cx, |project, cx| {
1243 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1244 });
1245
1246 // The diagnostics are cleared.
1247 cx.executor().run_until_parked();
1248 buffer.update(cx, |buffer, _| {
1249 assert_eq!(
1250 buffer
1251 .snapshot()
1252 .diagnostics_in_range::<_, usize>(0..1, false)
1253 .map(|entry| entry.diagnostic.message.clone())
1254 .collect::<Vec<_>>(),
1255 Vec::<String>::new(),
1256 );
1257 });
1258 project.update(cx, |project, cx| {
1259 assert_eq!(
1260 project.diagnostic_summary(false, cx),
1261 DiagnosticSummary {
1262 error_count: 0,
1263 warning_count: 0,
1264 }
1265 );
1266 });
1267}
1268
1269#[gpui::test]
1270async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1271 init_test(cx);
1272
1273 let mut language = Language::new(
1274 LanguageConfig {
1275 path_suffixes: vec!["rs".to_string()],
1276 ..Default::default()
1277 },
1278 None,
1279 );
1280 let mut fake_servers = language
1281 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1282 name: "the-lsp",
1283 ..Default::default()
1284 }))
1285 .await;
1286
1287 let fs = FakeFs::new(cx.executor());
1288 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1289
1290 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1291 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1292
1293 let buffer = project
1294 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1295 .await
1296 .unwrap();
1297
1298 // Before restarting the server, report diagnostics with an unknown buffer version.
1299 let fake_server = fake_servers.next().await.unwrap();
1300 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1301 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1302 version: Some(10000),
1303 diagnostics: Vec::new(),
1304 });
1305 cx.executor().run_until_parked();
1306
1307 project.update(cx, |project, cx| {
1308 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1309 });
1310 let mut fake_server = fake_servers.next().await.unwrap();
1311 let notification = fake_server
1312 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1313 .await
1314 .text_document;
1315 assert_eq!(notification.version, 0);
1316}
1317
1318#[gpui::test]
1319async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1320 init_test(cx);
1321
1322 let mut rust = Language::new(
1323 LanguageConfig {
1324 name: Arc::from("Rust"),
1325 path_suffixes: vec!["rs".to_string()],
1326 ..Default::default()
1327 },
1328 None,
1329 );
1330 let mut fake_rust_servers = rust
1331 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1332 name: "rust-lsp",
1333 ..Default::default()
1334 }))
1335 .await;
1336 let mut js = Language::new(
1337 LanguageConfig {
1338 name: Arc::from("JavaScript"),
1339 path_suffixes: vec!["js".to_string()],
1340 ..Default::default()
1341 },
1342 None,
1343 );
1344 let mut fake_js_servers = js
1345 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1346 name: "js-lsp",
1347 ..Default::default()
1348 }))
1349 .await;
1350
1351 let fs = FakeFs::new(cx.executor());
1352 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1353 .await;
1354
1355 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1356 project.update(cx, |project, _| {
1357 project.languages.add(Arc::new(rust));
1358 project.languages.add(Arc::new(js));
1359 });
1360
1361 let _rs_buffer = project
1362 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1363 .await
1364 .unwrap();
1365 let _js_buffer = project
1366 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1367 .await
1368 .unwrap();
1369
1370 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1371 assert_eq!(
1372 fake_rust_server_1
1373 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1374 .await
1375 .text_document
1376 .uri
1377 .as_str(),
1378 "file:///dir/a.rs"
1379 );
1380
1381 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1382 assert_eq!(
1383 fake_js_server
1384 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1385 .await
1386 .text_document
1387 .uri
1388 .as_str(),
1389 "file:///dir/b.js"
1390 );
1391
1392 // Disable Rust language server, ensuring only that server gets stopped.
1393 cx.update(|cx| {
1394 cx.update_global(|settings: &mut SettingsStore, cx| {
1395 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1396 settings.languages.insert(
1397 Arc::from("Rust"),
1398 LanguageSettingsContent {
1399 enable_language_server: Some(false),
1400 ..Default::default()
1401 },
1402 );
1403 });
1404 })
1405 });
1406 fake_rust_server_1
1407 .receive_notification::<lsp::notification::Exit>()
1408 .await;
1409
1410 // Enable Rust and disable JavaScript language servers, ensuring that the
1411 // former gets started again and that the latter stops.
1412 cx.update(|cx| {
1413 cx.update_global(|settings: &mut SettingsStore, cx| {
1414 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1415 settings.languages.insert(
1416 Arc::from("Rust"),
1417 LanguageSettingsContent {
1418 enable_language_server: Some(true),
1419 ..Default::default()
1420 },
1421 );
1422 settings.languages.insert(
1423 Arc::from("JavaScript"),
1424 LanguageSettingsContent {
1425 enable_language_server: Some(false),
1426 ..Default::default()
1427 },
1428 );
1429 });
1430 })
1431 });
1432 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1433 assert_eq!(
1434 fake_rust_server_2
1435 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1436 .await
1437 .text_document
1438 .uri
1439 .as_str(),
1440 "file:///dir/a.rs"
1441 );
1442 fake_js_server
1443 .receive_notification::<lsp::notification::Exit>()
1444 .await;
1445}
1446
1447#[gpui::test(iterations = 3)]
1448async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1449 init_test(cx);
1450
1451 let mut language = Language::new(
1452 LanguageConfig {
1453 name: "Rust".into(),
1454 path_suffixes: vec!["rs".to_string()],
1455 ..Default::default()
1456 },
1457 Some(tree_sitter_rust::language()),
1458 );
1459 let mut fake_servers = language
1460 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1461 disk_based_diagnostics_sources: vec!["disk".into()],
1462 ..Default::default()
1463 }))
1464 .await;
1465
1466 let text = "
1467 fn a() { A }
1468 fn b() { BB }
1469 fn c() { CCC }
1470 "
1471 .unindent();
1472
1473 let fs = FakeFs::new(cx.executor());
1474 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1475
1476 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1477 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1478
1479 let buffer = project
1480 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1481 .await
1482 .unwrap();
1483
1484 let mut fake_server = fake_servers.next().await.unwrap();
1485 let open_notification = fake_server
1486 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1487 .await;
1488
1489 // Edit the buffer, moving the content down
1490 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1491 let change_notification_1 = fake_server
1492 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1493 .await;
1494 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1495
1496 // Report some diagnostics for the initial version of the buffer
1497 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1498 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1499 version: Some(open_notification.text_document.version),
1500 diagnostics: vec![
1501 lsp::Diagnostic {
1502 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1503 severity: Some(DiagnosticSeverity::ERROR),
1504 message: "undefined variable 'A'".to_string(),
1505 source: Some("disk".to_string()),
1506 ..Default::default()
1507 },
1508 lsp::Diagnostic {
1509 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1510 severity: Some(DiagnosticSeverity::ERROR),
1511 message: "undefined variable 'BB'".to_string(),
1512 source: Some("disk".to_string()),
1513 ..Default::default()
1514 },
1515 lsp::Diagnostic {
1516 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1517 severity: Some(DiagnosticSeverity::ERROR),
1518 source: Some("disk".to_string()),
1519 message: "undefined variable 'CCC'".to_string(),
1520 ..Default::default()
1521 },
1522 ],
1523 });
1524
1525 // The diagnostics have moved down since they were created.
1526 cx.executor().run_until_parked();
1527 buffer.update(cx, |buffer, _| {
1528 assert_eq!(
1529 buffer
1530 .snapshot()
1531 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1532 .collect::<Vec<_>>(),
1533 &[
1534 DiagnosticEntry {
1535 range: Point::new(3, 9)..Point::new(3, 11),
1536 diagnostic: Diagnostic {
1537 source: Some("disk".into()),
1538 severity: DiagnosticSeverity::ERROR,
1539 message: "undefined variable 'BB'".to_string(),
1540 is_disk_based: true,
1541 group_id: 1,
1542 is_primary: true,
1543 ..Default::default()
1544 },
1545 },
1546 DiagnosticEntry {
1547 range: Point::new(4, 9)..Point::new(4, 12),
1548 diagnostic: Diagnostic {
1549 source: Some("disk".into()),
1550 severity: DiagnosticSeverity::ERROR,
1551 message: "undefined variable 'CCC'".to_string(),
1552 is_disk_based: true,
1553 group_id: 2,
1554 is_primary: true,
1555 ..Default::default()
1556 }
1557 }
1558 ]
1559 );
1560 assert_eq!(
1561 chunks_with_diagnostics(buffer, 0..buffer.len()),
1562 [
1563 ("\n\nfn a() { ".to_string(), None),
1564 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1565 (" }\nfn b() { ".to_string(), None),
1566 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1567 (" }\nfn c() { ".to_string(), None),
1568 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1569 (" }\n".to_string(), None),
1570 ]
1571 );
1572 assert_eq!(
1573 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1574 [
1575 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1576 (" }\nfn c() { ".to_string(), None),
1577 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1578 ]
1579 );
1580 });
1581
1582 // Ensure overlapping diagnostics are highlighted correctly.
1583 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1584 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1585 version: Some(open_notification.text_document.version),
1586 diagnostics: vec![
1587 lsp::Diagnostic {
1588 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1589 severity: Some(DiagnosticSeverity::ERROR),
1590 message: "undefined variable 'A'".to_string(),
1591 source: Some("disk".to_string()),
1592 ..Default::default()
1593 },
1594 lsp::Diagnostic {
1595 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1596 severity: Some(DiagnosticSeverity::WARNING),
1597 message: "unreachable statement".to_string(),
1598 source: Some("disk".to_string()),
1599 ..Default::default()
1600 },
1601 ],
1602 });
1603
1604 cx.executor().run_until_parked();
1605 buffer.update(cx, |buffer, _| {
1606 assert_eq!(
1607 buffer
1608 .snapshot()
1609 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1610 .collect::<Vec<_>>(),
1611 &[
1612 DiagnosticEntry {
1613 range: Point::new(2, 9)..Point::new(2, 12),
1614 diagnostic: Diagnostic {
1615 source: Some("disk".into()),
1616 severity: DiagnosticSeverity::WARNING,
1617 message: "unreachable statement".to_string(),
1618 is_disk_based: true,
1619 group_id: 4,
1620 is_primary: true,
1621 ..Default::default()
1622 }
1623 },
1624 DiagnosticEntry {
1625 range: Point::new(2, 9)..Point::new(2, 10),
1626 diagnostic: Diagnostic {
1627 source: Some("disk".into()),
1628 severity: DiagnosticSeverity::ERROR,
1629 message: "undefined variable 'A'".to_string(),
1630 is_disk_based: true,
1631 group_id: 3,
1632 is_primary: true,
1633 ..Default::default()
1634 },
1635 }
1636 ]
1637 );
1638 assert_eq!(
1639 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1640 [
1641 ("fn a() { ".to_string(), None),
1642 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1643 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1644 ("\n".to_string(), None),
1645 ]
1646 );
1647 assert_eq!(
1648 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1649 [
1650 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1651 ("\n".to_string(), None),
1652 ]
1653 );
1654 });
1655
1656 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1657 // changes since the last save.
1658 buffer.update(cx, |buffer, cx| {
1659 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1660 buffer.edit(
1661 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1662 None,
1663 cx,
1664 );
1665 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1666 });
1667 let change_notification_2 = fake_server
1668 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1669 .await;
1670 assert!(
1671 change_notification_2.text_document.version > change_notification_1.text_document.version
1672 );
1673
1674 // Handle out-of-order diagnostics
1675 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1676 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1677 version: Some(change_notification_2.text_document.version),
1678 diagnostics: vec![
1679 lsp::Diagnostic {
1680 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1681 severity: Some(DiagnosticSeverity::ERROR),
1682 message: "undefined variable 'BB'".to_string(),
1683 source: Some("disk".to_string()),
1684 ..Default::default()
1685 },
1686 lsp::Diagnostic {
1687 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1688 severity: Some(DiagnosticSeverity::WARNING),
1689 message: "undefined variable 'A'".to_string(),
1690 source: Some("disk".to_string()),
1691 ..Default::default()
1692 },
1693 ],
1694 });
1695
1696 cx.executor().run_until_parked();
1697 buffer.update(cx, |buffer, _| {
1698 assert_eq!(
1699 buffer
1700 .snapshot()
1701 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1702 .collect::<Vec<_>>(),
1703 &[
1704 DiagnosticEntry {
1705 range: Point::new(2, 21)..Point::new(2, 22),
1706 diagnostic: Diagnostic {
1707 source: Some("disk".into()),
1708 severity: DiagnosticSeverity::WARNING,
1709 message: "undefined variable 'A'".to_string(),
1710 is_disk_based: true,
1711 group_id: 6,
1712 is_primary: true,
1713 ..Default::default()
1714 }
1715 },
1716 DiagnosticEntry {
1717 range: Point::new(3, 9)..Point::new(3, 14),
1718 diagnostic: Diagnostic {
1719 source: Some("disk".into()),
1720 severity: DiagnosticSeverity::ERROR,
1721 message: "undefined variable 'BB'".to_string(),
1722 is_disk_based: true,
1723 group_id: 5,
1724 is_primary: true,
1725 ..Default::default()
1726 },
1727 }
1728 ]
1729 );
1730 });
1731}
1732
1733#[gpui::test]
1734async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1735 init_test(cx);
1736
1737 let text = concat!(
1738 "let one = ;\n", //
1739 "let two = \n",
1740 "let three = 3;\n",
1741 );
1742
1743 let fs = FakeFs::new(cx.executor());
1744 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1745
1746 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1747 let buffer = project
1748 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1749 .await
1750 .unwrap();
1751
1752 project.update(cx, |project, cx| {
1753 project
1754 .update_buffer_diagnostics(
1755 &buffer,
1756 LanguageServerId(0),
1757 None,
1758 vec![
1759 DiagnosticEntry {
1760 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1761 diagnostic: Diagnostic {
1762 severity: DiagnosticSeverity::ERROR,
1763 message: "syntax error 1".to_string(),
1764 ..Default::default()
1765 },
1766 },
1767 DiagnosticEntry {
1768 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1769 diagnostic: Diagnostic {
1770 severity: DiagnosticSeverity::ERROR,
1771 message: "syntax error 2".to_string(),
1772 ..Default::default()
1773 },
1774 },
1775 ],
1776 cx,
1777 )
1778 .unwrap();
1779 });
1780
1781 // An empty range is extended forward to include the following character.
1782 // At the end of a line, an empty range is extended backward to include
1783 // the preceding character.
1784 buffer.update(cx, |buffer, _| {
1785 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1786 assert_eq!(
1787 chunks
1788 .iter()
1789 .map(|(s, d)| (s.as_str(), *d))
1790 .collect::<Vec<_>>(),
1791 &[
1792 ("let one = ", None),
1793 (";", Some(DiagnosticSeverity::ERROR)),
1794 ("\nlet two =", None),
1795 (" ", Some(DiagnosticSeverity::ERROR)),
1796 ("\nlet three = 3;\n", None)
1797 ]
1798 );
1799 });
1800}
1801
1802#[gpui::test]
1803async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1804 init_test(cx);
1805
1806 let fs = FakeFs::new(cx.executor());
1807 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1808 .await;
1809
1810 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1811
1812 project.update(cx, |project, cx| {
1813 project
1814 .update_diagnostic_entries(
1815 LanguageServerId(0),
1816 Path::new("/dir/a.rs").to_owned(),
1817 None,
1818 vec![DiagnosticEntry {
1819 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1820 diagnostic: Diagnostic {
1821 severity: DiagnosticSeverity::ERROR,
1822 is_primary: true,
1823 message: "syntax error a1".to_string(),
1824 ..Default::default()
1825 },
1826 }],
1827 cx,
1828 )
1829 .unwrap();
1830 project
1831 .update_diagnostic_entries(
1832 LanguageServerId(1),
1833 Path::new("/dir/a.rs").to_owned(),
1834 None,
1835 vec![DiagnosticEntry {
1836 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1837 diagnostic: Diagnostic {
1838 severity: DiagnosticSeverity::ERROR,
1839 is_primary: true,
1840 message: "syntax error b1".to_string(),
1841 ..Default::default()
1842 },
1843 }],
1844 cx,
1845 )
1846 .unwrap();
1847
1848 assert_eq!(
1849 project.diagnostic_summary(false, cx),
1850 DiagnosticSummary {
1851 error_count: 2,
1852 warning_count: 0,
1853 }
1854 );
1855 });
1856}
1857
1858#[gpui::test]
1859async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1860 init_test(cx);
1861
1862 let mut language = Language::new(
1863 LanguageConfig {
1864 name: "Rust".into(),
1865 path_suffixes: vec!["rs".to_string()],
1866 ..Default::default()
1867 },
1868 Some(tree_sitter_rust::language()),
1869 );
1870 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1871
1872 let text = "
1873 fn a() {
1874 f1();
1875 }
1876 fn b() {
1877 f2();
1878 }
1879 fn c() {
1880 f3();
1881 }
1882 "
1883 .unindent();
1884
1885 let fs = FakeFs::new(cx.executor());
1886 fs.insert_tree(
1887 "/dir",
1888 json!({
1889 "a.rs": text.clone(),
1890 }),
1891 )
1892 .await;
1893
1894 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1895 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1896 let buffer = project
1897 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1898 .await
1899 .unwrap();
1900
1901 let mut fake_server = fake_servers.next().await.unwrap();
1902 let lsp_document_version = fake_server
1903 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1904 .await
1905 .text_document
1906 .version;
1907
1908 // Simulate editing the buffer after the language server computes some edits.
1909 buffer.update(cx, |buffer, cx| {
1910 buffer.edit(
1911 [(
1912 Point::new(0, 0)..Point::new(0, 0),
1913 "// above first function\n",
1914 )],
1915 None,
1916 cx,
1917 );
1918 buffer.edit(
1919 [(
1920 Point::new(2, 0)..Point::new(2, 0),
1921 " // inside first function\n",
1922 )],
1923 None,
1924 cx,
1925 );
1926 buffer.edit(
1927 [(
1928 Point::new(6, 4)..Point::new(6, 4),
1929 "// inside second function ",
1930 )],
1931 None,
1932 cx,
1933 );
1934
1935 assert_eq!(
1936 buffer.text(),
1937 "
1938 // above first function
1939 fn a() {
1940 // inside first function
1941 f1();
1942 }
1943 fn b() {
1944 // inside second function f2();
1945 }
1946 fn c() {
1947 f3();
1948 }
1949 "
1950 .unindent()
1951 );
1952 });
1953
1954 let edits = project
1955 .update(cx, |project, cx| {
1956 project.edits_from_lsp(
1957 &buffer,
1958 vec![
1959 // replace body of first function
1960 lsp::TextEdit {
1961 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1962 new_text: "
1963 fn a() {
1964 f10();
1965 }
1966 "
1967 .unindent(),
1968 },
1969 // edit inside second function
1970 lsp::TextEdit {
1971 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1972 new_text: "00".into(),
1973 },
1974 // edit inside third function via two distinct edits
1975 lsp::TextEdit {
1976 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1977 new_text: "4000".into(),
1978 },
1979 lsp::TextEdit {
1980 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1981 new_text: "".into(),
1982 },
1983 ],
1984 LanguageServerId(0),
1985 Some(lsp_document_version),
1986 cx,
1987 )
1988 })
1989 .await
1990 .unwrap();
1991
1992 buffer.update(cx, |buffer, cx| {
1993 for (range, new_text) in edits {
1994 buffer.edit([(range, new_text)], None, cx);
1995 }
1996 assert_eq!(
1997 buffer.text(),
1998 "
1999 // above first function
2000 fn a() {
2001 // inside first function
2002 f10();
2003 }
2004 fn b() {
2005 // inside second function f200();
2006 }
2007 fn c() {
2008 f4000();
2009 }
2010 "
2011 .unindent()
2012 );
2013 });
2014}
2015
2016#[gpui::test]
2017async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
2018 init_test(cx);
2019
2020 let text = "
2021 use a::b;
2022 use a::c;
2023
2024 fn f() {
2025 b();
2026 c();
2027 }
2028 "
2029 .unindent();
2030
2031 let fs = FakeFs::new(cx.executor());
2032 fs.insert_tree(
2033 "/dir",
2034 json!({
2035 "a.rs": text.clone(),
2036 }),
2037 )
2038 .await;
2039
2040 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2041 let buffer = project
2042 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2043 .await
2044 .unwrap();
2045
2046 // Simulate the language server sending us a small edit in the form of a very large diff.
2047 // Rust-analyzer does this when performing a merge-imports code action.
2048 let edits = project
2049 .update(cx, |project, cx| {
2050 project.edits_from_lsp(
2051 &buffer,
2052 [
2053 // Replace the first use statement without editing the semicolon.
2054 lsp::TextEdit {
2055 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2056 new_text: "a::{b, c}".into(),
2057 },
2058 // Reinsert the remainder of the file between the semicolon and the final
2059 // newline of the file.
2060 lsp::TextEdit {
2061 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2062 new_text: "\n\n".into(),
2063 },
2064 lsp::TextEdit {
2065 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2066 new_text: "
2067 fn f() {
2068 b();
2069 c();
2070 }"
2071 .unindent(),
2072 },
2073 // Delete everything after the first newline of the file.
2074 lsp::TextEdit {
2075 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2076 new_text: "".into(),
2077 },
2078 ],
2079 LanguageServerId(0),
2080 None,
2081 cx,
2082 )
2083 })
2084 .await
2085 .unwrap();
2086
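    // The four overlapping LSP edits should be minimized into just two buffer edits.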
2087 buffer.update(cx, |buffer, cx| {
2088 let edits = edits
2089 .into_iter()
2090 .map(|(range, text)| {
2091 (
2092 range.start.to_point(buffer)..range.end.to_point(buffer),
2093 text,
2094 )
2095 })
2096 .collect::<Vec<_>>();
2097
2098 assert_eq!(
2099 edits,
2100 [
2101 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2102 (Point::new(1, 0)..Point::new(2, 0), "".into())
2103 ]
2104 );
2105
2106 for (range, new_text) in edits {
2107 buffer.edit([(range, new_text)], None, cx);
2108 }
2109 assert_eq!(
2110 buffer.text(),
2111 "
2112 use a::{b, c};
2113
2114 fn f() {
2115 b();
2116 c();
2117 }
2118 "
2119 .unindent()
2120 );
2121 });
2122}
2123
2124#[gpui::test]
2125async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2126 init_test(cx);
2127
2128 let text = "
2129 use a::b;
2130 use a::c;
2131
2132 fn f() {
2133 b();
2134 c();
2135 }
2136 "
2137 .unindent();
2138
2139 let fs = FakeFs::new(cx.executor());
2140 fs.insert_tree(
2141 "/dir",
2142 json!({
2143 "a.rs": text.clone(),
2144 }),
2145 )
2146 .await;
2147
2148 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2149 let buffer = project
2150 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2151 .await
2152 .unwrap();
2153
2154 // Simulate the language server sending us edits in a non-ordered fashion,
2155 // with ranges sometimes being inverted or pointing to invalid locations.
2156 let edits = project
2157 .update(cx, |project, cx| {
2158 project.edits_from_lsp(
2159 &buffer,
2160 [
2161 lsp::TextEdit {
2162 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2163 new_text: "\n\n".into(),
2164 },
2165 lsp::TextEdit {
2166 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2167 new_text: "a::{b, c}".into(),
2168 },
2169 lsp::TextEdit {
2170 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2171 new_text: "".into(),
2172 },
2173 lsp::TextEdit {
2174 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2175 new_text: "
2176 fn f() {
2177 b();
2178 c();
2179 }"
2180 .unindent(),
2181 },
2182 ],
2183 LanguageServerId(0),
2184 None,
2185 cx,
2186 )
2187 })
2188 .await
2189 .unwrap();
2190
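    // Despite the inverted and out-of-bounds ranges, the edits should resolve to
    // the same two minimal buffer edits as in the previous test.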
2191 buffer.update(cx, |buffer, cx| {
2192 let edits = edits
2193 .into_iter()
2194 .map(|(range, text)| {
2195 (
2196 range.start.to_point(buffer)..range.end.to_point(buffer),
2197 text,
2198 )
2199 })
2200 .collect::<Vec<_>>();
2201
2202 assert_eq!(
2203 edits,
2204 [
2205 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2206 (Point::new(1, 0)..Point::new(2, 0), "".into())
2207 ]
2208 );
2209
2210 for (range, new_text) in edits {
2211 buffer.edit([(range, new_text)], None, cx);
2212 }
2213 assert_eq!(
2214 buffer.text(),
2215 "
2216 use a::{b, c};
2217
2218 fn f() {
2219 b();
2220 c();
2221 }
2222 "
2223 .unindent()
2224 );
2225 });
2226}
2227
2228fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2229 buffer: &Buffer,
2230 range: Range<T>,
2231) -> Vec<(String, Option<DiagnosticSeverity>)> {
2232 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2233 for chunk in buffer.snapshot().chunks(range, true) {
2234 if chunks.last().map_or(false, |prev_chunk| {
2235 prev_chunk.1 == chunk.diagnostic_severity
2236 }) {
2237 chunks.last_mut().unwrap().0.push_str(chunk.text);
2238 } else {
2239 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2240 }
2241 }
2242 chunks
2243}
2244
2245#[gpui::test(iterations = 10)]
2246async fn test_definition(cx: &mut gpui::TestAppContext) {
2247 init_test(cx);
2248
2249 let mut language = Language::new(
2250 LanguageConfig {
2251 name: "Rust".into(),
2252 path_suffixes: vec!["rs".to_string()],
2253 ..Default::default()
2254 },
2255 Some(tree_sitter_rust::language()),
2256 );
2257 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2258
2259 let fs = FakeFs::new(cx.executor());
2260 fs.insert_tree(
2261 "/dir",
2262 json!({
2263 "a.rs": "const fn a() { A }",
2264 "b.rs": "const y: i32 = crate::a()",
2265 }),
2266 )
2267 .await;
2268
2269 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2270 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2271
2272 let buffer = project
2273 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2274 .await
2275 .unwrap();
2276
2277 let fake_server = fake_servers.next().await.unwrap();
2278 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2279 let params = params.text_document_position_params;
2280 assert_eq!(
2281 params.text_document.uri.to_file_path().unwrap(),
2282 Path::new("/dir/b.rs"),
2283 );
2284 assert_eq!(params.position, lsp::Position::new(0, 22));
2285
2286 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2287 lsp::Location::new(
2288 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2289 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2290 ),
2291 )))
2292 });
2293
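    // Request the definition of the symbol at offset 22, which is `a` in `crate::a()`.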
2294 let mut definitions = project
2295 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2296 .await
2297 .unwrap();
2298
    // Assert that no new language server was started.
2300 cx.executor().run_until_parked();
2301 assert!(fake_servers.try_next().is_err());
2302
2303 assert_eq!(definitions.len(), 1);
2304 let definition = definitions.pop().unwrap();
2305 cx.update(|cx| {
2306 let target_buffer = definition.target.buffer.read(cx);
2307 assert_eq!(
2308 target_buffer
2309 .file()
2310 .unwrap()
2311 .as_local()
2312 .unwrap()
2313 .abs_path(cx),
2314 Path::new("/dir/a.rs"),
2315 );
2316 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2317 assert_eq!(
2318 list_worktrees(&project, cx),
2319 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2320 );
2321
2322 drop(definition);
2323 });
2324 cx.update(|cx| {
2325 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2326 });
2327
2328 fn list_worktrees<'a>(
2329 project: &'a Model<Project>,
2330 cx: &'a AppContext,
2331 ) -> Vec<(&'a Path, bool)> {
2332 project
2333 .read(cx)
2334 .worktrees()
2335 .map(|worktree| {
2336 let worktree = worktree.read(cx);
2337 (
2338 worktree.as_local().unwrap().abs_path().as_ref(),
2339 worktree.is_visible(),
2340 )
2341 })
2342 .collect::<Vec<_>>()
2343 }
2344}
2345
2346#[gpui::test]
2347async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2348 init_test(cx);
2349
2350 let mut language = Language::new(
2351 LanguageConfig {
2352 name: "TypeScript".into(),
2353 path_suffixes: vec!["ts".to_string()],
2354 ..Default::default()
2355 },
2356 Some(tree_sitter_typescript::language_typescript()),
2357 );
2358 let mut fake_language_servers = language
2359 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2360 capabilities: lsp::ServerCapabilities {
2361 completion_provider: Some(lsp::CompletionOptions {
2362 trigger_characters: Some(vec![":".to_string()]),
2363 ..Default::default()
2364 }),
2365 ..Default::default()
2366 },
2367 ..Default::default()
2368 }))
2369 .await;
2370
2371 let fs = FakeFs::new(cx.executor());
2372 fs.insert_tree(
2373 "/dir",
2374 json!({
2375 "a.ts": "",
2376 }),
2377 )
2378 .await;
2379
2380 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2381 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2382 let buffer = project
2383 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2384 .await
2385 .unwrap();
2386
2387 let fake_server = fake_language_servers.next().await.unwrap();
2388
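    // The completion item provides `insert_text` but no edit range, so the replaced
    // range should be the word preceding the cursor (`fqn`).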
2389 let text = "let a = b.fqn";
2390 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2391 let completions = project.update(cx, |project, cx| {
2392 project.completions(&buffer, text.len(), cx)
2393 });
2394
2395 fake_server
2396 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2397 Ok(Some(lsp::CompletionResponse::Array(vec![
2398 lsp::CompletionItem {
2399 label: "fullyQualifiedName?".into(),
2400 insert_text: Some("fullyQualifiedName".into()),
2401 ..Default::default()
2402 },
2403 ])))
2404 })
2405 .next()
2406 .await;
2407 let completions = completions.await.unwrap();
2408 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2409 assert_eq!(completions.len(), 1);
2410 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2411 assert_eq!(
2412 completions[0].old_range.to_offset(&snapshot),
2413 text.len() - 3..text.len()
2414 );
2415
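    // Inside a string literal, the replaced range should cover the text before the
    // cursor (`cmp`) and stop before the closing quote.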
2416 let text = "let a = \"atoms/cmp\"";
2417 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2418 let completions = project.update(cx, |project, cx| {
2419 project.completions(&buffer, text.len() - 1, cx)
2420 });
2421
2422 fake_server
2423 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2424 Ok(Some(lsp::CompletionResponse::Array(vec![
2425 lsp::CompletionItem {
2426 label: "component".into(),
2427 ..Default::default()
2428 },
2429 ])))
2430 })
2431 .next()
2432 .await;
2433 let completions = completions.await.unwrap();
2434 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2435 assert_eq!(completions.len(), 1);
2436 assert_eq!(completions[0].new_text, "component");
2437 assert_eq!(
2438 completions[0].old_range.to_offset(&snapshot),
2439 text.len() - 4..text.len() - 1
2440 );
2441}
2442
2443#[gpui::test]
2444async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2445 init_test(cx);
2446
2447 let mut language = Language::new(
2448 LanguageConfig {
2449 name: "TypeScript".into(),
2450 path_suffixes: vec!["ts".to_string()],
2451 ..Default::default()
2452 },
2453 Some(tree_sitter_typescript::language_typescript()),
2454 );
2455 let mut fake_language_servers = language
2456 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2457 capabilities: lsp::ServerCapabilities {
2458 completion_provider: Some(lsp::CompletionOptions {
2459 trigger_characters: Some(vec![":".to_string()]),
2460 ..Default::default()
2461 }),
2462 ..Default::default()
2463 },
2464 ..Default::default()
2465 }))
2466 .await;
2467
2468 let fs = FakeFs::new(cx.executor());
2469 fs.insert_tree(
2470 "/dir",
2471 json!({
2472 "a.ts": "",
2473 }),
2474 )
2475 .await;
2476
2477 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2478 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2479 let buffer = project
2480 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2481 .await
2482 .unwrap();
2483
2484 let fake_server = fake_language_servers.next().await.unwrap();
2485
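    // The server's insert text contains `\r` and `\r\n` line endings, which should
    // be normalized to `\n` in the resulting completion text.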
2486 let text = "let a = b.fqn";
2487 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2488 let completions = project.update(cx, |project, cx| {
2489 project.completions(&buffer, text.len(), cx)
2490 });
2491
2492 fake_server
2493 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2494 Ok(Some(lsp::CompletionResponse::Array(vec![
2495 lsp::CompletionItem {
2496 label: "fullyQualifiedName?".into(),
2497 insert_text: Some("fully\rQualified\r\nName".into()),
2498 ..Default::default()
2499 },
2500 ])))
2501 })
2502 .next()
2503 .await;
2504 let completions = completions.await.unwrap();
2505 assert_eq!(completions.len(), 1);
2506 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2507}
2508
2509#[gpui::test(iterations = 10)]
2510async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2511 init_test(cx);
2512
2513 let mut language = Language::new(
2514 LanguageConfig {
2515 name: "TypeScript".into(),
2516 path_suffixes: vec!["ts".to_string()],
2517 ..Default::default()
2518 },
2519 None,
2520 );
2521 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2522
2523 let fs = FakeFs::new(cx.executor());
2524 fs.insert_tree(
2525 "/dir",
2526 json!({
2527 "a.ts": "a",
2528 }),
2529 )
2530 .await;
2531
2532 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2533 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2534 let buffer = project
2535 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2536 .await
2537 .unwrap();
2538
2539 let fake_server = fake_language_servers.next().await.unwrap();
2540
    // The language server returns code actions that contain commands, not edits.
2542 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2543 fake_server
2544 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2545 Ok(Some(vec![
2546 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2547 title: "The code action".into(),
2548 command: Some(lsp::Command {
2549 title: "The command".into(),
2550 command: "_the/command".into(),
2551 arguments: Some(vec![json!("the-argument")]),
2552 }),
2553 ..Default::default()
2554 }),
2555 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2556 title: "two".into(),
2557 ..Default::default()
2558 }),
2559 ]))
2560 })
2561 .next()
2562 .await;
2563
2564 let action = actions.await.unwrap()[0].clone();
2565 let apply = project.update(cx, |project, cx| {
2566 project.apply_code_action(buffer.clone(), action, true, cx)
2567 });
2568
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2571 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2572 |action, _| async move { Ok(action) },
2573 );
2574
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2577 fake_server
2578 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2579 let fake = fake_server.clone();
2580 move |params, _| {
2581 assert_eq!(params.command, "_the/command");
2582 let fake = fake.clone();
2583 async move {
2584 fake.server
2585 .request::<lsp::request::ApplyWorkspaceEdit>(
2586 lsp::ApplyWorkspaceEditParams {
2587 label: None,
2588 edit: lsp::WorkspaceEdit {
2589 changes: Some(
2590 [(
2591 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2592 vec![lsp::TextEdit {
2593 range: lsp::Range::new(
2594 lsp::Position::new(0, 0),
2595 lsp::Position::new(0, 0),
2596 ),
2597 new_text: "X".into(),
2598 }],
2599 )]
2600 .into_iter()
2601 .collect(),
2602 ),
2603 ..Default::default()
2604 },
2605 },
2606 )
2607 .await
2608 .unwrap();
2609 Ok(Some(json!(null)))
2610 }
2611 }
2612 })
2613 .next()
2614 .await;
2615
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2618 let transaction = apply.await.unwrap();
2619 assert!(transaction.0.contains_key(&buffer));
2620 buffer.update(cx, |buffer, cx| {
2621 assert_eq!(buffer.text(), "Xa");
2622 buffer.undo(cx);
2623 assert_eq!(buffer.text(), "a");
2624 });
2625}
2626
2627#[gpui::test(iterations = 10)]
2628async fn test_save_file(cx: &mut gpui::TestAppContext) {
2629 init_test(cx);
2630
2631 let fs = FakeFs::new(cx.executor());
2632 fs.insert_tree(
2633 "/dir",
2634 json!({
2635 "file1": "the old contents",
2636 }),
2637 )
2638 .await;
2639
2640 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2641 let buffer = project
2642 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2643 .await
2644 .unwrap();
2645 buffer.update(cx, |buffer, cx| {
2646 assert_eq!(buffer.text(), "the old contents");
2647 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2648 });
2649
2650 project
2651 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2652 .await
2653 .unwrap();
2654
2655 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2656 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2657}
2658
2659#[gpui::test(iterations = 30)]
2660async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2661 init_test(cx);
2662
2663 let fs = FakeFs::new(cx.executor().clone());
2664 fs.insert_tree(
2665 "/dir",
2666 json!({
2667 "file1": "the original contents",
2668 }),
2669 )
2670 .await;
2671
2672 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2673 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2674 let buffer = project
2675 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2676 .await
2677 .unwrap();
2678
2679 // Simulate buffer diffs being slow, so that they don't complete before
2680 // the next file change occurs.
2681 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2682
2683 // Change the buffer's file on disk, and then wait for the file change
2684 // to be detected by the worktree, so that the buffer starts reloading.
2685 fs.save(
2686 "/dir/file1".as_ref(),
2687 &"the first contents".into(),
2688 Default::default(),
2689 )
2690 .await
2691 .unwrap();
2692 worktree.next_event(cx);
2693
2694 // Change the buffer's file again. Depending on the random seed, the
2695 // previous file change may still be in progress.
2696 fs.save(
2697 "/dir/file1".as_ref(),
2698 &"the second contents".into(),
2699 Default::default(),
2700 )
2701 .await
2702 .unwrap();
2703 worktree.next_event(cx);
2704
2705 cx.executor().run_until_parked();
2706 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2707 buffer.read_with(cx, |buffer, _| {
2708 assert_eq!(buffer.text(), on_disk_text);
2709 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not be in conflict");
2711 });
2712}
2713
2714#[gpui::test(iterations = 30)]
2715async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2716 init_test(cx);
2717
2718 let fs = FakeFs::new(cx.executor().clone());
2719 fs.insert_tree(
2720 "/dir",
2721 json!({
2722 "file1": "the original contents",
2723 }),
2724 )
2725 .await;
2726
2727 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2728 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2729 let buffer = project
2730 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2731 .await
2732 .unwrap();
2733
2734 // Simulate buffer diffs being slow, so that they don't complete before
2735 // the next file change occurs.
2736 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2737
2738 // Change the buffer's file on disk, and then wait for the file change
2739 // to be detected by the worktree, so that the buffer starts reloading.
2740 fs.save(
2741 "/dir/file1".as_ref(),
2742 &"the first contents".into(),
2743 Default::default(),
2744 )
2745 .await
2746 .unwrap();
2747 worktree.next_event(cx);
2748
2749 cx.executor()
2750 .spawn(cx.executor().simulate_random_delay())
2751 .await;
2752
2753 // Perform a noop edit, causing the buffer's version to increase.
2754 buffer.update(cx, |buffer, cx| {
2755 buffer.edit([(0..0, " ")], None, cx);
2756 buffer.undo(cx);
2757 });
2758
2759 cx.executor().run_until_parked();
2760 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2761 buffer.read_with(cx, |buffer, _| {
2762 let buffer_text = buffer.text();
2763 if buffer_text == on_disk_text {
2764 assert!(
2765 !buffer.is_dirty() && !buffer.has_conflict(),
2766 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2767 );
2768 }
2769 // If the file change occurred while the buffer was processing the first
2770 // change, the buffer will be in a conflicting state.
2771 else {
2772 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2774 }
2775 });
2776}
2777
2778#[gpui::test]
2779async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2780 init_test(cx);
2781
2782 let fs = FakeFs::new(cx.executor());
2783 fs.insert_tree(
2784 "/dir",
2785 json!({
2786 "file1": "the old contents",
2787 }),
2788 )
2789 .await;
2790
2791 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2792 let buffer = project
2793 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2794 .await
2795 .unwrap();
2796 buffer.update(cx, |buffer, cx| {
2797 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2798 });
2799
2800 project
2801 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2802 .await
2803 .unwrap();
2804
2805 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2806 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2807}
2808
2809#[gpui::test]
2810async fn test_save_as(cx: &mut gpui::TestAppContext) {
2811 init_test(cx);
2812
2813 let fs = FakeFs::new(cx.executor());
2814 fs.insert_tree("/dir", json!({})).await;
2815
2816 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2817
2818 let languages = project.update(cx, |project, _| project.languages().clone());
2819 languages.register(
2820 "/some/path",
2821 LanguageConfig {
2822 name: "Rust".into(),
2823 path_suffixes: vec!["rs".into()],
2824 ..Default::default()
2825 },
2826 tree_sitter_rust::language(),
2827 vec![],
2828 |_| Default::default(),
2829 );
2830
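    // Create an untitled buffer; it starts out as Plain Text and is only assigned
    // the Rust language once it's saved with an `.rs` extension.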
2831 let buffer = project.update(cx, |project, cx| {
2832 project.create_buffer("", None, cx).unwrap()
2833 });
2834 buffer.update(cx, |buffer, cx| {
2835 buffer.edit([(0..0, "abc")], None, cx);
2836 assert!(buffer.is_dirty());
2837 assert!(!buffer.has_conflict());
2838 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2839 });
2840 project
2841 .update(cx, |project, cx| {
2842 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2843 })
2844 .await
2845 .unwrap();
2846 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2847
2848 cx.executor().run_until_parked();
2849 buffer.update(cx, |buffer, cx| {
2850 assert_eq!(
2851 buffer.file().unwrap().full_path(cx),
2852 Path::new("dir/file1.rs")
2853 );
2854 assert!(!buffer.is_dirty());
2855 assert!(!buffer.has_conflict());
2856 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2857 });
2858
2859 let opened_buffer = project
2860 .update(cx, |project, cx| {
2861 project.open_local_buffer("/dir/file1.rs", cx)
2862 })
2863 .await
2864 .unwrap();
2865 assert_eq!(opened_buffer, buffer);
2866}
2867
2868#[gpui::test(retries = 5)]
2869async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2870 init_test(cx);
2871 cx.executor().allow_parking();
2872
2873 let dir = temp_tree(json!({
2874 "a": {
2875 "file1": "",
2876 "file2": "",
2877 "file3": "",
2878 },
2879 "b": {
2880 "c": {
2881 "file4": "",
2882 "file5": "",
2883 }
2884 }
2885 }));
2886
2887 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2888 let rpc = project.update(cx, |p, _| p.client.clone());
2889
2890 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2891 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2892 async move { buffer.await.unwrap() }
2893 };
2894 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2895 project.update(cx, |project, cx| {
2896 let tree = project.worktrees().next().unwrap();
2897 tree.read(cx)
2898 .entry_for_path(path)
2899 .unwrap_or_else(|| panic!("no entry for path {}", path))
2900 .id
2901 })
2902 };
2903
2904 let buffer2 = buffer_for_path("a/file2", cx).await;
2905 let buffer3 = buffer_for_path("a/file3", cx).await;
2906 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2907 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2908
2909 let file2_id = id_for_path("a/file2", cx);
2910 let file3_id = id_for_path("a/file3", cx);
2911 let file4_id = id_for_path("b/c/file4", cx);
2912
2913 // Create a remote copy of this worktree.
2914 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2915
2916 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2917
2918 let updates = Arc::new(Mutex::new(Vec::new()));
2919 tree.update(cx, |tree, cx| {
2920 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2921 let updates = updates.clone();
2922 move |update| {
2923 updates.lock().push(update);
2924 async { true }
2925 }
2926 });
2927 });
2928
2929 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2930
2931 cx.executor().run_until_parked();
2932
2933 cx.update(|cx| {
2934 assert!(!buffer2.read(cx).is_dirty());
2935 assert!(!buffer3.read(cx).is_dirty());
2936 assert!(!buffer4.read(cx).is_dirty());
2937 assert!(!buffer5.read(cx).is_dirty());
2938 });
2939
2940 // Rename and delete files and directories.
2941 tree.flush_fs_events(cx).await;
2942 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2943 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2944 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2945 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2946 tree.flush_fs_events(cx).await;
2947
2948 let expected_paths = vec![
2949 "a",
2950 "a/file1",
2951 "a/file2.new",
2952 "b",
2953 "d",
2954 "d/file3",
2955 "d/file4",
2956 ];
2957
2958 cx.update(|app| {
2959 assert_eq!(
2960 tree.read(app)
2961 .paths()
2962 .map(|p| p.to_str().unwrap())
2963 .collect::<Vec<_>>(),
2964 expected_paths
2965 );
2966 });
2967
2968 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2969 assert_eq!(id_for_path("d/file3", cx), file3_id);
2970 assert_eq!(id_for_path("d/file4", cx), file4_id);
2971
2972 cx.update(|cx| {
2973 assert_eq!(
2974 buffer2.read(cx).file().unwrap().path().as_ref(),
2975 Path::new("a/file2.new")
2976 );
2977 assert_eq!(
2978 buffer3.read(cx).file().unwrap().path().as_ref(),
2979 Path::new("d/file3")
2980 );
2981 assert_eq!(
2982 buffer4.read(cx).file().unwrap().path().as_ref(),
2983 Path::new("d/file4")
2984 );
2985 assert_eq!(
2986 buffer5.read(cx).file().unwrap().path().as_ref(),
2987 Path::new("b/c/file5")
2988 );
2989
2990 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
2991 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
2992 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
2993 assert!(buffer5.read(cx).file().unwrap().is_deleted());
2994 });
2995
2996 // Update the remote worktree. Check that it becomes consistent with the
2997 // local worktree.
2998 cx.executor().run_until_parked();
2999
3000 remote.update(cx, |remote, _| {
3001 for update in updates.lock().drain(..) {
3002 remote.as_remote_mut().unwrap().update_from_remote(update);
3003 }
3004 });
3005 cx.executor().run_until_parked();
3006 remote.update(cx, |remote, _| {
3007 assert_eq!(
3008 remote
3009 .paths()
3010 .map(|p| p.to_str().unwrap())
3011 .collect::<Vec<_>>(),
3012 expected_paths
3013 );
3014 });
3015}
3016
3017#[gpui::test(iterations = 10)]
3018async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3019 init_test(cx);
3020
3021 let fs = FakeFs::new(cx.executor());
3022 fs.insert_tree(
3023 "/dir",
3024 json!({
3025 "a": {
3026 "file1": "",
3027 }
3028 }),
3029 )
3030 .await;
3031
3032 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3033 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
3034 let tree_id = tree.update(cx, |tree, _| tree.id());
3035
3036 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3037 project.update(cx, |project, cx| {
3038 let tree = project.worktrees().next().unwrap();
3039 tree.read(cx)
3040 .entry_for_path(path)
3041 .unwrap_or_else(|| panic!("no entry for path {}", path))
3042 .id
3043 })
3044 };
3045
3046 let dir_id = id_for_path("a", cx);
3047 let file_id = id_for_path("a/file1", cx);
3048 let buffer = project
3049 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3050 .await
3051 .unwrap();
3052 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3053
3054 project
3055 .update(cx, |project, cx| {
3056 project.rename_entry(dir_id, Path::new("b"), cx)
3057 })
3058 .unwrap()
3059 .await
3060 .unwrap();
3061 cx.executor().run_until_parked();
3062
3063 assert_eq!(id_for_path("b", cx), dir_id);
3064 assert_eq!(id_for_path("b/file1", cx), file_id);
3065 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3066}
3067
3068#[gpui::test]
3069async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3070 init_test(cx);
3071
3072 let fs = FakeFs::new(cx.executor());
3073 fs.insert_tree(
3074 "/dir",
3075 json!({
3076 "a.txt": "a-contents",
3077 "b.txt": "b-contents",
3078 }),
3079 )
3080 .await;
3081
3082 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3083
3084 // Spawn multiple tasks to open paths, repeating some paths.
3085 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3086 (
3087 p.open_local_buffer("/dir/a.txt", cx),
3088 p.open_local_buffer("/dir/b.txt", cx),
3089 p.open_local_buffer("/dir/a.txt", cx),
3090 )
3091 });
3092
3093 let buffer_a_1 = buffer_a_1.await.unwrap();
3094 let buffer_a_2 = buffer_a_2.await.unwrap();
3095 let buffer_b = buffer_b.await.unwrap();
3096 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3097 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3098
3099 // There is only one buffer per path.
3100 let buffer_a_id = buffer_a_1.entity_id();
3101 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3102
3103 // Open the same path again while it is still open.
3104 drop(buffer_a_1);
3105 let buffer_a_3 = project
3106 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3107 .await
3108 .unwrap();
3109
3110 // There's still only one buffer per path.
3111 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3112}
3113
3114#[gpui::test]
3115async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3116 init_test(cx);
3117
3118 let fs = FakeFs::new(cx.executor());
3119 fs.insert_tree(
3120 "/dir",
3121 json!({
3122 "file1": "abc",
3123 "file2": "def",
3124 "file3": "ghi",
3125 }),
3126 )
3127 .await;
3128
3129 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3130
3131 let buffer1 = project
3132 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3133 .await
3134 .unwrap();
3135 let events = Arc::new(Mutex::new(Vec::new()));
3136
    // Initially, the buffer isn't dirty.
3138 buffer1.update(cx, |buffer, cx| {
3139 cx.subscribe(&buffer1, {
3140 let events = events.clone();
3141 move |_, _, event, _| match event {
3142 BufferEvent::Operation(_) => {}
3143 _ => events.lock().push(event.clone()),
3144 }
3145 })
3146 .detach();
3147
3148 assert!(!buffer.is_dirty());
3149 assert!(events.lock().is_empty());
3150
3151 buffer.edit([(1..2, "")], None, cx);
3152 });
3153
    // After the first edit, the buffer is dirty and emits a dirty-changed event.
3155 buffer1.update(cx, |buffer, cx| {
3156 assert!(buffer.text() == "ac");
3157 assert!(buffer.is_dirty());
3158 assert_eq!(
3159 *events.lock(),
3160 &[language::Event::Edited, language::Event::DirtyChanged]
3161 );
3162 events.lock().clear();
3163 buffer.did_save(
3164 buffer.version(),
3165 buffer.as_rope().fingerprint(),
3166 buffer.file().unwrap().mtime(),
3167 cx,
3168 );
3169 });
3170
    // After saving, the buffer is not dirty and emits a saved event.
3172 buffer1.update(cx, |buffer, cx| {
3173 assert!(!buffer.is_dirty());
3174 assert_eq!(*events.lock(), &[language::Event::Saved]);
3175 events.lock().clear();
3176
3177 buffer.edit([(1..1, "B")], None, cx);
3178 buffer.edit([(2..2, "D")], None, cx);
3179 });
3180
    // After editing again, the buffer is dirty and emits another dirty-changed event.
3182 buffer1.update(cx, |buffer, cx| {
3183 assert!(buffer.text() == "aBDc");
3184 assert!(buffer.is_dirty());
3185 assert_eq!(
3186 *events.lock(),
3187 &[
3188 language::Event::Edited,
3189 language::Event::DirtyChanged,
3190 language::Event::Edited,
3191 ],
3192 );
3193 events.lock().clear();
3194
3195 // After restoring the buffer to its previously-saved state,
3196 // the buffer is not considered dirty anymore.
3197 buffer.edit([(1..3, "")], None, cx);
3198 assert!(buffer.text() == "ac");
3199 assert!(!buffer.is_dirty());
3200 });
3201
3202 assert_eq!(
3203 *events.lock(),
3204 &[language::Event::Edited, language::Event::DirtyChanged]
3205 );
3206
3207 // When a file is deleted, the buffer is considered dirty.
3208 let events = Arc::new(Mutex::new(Vec::new()));
3209 let buffer2 = project
3210 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3211 .await
3212 .unwrap();
3213 buffer2.update(cx, |_, cx| {
3214 cx.subscribe(&buffer2, {
3215 let events = events.clone();
3216 move |_, _, event, _| events.lock().push(event.clone())
3217 })
3218 .detach();
3219 });
3220
3221 fs.remove_file("/dir/file2".as_ref(), Default::default())
3222 .await
3223 .unwrap();
3224 cx.executor().run_until_parked();
3225 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3226 assert_eq!(
3227 *events.lock(),
3228 &[
3229 language::Event::DirtyChanged,
3230 language::Event::FileHandleChanged
3231 ]
3232 );
3233
    // When a file is deleted while its buffer is already dirty, we don't emit another dirty-changed event.
3235 let events = Arc::new(Mutex::new(Vec::new()));
3236 let buffer3 = project
3237 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3238 .await
3239 .unwrap();
3240 buffer3.update(cx, |_, cx| {
3241 cx.subscribe(&buffer3, {
3242 let events = events.clone();
3243 move |_, _, event, _| events.lock().push(event.clone())
3244 })
3245 .detach();
3246 });
3247
3248 buffer3.update(cx, |buffer, cx| {
3249 buffer.edit([(0..0, "x")], None, cx);
3250 });
3251 events.lock().clear();
3252 fs.remove_file("/dir/file3".as_ref(), Default::default())
3253 .await
3254 .unwrap();
3255 cx.executor().run_until_parked();
3256 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3257 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3258}
3259
3260#[gpui::test]
3261async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3262 init_test(cx);
3263
3264 let initial_contents = "aaa\nbbbbb\nc\n";
3265 let fs = FakeFs::new(cx.executor());
3266 fs.insert_tree(
3267 "/dir",
3268 json!({
3269 "the-file": initial_contents,
3270 }),
3271 )
3272 .await;
3273 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3274 let buffer = project
3275 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3276 .await
3277 .unwrap();
3278
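    // Create anchors near the start of the buffer's first three lines so we can
    // verify how they are relocated when the file is reloaded from disk.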
3279 let anchors = (0..3)
3280 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3281 .collect::<Vec<_>>();
3282
3283 // Change the file on disk, adding two new lines of text, and removing
3284 // one line.
3285 buffer.update(cx, |buffer, _| {
3286 assert!(!buffer.is_dirty());
3287 assert!(!buffer.has_conflict());
3288 });
3289 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3290 fs.save(
3291 "/dir/the-file".as_ref(),
3292 &new_contents.into(),
3293 LineEnding::Unix,
3294 )
3295 .await
3296 .unwrap();
3297
3298 // Because the buffer was not modified, it is reloaded from disk. Its
3299 // contents are edited according to the diff between the old and new
3300 // file contents.
3301 cx.executor().run_until_parked();
3302 buffer.update(cx, |buffer, _| {
3303 assert_eq!(buffer.text(), new_contents);
3304 assert!(!buffer.is_dirty());
3305 assert!(!buffer.has_conflict());
3306
3307 let anchor_positions = anchors
3308 .iter()
3309 .map(|anchor| anchor.to_point(&*buffer))
3310 .collect::<Vec<_>>();
3311 assert_eq!(
3312 anchor_positions,
3313 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3314 );
3315 });
3316
3317 // Modify the buffer
3318 buffer.update(cx, |buffer, cx| {
3319 buffer.edit([(0..0, " ")], None, cx);
3320 assert!(buffer.is_dirty());
3321 assert!(!buffer.has_conflict());
3322 });
3323
3324 // Change the file on disk again, adding blank lines to the beginning.
3325 fs.save(
3326 "/dir/the-file".as_ref(),
3327 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3328 LineEnding::Unix,
3329 )
3330 .await
3331 .unwrap();
3332
3333 // Because the buffer is modified, it doesn't reload from disk, but is
3334 // marked as having a conflict.
3335 cx.executor().run_until_parked();
3336 buffer.update(cx, |buffer, _| {
3337 assert!(buffer.has_conflict());
3338 });
3339}
3340
3341#[gpui::test]
3342async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3343 init_test(cx);
3344
3345 let fs = FakeFs::new(cx.executor());
3346 fs.insert_tree(
3347 "/dir",
3348 json!({
3349 "file1": "a\nb\nc\n",
3350 "file2": "one\r\ntwo\r\nthree\r\n",
3351 }),
3352 )
3353 .await;
3354
3355 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3356 let buffer1 = project
3357 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3358 .await
3359 .unwrap();
3360 let buffer2 = project
3361 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3362 .await
3363 .unwrap();
3364
3365 buffer1.update(cx, |buffer, _| {
3366 assert_eq!(buffer.text(), "a\nb\nc\n");
3367 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3368 });
3369 buffer2.update(cx, |buffer, _| {
3370 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3371 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3372 });
3373
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3376 fs.save(
3377 "/dir/file1".as_ref(),
3378 &"aaa\nb\nc\n".into(),
3379 LineEnding::Windows,
3380 )
3381 .await
3382 .unwrap();
3383 cx.executor().run_until_parked();
3384 buffer1.update(cx, |buffer, _| {
3385 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3386 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3387 });
3388
    // Save a file with Windows line endings. The file is written correctly.
3390 buffer2.update(cx, |buffer, cx| {
3391 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3392 });
3393 project
3394 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3395 .await
3396 .unwrap();
3397 assert_eq!(
3398 fs.load("/dir/file2".as_ref()).await.unwrap(),
3399 "one\r\ntwo\r\nthree\r\nfour\r\n",
3400 );
3401}
3402
3403#[gpui::test]
3404async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3405 init_test(cx);
3406
3407 let fs = FakeFs::new(cx.executor());
3408 fs.insert_tree(
3409 "/the-dir",
3410 json!({
3411 "a.rs": "
3412 fn foo(mut v: Vec<usize>) {
3413 for x in &v {
3414 v.push(1);
3415 }
3416 }
3417 "
3418 .unindent(),
3419 }),
3420 )
3421 .await;
3422
3423 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3424 let buffer = project
3425 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3426 .await
3427 .unwrap();
3428
3429 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3430 let message = lsp::PublishDiagnosticsParams {
3431 uri: buffer_uri.clone(),
3432 diagnostics: vec![
3433 lsp::Diagnostic {
3434 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3435 severity: Some(DiagnosticSeverity::WARNING),
3436 message: "error 1".to_string(),
3437 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3438 location: lsp::Location {
3439 uri: buffer_uri.clone(),
3440 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3441 },
3442 message: "error 1 hint 1".to_string(),
3443 }]),
3444 ..Default::default()
3445 },
3446 lsp::Diagnostic {
3447 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3448 severity: Some(DiagnosticSeverity::HINT),
3449 message: "error 1 hint 1".to_string(),
3450 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3451 location: lsp::Location {
3452 uri: buffer_uri.clone(),
3453 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3454 },
3455 message: "original diagnostic".to_string(),
3456 }]),
3457 ..Default::default()
3458 },
3459 lsp::Diagnostic {
3460 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3461 severity: Some(DiagnosticSeverity::ERROR),
3462 message: "error 2".to_string(),
3463 related_information: Some(vec![
3464 lsp::DiagnosticRelatedInformation {
3465 location: lsp::Location {
3466 uri: buffer_uri.clone(),
3467 range: lsp::Range::new(
3468 lsp::Position::new(1, 13),
3469 lsp::Position::new(1, 15),
3470 ),
3471 },
3472 message: "error 2 hint 1".to_string(),
3473 },
3474 lsp::DiagnosticRelatedInformation {
3475 location: lsp::Location {
3476 uri: buffer_uri.clone(),
3477 range: lsp::Range::new(
3478 lsp::Position::new(1, 13),
3479 lsp::Position::new(1, 15),
3480 ),
3481 },
3482 message: "error 2 hint 2".to_string(),
3483 },
3484 ]),
3485 ..Default::default()
3486 },
3487 lsp::Diagnostic {
3488 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3489 severity: Some(DiagnosticSeverity::HINT),
3490 message: "error 2 hint 1".to_string(),
3491 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3492 location: lsp::Location {
3493 uri: buffer_uri.clone(),
3494 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3495 },
3496 message: "original diagnostic".to_string(),
3497 }]),
3498 ..Default::default()
3499 },
3500 lsp::Diagnostic {
3501 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3502 severity: Some(DiagnosticSeverity::HINT),
3503 message: "error 2 hint 2".to_string(),
3504 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3505 location: lsp::Location {
3506 uri: buffer_uri,
3507 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3508 },
3509 message: "original diagnostic".to_string(),
3510 }]),
3511 ..Default::default()
3512 },
3513 ],
3514 version: None,
3515 };
3516
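    // Publish the diagnostics and verify that related hints are grouped together
    // with their primary diagnostics.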
3517 project
3518 .update(cx, |p, cx| {
3519 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3520 })
3521 .unwrap();
3522 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3523
3524 assert_eq!(
3525 buffer
3526 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3527 .collect::<Vec<_>>(),
3528 &[
3529 DiagnosticEntry {
3530 range: Point::new(1, 8)..Point::new(1, 9),
3531 diagnostic: Diagnostic {
3532 severity: DiagnosticSeverity::WARNING,
3533 message: "error 1".to_string(),
3534 group_id: 1,
3535 is_primary: true,
3536 ..Default::default()
3537 }
3538 },
3539 DiagnosticEntry {
3540 range: Point::new(1, 8)..Point::new(1, 9),
3541 diagnostic: Diagnostic {
3542 severity: DiagnosticSeverity::HINT,
3543 message: "error 1 hint 1".to_string(),
3544 group_id: 1,
3545 is_primary: false,
3546 ..Default::default()
3547 }
3548 },
3549 DiagnosticEntry {
3550 range: Point::new(1, 13)..Point::new(1, 15),
3551 diagnostic: Diagnostic {
3552 severity: DiagnosticSeverity::HINT,
3553 message: "error 2 hint 1".to_string(),
3554 group_id: 0,
3555 is_primary: false,
3556 ..Default::default()
3557 }
3558 },
3559 DiagnosticEntry {
3560 range: Point::new(1, 13)..Point::new(1, 15),
3561 diagnostic: Diagnostic {
3562 severity: DiagnosticSeverity::HINT,
3563 message: "error 2 hint 2".to_string(),
3564 group_id: 0,
3565 is_primary: false,
3566 ..Default::default()
3567 }
3568 },
3569 DiagnosticEntry {
3570 range: Point::new(2, 8)..Point::new(2, 17),
3571 diagnostic: Diagnostic {
3572 severity: DiagnosticSeverity::ERROR,
3573 message: "error 2".to_string(),
3574 group_id: 0,
3575 is_primary: true,
3576 ..Default::default()
3577 }
3578 }
3579 ]
3580 );
3581
3582 assert_eq!(
3583 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3584 &[
3585 DiagnosticEntry {
3586 range: Point::new(1, 13)..Point::new(1, 15),
3587 diagnostic: Diagnostic {
3588 severity: DiagnosticSeverity::HINT,
3589 message: "error 2 hint 1".to_string(),
3590 group_id: 0,
3591 is_primary: false,
3592 ..Default::default()
3593 }
3594 },
3595 DiagnosticEntry {
3596 range: Point::new(1, 13)..Point::new(1, 15),
3597 diagnostic: Diagnostic {
3598 severity: DiagnosticSeverity::HINT,
3599 message: "error 2 hint 2".to_string(),
3600 group_id: 0,
3601 is_primary: false,
3602 ..Default::default()
3603 }
3604 },
3605 DiagnosticEntry {
3606 range: Point::new(2, 8)..Point::new(2, 17),
3607 diagnostic: Diagnostic {
3608 severity: DiagnosticSeverity::ERROR,
3609 message: "error 2".to_string(),
3610 group_id: 0,
3611 is_primary: true,
3612 ..Default::default()
3613 }
3614 }
3615 ]
3616 );
3617
3618 assert_eq!(
3619 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3620 &[
3621 DiagnosticEntry {
3622 range: Point::new(1, 8)..Point::new(1, 9),
3623 diagnostic: Diagnostic {
3624 severity: DiagnosticSeverity::WARNING,
3625 message: "error 1".to_string(),
3626 group_id: 1,
3627 is_primary: true,
3628 ..Default::default()
3629 }
3630 },
3631 DiagnosticEntry {
3632 range: Point::new(1, 8)..Point::new(1, 9),
3633 diagnostic: Diagnostic {
3634 severity: DiagnosticSeverity::HINT,
3635 message: "error 1 hint 1".to_string(),
3636 group_id: 1,
3637 is_primary: false,
3638 ..Default::default()
3639 }
3640 },
3641 ]
3642 );
3643}
3644
3645#[gpui::test]
3646async fn test_rename(cx: &mut gpui::TestAppContext) {
3647 init_test(cx);
3648
3649 let mut language = Language::new(
3650 LanguageConfig {
3651 name: "Rust".into(),
3652 path_suffixes: vec!["rs".to_string()],
3653 ..Default::default()
3654 },
3655 Some(tree_sitter_rust::language()),
3656 );
3657 let mut fake_servers = language
3658 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3659 capabilities: lsp::ServerCapabilities {
3660 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3661 prepare_provider: Some(true),
3662 work_done_progress_options: Default::default(),
3663 })),
3664 ..Default::default()
3665 },
3666 ..Default::default()
3667 }))
3668 .await;
3669
3670 let fs = FakeFs::new(cx.executor());
3671 fs.insert_tree(
3672 "/dir",
3673 json!({
3674 "one.rs": "const ONE: usize = 1;",
3675 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3676 }),
3677 )
3678 .await;
3679
3680 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3681 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3682 let buffer = project
3683 .update(cx, |project, cx| {
3684 project.open_local_buffer("/dir/one.rs", cx)
3685 })
3686 .await
3687 .unwrap();
3688
3689 let fake_server = fake_servers.next().await.unwrap();
3690
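    // Prepare a rename at offset 7 (inside `ONE`); the server responds with the
    // range of the symbol that can be renamed.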
3691 let response = project.update(cx, |project, cx| {
3692 project.prepare_rename(buffer.clone(), 7, cx)
3693 });
3694 fake_server
3695 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3696 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3697 assert_eq!(params.position, lsp::Position::new(0, 7));
3698 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3699 lsp::Position::new(0, 6),
3700 lsp::Position::new(0, 9),
3701 ))))
3702 })
3703 .next()
3704 .await
3705 .unwrap();
3706 let range = response.await.unwrap().unwrap();
3707 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3708 assert_eq!(range, 6..9);
3709
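    // Perform the rename; the server's workspace edit spans both `one.rs` and `two.rs`.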
3710 let response = project.update(cx, |project, cx| {
3711 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3712 });
3713 fake_server
3714 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3715 assert_eq!(
3716 params.text_document_position.text_document.uri.as_str(),
3717 "file:///dir/one.rs"
3718 );
3719 assert_eq!(
3720 params.text_document_position.position,
3721 lsp::Position::new(0, 7)
3722 );
3723 assert_eq!(params.new_name, "THREE");
3724 Ok(Some(lsp::WorkspaceEdit {
3725 changes: Some(
3726 [
3727 (
3728 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3729 vec![lsp::TextEdit::new(
3730 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3731 "THREE".to_string(),
3732 )],
3733 ),
3734 (
3735 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3736 vec![
3737 lsp::TextEdit::new(
3738 lsp::Range::new(
3739 lsp::Position::new(0, 24),
3740 lsp::Position::new(0, 27),
3741 ),
3742 "THREE".to_string(),
3743 ),
3744 lsp::TextEdit::new(
3745 lsp::Range::new(
3746 lsp::Position::new(0, 35),
3747 lsp::Position::new(0, 38),
3748 ),
3749 "THREE".to_string(),
3750 ),
3751 ],
3752 ),
3753 ]
3754 .into_iter()
3755 .collect(),
3756 ),
3757 ..Default::default()
3758 }))
3759 })
3760 .next()
3761 .await
3762 .unwrap();
3763 let mut transaction = response.await.unwrap().0;
3764 assert_eq!(transaction.len(), 2);
3765 assert_eq!(
3766 transaction
3767 .remove_entry(&buffer)
3768 .unwrap()
3769 .0
3770 .update(cx, |buffer, _| buffer.text()),
3771 "const THREE: usize = 1;"
3772 );
3773 assert_eq!(
3774 transaction
3775 .into_keys()
3776 .next()
3777 .unwrap()
3778 .update(cx, |buffer, _| buffer.text()),
3779 "const TWO: usize = one::THREE + one::THREE;"
3780 );
3781}
3782
3783#[gpui::test]
3784async fn test_search(cx: &mut gpui::TestAppContext) {
3785 init_test(cx);
3786
3787 let fs = FakeFs::new(cx.executor());
3788 fs.insert_tree(
3789 "/dir",
3790 json!({
3791 "one.rs": "const ONE: usize = 1;",
3792 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3793 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3794 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3795 }),
3796 )
3797 .await;
3798 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3799 assert_eq!(
3800 search(
3801 &project,
3802 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3803 cx
3804 )
3805 .await
3806 .unwrap(),
3807 HashMap::from_iter([
3808 ("two.rs".to_string(), vec![6..9]),
3809 ("three.rs".to_string(), vec![37..40])
3810 ])
3811 );
3812
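    // Edit an open buffer so that subsequent searches also reflect its unsaved,
    // in-memory contents.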
3813 let buffer_4 = project
3814 .update(cx, |project, cx| {
3815 project.open_local_buffer("/dir/four.rs", cx)
3816 })
3817 .await
3818 .unwrap();
3819 buffer_4.update(cx, |buffer, cx| {
3820 let text = "two::TWO";
3821 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3822 });
3823
3824 assert_eq!(
3825 search(
3826 &project,
3827 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3828 cx
3829 )
3830 .await
3831 .unwrap(),
3832 HashMap::from_iter([
3833 ("two.rs".to_string(), vec![6..9]),
3834 ("three.rs".to_string(), vec![37..40]),
3835 ("four.rs".to_string(), vec![25..28, 36..39])
3836 ])
3837 );
3838}
3839
3840#[gpui::test]
3841async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3842 init_test(cx);
3843
3844 let search_query = "file";
3845
3846 let fs = FakeFs::new(cx.executor());
3847 fs.insert_tree(
3848 "/dir",
3849 json!({
3850 "one.rs": r#"// Rust file one"#,
3851 "one.ts": r#"// TypeScript file one"#,
3852 "two.rs": r#"// Rust file two"#,
3853 "two.ts": r#"// TypeScript file two"#,
3854 }),
3855 )
3856 .await;
3857 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3858
3859 assert!(
3860 search(
3861 &project,
3862 SearchQuery::text(
3863 search_query,
3864 false,
3865 true,
3866 false,
3867 vec![PathMatcher::new("*.odd").unwrap()],
3868 Vec::new()
3869 )
3870 .unwrap(),
3871 cx
3872 )
3873 .await
3874 .unwrap()
3875 .is_empty(),
3876 "If no inclusions match, no files should be returned"
3877 );
3878
3879 assert_eq!(
3880 search(
3881 &project,
3882 SearchQuery::text(
3883 search_query,
3884 false,
3885 true,
3886 false,
3887 vec![PathMatcher::new("*.rs").unwrap()],
3888 Vec::new()
3889 )
3890 .unwrap(),
3891 cx
3892 )
3893 .await
3894 .unwrap(),
3895 HashMap::from_iter([
3896 ("one.rs".to_string(), vec![8..12]),
3897 ("two.rs".to_string(), vec![8..12]),
3898 ]),
        "Rust-only search should give only Rust files"
3900 );
3901
3902 assert_eq!(
3903 search(
3904 &project,
3905 SearchQuery::text(
3906 search_query,
3907 false,
3908 true,
3909 false,
3910 vec![
3911 PathMatcher::new("*.ts").unwrap(),
3912 PathMatcher::new("*.odd").unwrap(),
3913 ],
3914 Vec::new()
3915 ).unwrap(),
3916 cx
3917 )
3918 .await
3919 .unwrap(),
3920 HashMap::from_iter([
3921 ("one.ts".to_string(), vec![14..18]),
3922 ("two.ts".to_string(), vec![14..18]),
3923 ]),
        "TypeScript-only search should give only TypeScript files, even if other inclusions don't match anything"
3925 );
3926
3927 assert_eq!(
3928 search(
3929 &project,
3930 SearchQuery::text(
3931 search_query,
3932 false,
3933 true,
3934 false,
3935 vec![
3936 PathMatcher::new("*.rs").unwrap(),
3937 PathMatcher::new("*.ts").unwrap(),
3938 PathMatcher::new("*.odd").unwrap(),
3939 ],
3940 Vec::new()
3941 ).unwrap(),
3942 cx
3943 )
3944 .await
3945 .unwrap(),
3946 HashMap::from_iter([
3947 ("one.rs".to_string(), vec![8..12]),
3948 ("one.ts".to_string(), vec![14..18]),
3949 ("two.rs".to_string(), vec![8..12]),
3950 ("two.ts".to_string(), vec![14..18]),
3951 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3953 );
3954}
3955
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("one.ts".to_string(), vec![14..18]),
            ("two.rs".to_string(), vec![8..12]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.rs").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.rs".to_string(), vec![8..12]),
            ("two.rs".to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap().is_empty(),
        "Rust and TypeScript exclusion should give no files, even if other exclusions don't match anything"
    );
}
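// Checks how inclusion and exclusion filters interact when both are supplied:
// exclusions take precedence, and patterns that match nothing have no effect.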
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.odd").unwrap()],
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If neither inclusions nor exclusions match any file, no files should be returned"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.ts").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            ).unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript inclusions and exclusions match, exclusions should win and no files should be returned"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Extra non-matching inclusions and exclusions should not change the outcome: exclusions still win and no files are returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.odd").unwrap()
                ],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("one.ts".to_string(), vec![14..18]),
            ("two.ts".to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
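// Checks that gitignored directories (`target` and `node_modules`) are skipped
// by default and only scanned when the include-ignored flag is set, optionally
// narrowed further by inclusion and exclusion filters.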
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("package.json".to_string(), vec![8..11]),
            ("target/index.txt".to_string(), vec![6..9]),
            (
                "node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
            ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
            ("node_modules/eslint/package.json".to_string(), vec![8..11]),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
                vec![PathMatcher::new("*.ts").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "A search that includes the ignored prettier directory but excludes TS files should find only one file"
    );
}
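// Checks the literal (non-glob) prefix extracted from a glob pattern; a path
// with no glob metacharacters is its own literal prefix.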
#[test]
fn test_glob_literal_prefix() {
    assert_eq!(glob_literal_prefix("**/*.js"), "");
    assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
    assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
    assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
}
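/// Test helper: runs a project search and collects the streamed results into a
/// map from each buffer's path to the matching offset ranges in that buffer.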
async fn search(
    project: &Model<Project>,
    query: SearchQuery,
    cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> {
    let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
    let mut result = HashMap::default();
    while let Some((buffer, ranges)) = search_rx.next().await {
        result.entry(buffer).or_insert(ranges);
    }
    Ok(result
        .into_iter()
        .map(|(buffer, ranges)| {
            buffer.update(cx, |buffer, _| {
                let path = buffer.file().unwrap().path().to_string_lossy().to_string();
                let ranges = ranges
                    .into_iter()
                    .map(|range| range.to_offset(buffer))
                    .collect::<Vec<_>>();
                (path, ranges)
            })
        })
        .collect())
}
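/// Shared test setup: enables `env_logger` output when `RUST_LOG` is set, then
/// installs a test `SettingsStore` and initializes language and project settings.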
fn init_test(cx: &mut gpui::TestAppContext) {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::try_init().ok();
    }

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        language::init(cx);
        Project::init_settings(cx);
    });
}