1use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
2use fs2::{FakeFs, RealFs};
3use futures::{future, StreamExt};
4use gpui2::AppContext;
5use language2::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp2::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{os::unix, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, test::temp_tree};
17
18#[gpui2::test]
19async fn test_symlinks(cx: &mut gpui2::TestAppContext) {
20 init_test(cx);
21 cx.executor().allow_parking();
22
23 let dir = temp_tree(json!({
24 "root": {
25 "apple": "",
26 "banana": {
27 "carrot": {
28 "date": "",
29 "endive": "",
30 }
31 },
32 "fennel": {
33 "grape": "",
34 }
35 }
36 }));
37
38 let root_link_path = dir.path().join("root_link");
39 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
40 unix::fs::symlink(
41 &dir.path().join("root/fennel"),
42 &dir.path().join("root/finnochio"),
43 )
44 .unwrap();
45
46 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
47 project.update(cx, |project, cx| {
48 let tree = project.worktrees().next().unwrap().read(cx);
49 assert_eq!(tree.file_count(), 5);
50 assert_eq!(
51 tree.inode_for_path("fennel/grape"),
52 tree.inode_for_path("finnochio/grape")
53 );
54 });
55}
56
57#[gpui2::test]
58async fn test_managing_project_specific_settings(cx: &mut gpui2::TestAppContext) {
59 init_test(cx);
60
61 let fs = FakeFs::new(cx.executor().clone());
62 fs.insert_tree(
63 "/the-root",
64 json!({
65 ".zed": {
66 "settings.json": r#"{ "tab_size": 8 }"#
67 },
68 "a": {
69 "a.rs": "fn a() {\n A\n}"
70 },
71 "b": {
72 ".zed": {
73 "settings.json": r#"{ "tab_size": 2 }"#
74 },
75 "b.rs": "fn b() {\n B\n}"
76 }
77 }),
78 )
79 .await;
80
81 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
82 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
83
84 cx.executor().run_until_parked();
85 cx.update(|cx| {
86 let tree = worktree.read(cx);
87
88 let settings_a = language_settings(
89 None,
90 Some(
91 &(File::for_entry(
92 tree.entry_for_path("a/a.rs").unwrap().clone(),
93 worktree.clone(),
94 ) as _),
95 ),
96 cx,
97 );
98 let settings_b = language_settings(
99 None,
100 Some(
101 &(File::for_entry(
102 tree.entry_for_path("b/b.rs").unwrap().clone(),
103 worktree.clone(),
104 ) as _),
105 ),
106 cx,
107 );
108
109 assert_eq!(settings_a.tab_size.get(), 8);
110 assert_eq!(settings_b.tab_size.get(), 2);
111 });
112}
113
114#[gpui2::test]
115async fn test_managing_language_servers(cx: &mut gpui2::TestAppContext) {
116 init_test(cx);
117
118 let mut rust_language = Language::new(
119 LanguageConfig {
120 name: "Rust".into(),
121 path_suffixes: vec!["rs".to_string()],
122 ..Default::default()
123 },
124 Some(tree_sitter_rust::language()),
125 );
126 let mut json_language = Language::new(
127 LanguageConfig {
128 name: "JSON".into(),
129 path_suffixes: vec!["json".to_string()],
130 ..Default::default()
131 },
132 None,
133 );
134 let mut fake_rust_servers = rust_language
135 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
136 name: "the-rust-language-server",
137 capabilities: lsp2::ServerCapabilities {
138 completion_provider: Some(lsp2::CompletionOptions {
139 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
140 ..Default::default()
141 }),
142 ..Default::default()
143 },
144 ..Default::default()
145 }))
146 .await;
147 let mut fake_json_servers = json_language
148 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
149 name: "the-json-language-server",
150 capabilities: lsp2::ServerCapabilities {
151 completion_provider: Some(lsp2::CompletionOptions {
152 trigger_characters: Some(vec![":".to_string()]),
153 ..Default::default()
154 }),
155 ..Default::default()
156 },
157 ..Default::default()
158 }))
159 .await;
160
161 let fs = FakeFs::new(cx.executor().clone());
162 fs.insert_tree(
163 "/the-root",
164 json!({
165 "test.rs": "const A: i32 = 1;",
166 "test2.rs": "",
167 "Cargo.toml": "a = 1",
168 "package.json": "{\"a\": 1}",
169 }),
170 )
171 .await;
172
173 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
174
175 // Open a buffer without an associated language server.
176 let toml_buffer = project
177 .update(cx, |project, cx| {
178 project.open_local_buffer("/the-root/Cargo.toml", cx)
179 })
180 .await
181 .unwrap();
182
183 // Open a buffer with an associated language server before the language for it has been loaded.
184 let rust_buffer = project
185 .update(cx, |project, cx| {
186 project.open_local_buffer("/the-root/test.rs", cx)
187 })
188 .await
189 .unwrap();
190 rust_buffer.update(cx, |buffer, _| {
191 assert_eq!(buffer.language().map(|l| l.name()), None);
192 });
193
194 // Now we add the languages to the project, and ensure they get assigned to all
195 // the relevant open buffers.
196 project.update(cx, |project, _| {
197 project.languages.add(Arc::new(json_language));
198 project.languages.add(Arc::new(rust_language));
199 });
200 cx.executor().run_until_parked();
201 rust_buffer.update(cx, |buffer, _| {
202 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
203 });
204
205 // A server is started up, and it is notified about Rust files.
206 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
207 assert_eq!(
208 fake_rust_server
209 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
210 .await
211 .text_document,
212 lsp2::TextDocumentItem {
213 uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
214 version: 0,
215 text: "const A: i32 = 1;".to_string(),
216 language_id: Default::default()
217 }
218 );
219
220 // The buffer is configured based on the language server's capabilities.
221 rust_buffer.update(cx, |buffer, _| {
222 assert_eq!(
223 buffer.completion_triggers(),
224 &[".".to_string(), "::".to_string()]
225 );
226 });
227 toml_buffer.update(cx, |buffer, _| {
228 assert!(buffer.completion_triggers().is_empty());
229 });
230
231 // Edit a buffer. The changes are reported to the language server.
232 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
233 assert_eq!(
234 fake_rust_server
235 .receive_notification::<lsp2::notification::DidChangeTextDocument>()
236 .await
237 .text_document,
238 lsp2::VersionedTextDocumentIdentifier::new(
239 lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
240 1
241 )
242 );
243
244 // Open a third buffer with a different associated language server.
245 let json_buffer = project
246 .update(cx, |project, cx| {
247 project.open_local_buffer("/the-root/package.json", cx)
248 })
249 .await
250 .unwrap();
251
252 // A json language server is started up and is only notified about the json buffer.
253 let mut fake_json_server = fake_json_servers.next().await.unwrap();
254 assert_eq!(
255 fake_json_server
256 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
257 .await
258 .text_document,
259 lsp2::TextDocumentItem {
260 uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
261 version: 0,
262 text: "{\"a\": 1}".to_string(),
263 language_id: Default::default()
264 }
265 );
266
267 // This buffer is configured based on the second language server's
268 // capabilities.
269 json_buffer.update(cx, |buffer, _| {
270 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
271 });
272
273 // When opening another buffer whose language server is already running,
274 // it is also configured based on the existing language server's capabilities.
275 let rust_buffer2 = project
276 .update(cx, |project, cx| {
277 project.open_local_buffer("/the-root/test2.rs", cx)
278 })
279 .await
280 .unwrap();
281 rust_buffer2.update(cx, |buffer, _| {
282 assert_eq!(
283 buffer.completion_triggers(),
284 &[".".to_string(), "::".to_string()]
285 );
286 });
287
288 // Changes are reported only to servers matching the buffer's language.
289 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
290 rust_buffer2.update(cx, |buffer, cx| {
291 buffer.edit([(0..0, "let x = 1;")], None, cx)
292 });
293 assert_eq!(
294 fake_rust_server
295 .receive_notification::<lsp2::notification::DidChangeTextDocument>()
296 .await
297 .text_document,
298 lsp2::VersionedTextDocumentIdentifier::new(
299 lsp2::Url::from_file_path("/the-root/test2.rs").unwrap(),
300 1
301 )
302 );
303
304 // Save notifications are reported to all servers.
305 project
306 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
307 .await
308 .unwrap();
309 assert_eq!(
310 fake_rust_server
311 .receive_notification::<lsp2::notification::DidSaveTextDocument>()
312 .await
313 .text_document,
314 lsp2::TextDocumentIdentifier::new(
315 lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
316 )
317 );
318 assert_eq!(
319 fake_json_server
320 .receive_notification::<lsp2::notification::DidSaveTextDocument>()
321 .await
322 .text_document,
323 lsp2::TextDocumentIdentifier::new(
324 lsp2::Url::from_file_path("/the-root/Cargo.toml").unwrap()
325 )
326 );
327
328 // Renames are reported only to servers matching the buffer's language.
329 fs.rename(
330 Path::new("/the-root/test2.rs"),
331 Path::new("/the-root/test3.rs"),
332 Default::default(),
333 )
334 .await
335 .unwrap();
336 assert_eq!(
337 fake_rust_server
338 .receive_notification::<lsp2::notification::DidCloseTextDocument>()
339 .await
340 .text_document,
341 lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test2.rs").unwrap()),
342 );
343 assert_eq!(
344 fake_rust_server
345 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
346 .await
347 .text_document,
348 lsp2::TextDocumentItem {
349 uri: lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),
350 version: 0,
351 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
352 language_id: Default::default()
353 },
354 );
355
356 rust_buffer2.update(cx, |buffer, cx| {
357 buffer.update_diagnostics(
358 LanguageServerId(0),
359 DiagnosticSet::from_sorted_entries(
360 vec![DiagnosticEntry {
361 diagnostic: Default::default(),
362 range: Anchor::MIN..Anchor::MAX,
363 }],
364 &buffer.snapshot(),
365 ),
366 cx,
367 );
368 assert_eq!(
369 buffer
370 .snapshot()
371 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
372 .count(),
373 1
374 );
375 });
376
377 // When the rename changes the extension of the file, the buffer gets closed on the old
378 // language server and gets opened on the new one.
379 fs.rename(
380 Path::new("/the-root/test3.rs"),
381 Path::new("/the-root/test3.json"),
382 Default::default(),
383 )
384 .await
385 .unwrap();
386 assert_eq!(
387 fake_rust_server
388 .receive_notification::<lsp2::notification::DidCloseTextDocument>()
389 .await
390 .text_document,
391 lsp2::TextDocumentIdentifier::new(lsp2::Url::from_file_path("/the-root/test3.rs").unwrap(),),
392 );
393 assert_eq!(
394 fake_json_server
395 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
396 .await
397 .text_document,
398 lsp2::TextDocumentItem {
399 uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
400 version: 0,
401 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
402 language_id: Default::default()
403 },
404 );
405
406 // We clear the diagnostics, since the language has changed.
407 rust_buffer2.update(cx, |buffer, _| {
408 assert_eq!(
409 buffer
410 .snapshot()
411 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
412 .count(),
413 0
414 );
415 });
416
417 // The renamed file's version resets after changing language server.
418 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
419 assert_eq!(
420 fake_json_server
421 .receive_notification::<lsp2::notification::DidChangeTextDocument>()
422 .await
423 .text_document,
424 lsp2::VersionedTextDocumentIdentifier::new(
425 lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
426 1
427 )
428 );
429
430 // Restart language servers
431 project.update(cx, |project, cx| {
432 project.restart_language_servers_for_buffers(
433 vec![rust_buffer.clone(), json_buffer.clone()],
434 cx,
435 );
436 });
437
438 let mut rust_shutdown_requests = fake_rust_server
439 .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
440 let mut json_shutdown_requests = fake_json_server
441 .handle_request::<lsp2::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
442 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
443
444 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
445 let mut fake_json_server = fake_json_servers.next().await.unwrap();
446
447 // Ensure rust document is reopened in new rust language server
448 assert_eq!(
449 fake_rust_server
450 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
451 .await
452 .text_document,
453 lsp2::TextDocumentItem {
454 uri: lsp2::Url::from_file_path("/the-root/test.rs").unwrap(),
455 version: 0,
456 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
457 language_id: Default::default()
458 }
459 );
460
461 // Ensure json documents are reopened in new json language server
462 assert_set_eq!(
463 [
464 fake_json_server
465 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
466 .await
467 .text_document,
468 fake_json_server
469 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
470 .await
471 .text_document,
472 ],
473 [
474 lsp2::TextDocumentItem {
475 uri: lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
476 version: 0,
477 text: json_buffer.update(cx, |buffer, _| buffer.text()),
478 language_id: Default::default()
479 },
480 lsp2::TextDocumentItem {
481 uri: lsp2::Url::from_file_path("/the-root/test3.json").unwrap(),
482 version: 0,
483 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
484 language_id: Default::default()
485 }
486 ]
487 );
488
489 // Close notifications are reported only to servers matching the buffer's language.
490 cx.update(|_| drop(json_buffer));
491 let close_message = lsp2::DidCloseTextDocumentParams {
492 text_document: lsp2::TextDocumentIdentifier::new(
493 lsp2::Url::from_file_path("/the-root/package.json").unwrap(),
494 ),
495 };
496 assert_eq!(
497 fake_json_server
498 .receive_notification::<lsp2::notification::DidCloseTextDocument>()
499 .await,
500 close_message,
501 );
502}
503
504#[gpui2::test]
505async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui2::TestAppContext) {
506 init_test(cx);
507
508 let mut language = Language::new(
509 LanguageConfig {
510 name: "Rust".into(),
511 path_suffixes: vec!["rs".to_string()],
512 ..Default::default()
513 },
514 Some(tree_sitter_rust::language()),
515 );
516 let mut fake_servers = language
517 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
518 name: "the-language-server",
519 ..Default::default()
520 }))
521 .await;
522
523 let fs = FakeFs::new(cx.executor().clone());
524 fs.insert_tree(
525 "/the-root",
526 json!({
527 ".gitignore": "target\n",
528 "src": {
529 "a.rs": "",
530 "b.rs": "",
531 },
532 "target": {
533 "x": {
534 "out": {
535 "x.rs": ""
536 }
537 },
538 "y": {
539 "out": {
540 "y.rs": "",
541 }
542 },
543 "z": {
544 "out": {
545 "z.rs": ""
546 }
547 }
548 }
549 }),
550 )
551 .await;
552
553 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
554 project.update(cx, |project, _| {
555 project.languages.add(Arc::new(language));
556 });
557 cx.executor().run_until_parked();
558
559 // Start the language server by opening a buffer with a compatible file extension.
560 let _buffer = project
561 .update(cx, |project, cx| {
562 project.open_local_buffer("/the-root/src/a.rs", cx)
563 })
564 .await
565 .unwrap();
566
567 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
568 project.update(cx, |project, cx| {
569 let worktree = project.worktrees().next().unwrap();
570 assert_eq!(
571 worktree
572 .read(cx)
573 .snapshot()
574 .entries(true)
575 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
576 .collect::<Vec<_>>(),
577 &[
578 (Path::new(""), false),
579 (Path::new(".gitignore"), false),
580 (Path::new("src"), false),
581 (Path::new("src/a.rs"), false),
582 (Path::new("src/b.rs"), false),
583 (Path::new("target"), true),
584 ]
585 );
586 });
587
588 let prev_read_dir_count = fs.read_dir_call_count();
589
590 // Keep track of the FS events reported to the language server.
591 let fake_server = fake_servers.next().await.unwrap();
592 let file_changes = Arc::new(Mutex::new(Vec::new()));
593 fake_server
594 .request::<lsp2::request::RegisterCapability>(lsp2::RegistrationParams {
595 registrations: vec![lsp2::Registration {
596 id: Default::default(),
597 method: "workspace/didChangeWatchedFiles".to_string(),
598 register_options: serde_json::to_value(
599 lsp2::DidChangeWatchedFilesRegistrationOptions {
600 watchers: vec![
601 lsp2::FileSystemWatcher {
602 glob_pattern: lsp2::GlobPattern::String(
603 "/the-root/Cargo.toml".to_string(),
604 ),
605 kind: None,
606 },
607 lsp2::FileSystemWatcher {
608 glob_pattern: lsp2::GlobPattern::String(
609 "/the-root/src/*.{rs,c}".to_string(),
610 ),
611 kind: None,
612 },
613 lsp2::FileSystemWatcher {
614 glob_pattern: lsp2::GlobPattern::String(
615 "/the-root/target/y/**/*.rs".to_string(),
616 ),
617 kind: None,
618 },
619 ],
620 },
621 )
622 .ok(),
623 }],
624 })
625 .await
626 .unwrap();
627 fake_server.handle_notification::<lsp2::notification::DidChangeWatchedFiles, _>({
628 let file_changes = file_changes.clone();
629 move |params, _| {
630 let mut file_changes = file_changes.lock();
631 file_changes.extend(params.changes);
632 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
633 }
634 });
635
636 cx.executor().run_until_parked();
637 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
638 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
639
640 // Now the language server has asked us to watch an ignored directory path,
641 // so we recursively load it.
642 project.update(cx, |project, cx| {
643 let worktree = project.worktrees().next().unwrap();
644 assert_eq!(
645 worktree
646 .read(cx)
647 .snapshot()
648 .entries(true)
649 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
650 .collect::<Vec<_>>(),
651 &[
652 (Path::new(""), false),
653 (Path::new(".gitignore"), false),
654 (Path::new("src"), false),
655 (Path::new("src/a.rs"), false),
656 (Path::new("src/b.rs"), false),
657 (Path::new("target"), true),
658 (Path::new("target/x"), true),
659 (Path::new("target/y"), true),
660 (Path::new("target/y/out"), true),
661 (Path::new("target/y/out/y.rs"), true),
662 (Path::new("target/z"), true),
663 ]
664 );
665 });
666
667 // Perform some file system mutations, two of which match the watched patterns,
668 // and one of which does not.
669 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
670 .await
671 .unwrap();
672 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
673 .await
674 .unwrap();
675 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
676 .await
677 .unwrap();
678 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
679 .await
680 .unwrap();
681 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
682 .await
683 .unwrap();
684
685 // The language server receives events for the FS mutations that match its watch patterns.
686 cx.executor().run_until_parked();
687 assert_eq!(
688 &*file_changes.lock(),
689 &[
690 lsp2::FileEvent {
691 uri: lsp2::Url::from_file_path("/the-root/src/b.rs").unwrap(),
692 typ: lsp2::FileChangeType::DELETED,
693 },
694 lsp2::FileEvent {
695 uri: lsp2::Url::from_file_path("/the-root/src/c.rs").unwrap(),
696 typ: lsp2::FileChangeType::CREATED,
697 },
698 lsp2::FileEvent {
699 uri: lsp2::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
700 typ: lsp2::FileChangeType::CREATED,
701 },
702 ]
703 );
704}
705
706#[gpui2::test]
707async fn test_single_file_worktrees_diagnostics(cx: &mut gpui2::TestAppContext) {
708 init_test(cx);
709
710 let fs = FakeFs::new(cx.executor().clone());
711 fs.insert_tree(
712 "/dir",
713 json!({
714 "a.rs": "let a = 1;",
715 "b.rs": "let b = 2;"
716 }),
717 )
718 .await;
719
720 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
721
722 let buffer_a = project
723 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
724 .await
725 .unwrap();
726 let buffer_b = project
727 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
728 .await
729 .unwrap();
730
731 project.update(cx, |project, cx| {
732 project
733 .update_diagnostics(
734 LanguageServerId(0),
735 lsp2::PublishDiagnosticsParams {
736 uri: Url::from_file_path("/dir/a.rs").unwrap(),
737 version: None,
738 diagnostics: vec![lsp2::Diagnostic {
739 range: lsp2::Range::new(
740 lsp2::Position::new(0, 4),
741 lsp2::Position::new(0, 5),
742 ),
743 severity: Some(lsp2::DiagnosticSeverity::ERROR),
744 message: "error 1".to_string(),
745 ..Default::default()
746 }],
747 },
748 &[],
749 cx,
750 )
751 .unwrap();
752 project
753 .update_diagnostics(
754 LanguageServerId(0),
755 lsp2::PublishDiagnosticsParams {
756 uri: Url::from_file_path("/dir/b.rs").unwrap(),
757 version: None,
758 diagnostics: vec![lsp2::Diagnostic {
759 range: lsp2::Range::new(
760 lsp2::Position::new(0, 4),
761 lsp2::Position::new(0, 5),
762 ),
763 severity: Some(lsp2::DiagnosticSeverity::WARNING),
764 message: "error 2".to_string(),
765 ..Default::default()
766 }],
767 },
768 &[],
769 cx,
770 )
771 .unwrap();
772 });
773
774 buffer_a.update(cx, |buffer, _| {
775 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
776 assert_eq!(
777 chunks
778 .iter()
779 .map(|(s, d)| (s.as_str(), *d))
780 .collect::<Vec<_>>(),
781 &[
782 ("let ", None),
783 ("a", Some(DiagnosticSeverity::ERROR)),
784 (" = 1;", None),
785 ]
786 );
787 });
788 buffer_b.update(cx, |buffer, _| {
789 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
790 assert_eq!(
791 chunks
792 .iter()
793 .map(|(s, d)| (s.as_str(), *d))
794 .collect::<Vec<_>>(),
795 &[
796 ("let ", None),
797 ("b", Some(DiagnosticSeverity::WARNING)),
798 (" = 2;", None),
799 ]
800 );
801 });
802}
803
804#[gpui2::test]
805async fn test_hidden_worktrees_diagnostics(cx: &mut gpui2::TestAppContext) {
806 init_test(cx);
807
808 let fs = FakeFs::new(cx.executor().clone());
809 fs.insert_tree(
810 "/root",
811 json!({
812 "dir": {
813 "a.rs": "let a = 1;",
814 },
815 "other.rs": "let b = c;"
816 }),
817 )
818 .await;
819
820 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
821
822 let (worktree, _) = project
823 .update(cx, |project, cx| {
824 project.find_or_create_local_worktree("/root/other.rs", false, cx)
825 })
826 .await
827 .unwrap();
828 let worktree_id = worktree.update(cx, |tree, _| tree.id());
829
830 project.update(cx, |project, cx| {
831 project
832 .update_diagnostics(
833 LanguageServerId(0),
834 lsp2::PublishDiagnosticsParams {
835 uri: Url::from_file_path("/root/other.rs").unwrap(),
836 version: None,
837 diagnostics: vec![lsp2::Diagnostic {
838 range: lsp2::Range::new(
839 lsp2::Position::new(0, 8),
840 lsp2::Position::new(0, 9),
841 ),
842 severity: Some(lsp2::DiagnosticSeverity::ERROR),
843 message: "unknown variable 'c'".to_string(),
844 ..Default::default()
845 }],
846 },
847 &[],
848 cx,
849 )
850 .unwrap();
851 });
852
853 let buffer = project
854 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
855 .await
856 .unwrap();
857 buffer.update(cx, |buffer, _| {
858 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
859 assert_eq!(
860 chunks
861 .iter()
862 .map(|(s, d)| (s.as_str(), *d))
863 .collect::<Vec<_>>(),
864 &[
865 ("let b = ", None),
866 ("c", Some(DiagnosticSeverity::ERROR)),
867 (";", None),
868 ]
869 );
870 });
871
872 project.update(cx, |project, cx| {
873 assert_eq!(project.diagnostic_summaries(cx).next(), None);
874 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
875 });
876}
877
878#[gpui2::test]
879async fn test_disk_based_diagnostics_progress(cx: &mut gpui2::TestAppContext) {
880 init_test(cx);
881
882 let progress_token = "the-progress-token";
883 let mut language = Language::new(
884 LanguageConfig {
885 name: "Rust".into(),
886 path_suffixes: vec!["rs".to_string()],
887 ..Default::default()
888 },
889 Some(tree_sitter_rust::language()),
890 );
891 let mut fake_servers = language
892 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
893 disk_based_diagnostics_progress_token: Some(progress_token.into()),
894 disk_based_diagnostics_sources: vec!["disk".into()],
895 ..Default::default()
896 }))
897 .await;
898
899 let fs = FakeFs::new(cx.executor().clone());
900 fs.insert_tree(
901 "/dir",
902 json!({
903 "a.rs": "fn a() { A }",
904 "b.rs": "const y: i32 = 1",
905 }),
906 )
907 .await;
908
909 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
910 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
911 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
912
913 // Cause worktree to start the fake language server
914 let _buffer = project
915 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
916 .await
917 .unwrap();
918
919 let mut events = cx.subscribe(&project);
920
921 let fake_server = fake_servers.next().await.unwrap();
922 assert_eq!(
923 events.next().await.unwrap(),
924 Event::LanguageServerAdded(LanguageServerId(0)),
925 );
926
927 fake_server
928 .start_progress(format!("{}/0", progress_token))
929 .await;
930 assert_eq!(
931 events.next().await.unwrap(),
932 Event::DiskBasedDiagnosticsStarted {
933 language_server_id: LanguageServerId(0),
934 }
935 );
936
937 fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
938 uri: Url::from_file_path("/dir/a.rs").unwrap(),
939 version: None,
940 diagnostics: vec![lsp2::Diagnostic {
941 range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
942 severity: Some(lsp2::DiagnosticSeverity::ERROR),
943 message: "undefined variable 'A'".to_string(),
944 ..Default::default()
945 }],
946 });
947 assert_eq!(
948 events.next().await.unwrap(),
949 Event::DiagnosticsUpdated {
950 language_server_id: LanguageServerId(0),
951 path: (worktree_id, Path::new("a.rs")).into()
952 }
953 );
954
955 fake_server.end_progress(format!("{}/0", progress_token));
956 assert_eq!(
957 events.next().await.unwrap(),
958 Event::DiskBasedDiagnosticsFinished {
959 language_server_id: LanguageServerId(0)
960 }
961 );
962
963 let buffer = project
964 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
965 .await
966 .unwrap();
967
968 buffer.update(cx, |buffer, _| {
969 let snapshot = buffer.snapshot();
970 let diagnostics = snapshot
971 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
972 .collect::<Vec<_>>();
973 assert_eq!(
974 diagnostics,
975 &[DiagnosticEntry {
976 range: Point::new(0, 9)..Point::new(0, 10),
977 diagnostic: Diagnostic {
978 severity: lsp2::DiagnosticSeverity::ERROR,
979 message: "undefined variable 'A'".to_string(),
980 group_id: 0,
981 is_primary: true,
982 ..Default::default()
983 }
984 }]
985 )
986 });
987
988 // Ensure publishing empty diagnostics twice only results in one update event.
989 fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
990 uri: Url::from_file_path("/dir/a.rs").unwrap(),
991 version: None,
992 diagnostics: Default::default(),
993 });
994 assert_eq!(
995 events.next().await.unwrap(),
996 Event::DiagnosticsUpdated {
997 language_server_id: LanguageServerId(0),
998 path: (worktree_id, Path::new("a.rs")).into()
999 }
1000 );
1001
1002 fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1003 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1004 version: None,
1005 diagnostics: Default::default(),
1006 });
1007 cx.executor().run_until_parked();
1008 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1009}
1010
1011#[gpui2::test]
1012async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui2::TestAppContext) {
1013 init_test(cx);
1014
1015 let progress_token = "the-progress-token";
1016 let mut language = Language::new(
1017 LanguageConfig {
1018 path_suffixes: vec!["rs".to_string()],
1019 ..Default::default()
1020 },
1021 None,
1022 );
1023 let mut fake_servers = language
1024 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1025 disk_based_diagnostics_sources: vec!["disk".into()],
1026 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1027 ..Default::default()
1028 }))
1029 .await;
1030
1031 let fs = FakeFs::new(cx.executor().clone());
1032 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1033
1034 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1035 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1036
1037 let buffer = project
1038 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1039 .await
1040 .unwrap();
1041
1042 // Simulate diagnostics starting to update.
1043 let fake_server = fake_servers.next().await.unwrap();
1044 fake_server.start_progress(progress_token).await;
1045
1046 // Restart the server before the diagnostics finish updating.
1047 project.update(cx, |project, cx| {
1048 project.restart_language_servers_for_buffers([buffer], cx);
1049 });
1050 let mut events = cx.subscribe(&project);
1051
1052 // Simulate the newly started server sending more diagnostics.
1053 let fake_server = fake_servers.next().await.unwrap();
1054 assert_eq!(
1055 events.next().await.unwrap(),
1056 Event::LanguageServerAdded(LanguageServerId(1))
1057 );
1058 fake_server.start_progress(progress_token).await;
1059 assert_eq!(
1060 events.next().await.unwrap(),
1061 Event::DiskBasedDiagnosticsStarted {
1062 language_server_id: LanguageServerId(1)
1063 }
1064 );
1065 project.update(cx, |project, _| {
1066 assert_eq!(
1067 project
1068 .language_servers_running_disk_based_diagnostics()
1069 .collect::<Vec<_>>(),
1070 [LanguageServerId(1)]
1071 );
1072 });
1073
1074 // All diagnostics are considered done, despite the old server's diagnostic
1075 // task never completing.
1076 fake_server.end_progress(progress_token);
1077 assert_eq!(
1078 events.next().await.unwrap(),
1079 Event::DiskBasedDiagnosticsFinished {
1080 language_server_id: LanguageServerId(1)
1081 }
1082 );
1083 project.update(cx, |project, _| {
1084 assert_eq!(
1085 project
1086 .language_servers_running_disk_based_diagnostics()
1087 .collect::<Vec<_>>(),
1088 [LanguageServerId(0); 0]
1089 );
1090 });
1091}
1092
1093#[gpui2::test]
1094async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui2::TestAppContext) {
1095 init_test(cx);
1096
1097 let mut language = Language::new(
1098 LanguageConfig {
1099 path_suffixes: vec!["rs".to_string()],
1100 ..Default::default()
1101 },
1102 None,
1103 );
1104 let mut fake_servers = language
1105 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1106 ..Default::default()
1107 }))
1108 .await;
1109
1110 let fs = FakeFs::new(cx.executor().clone());
1111 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1112
1113 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1114 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1115
1116 let buffer = project
1117 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1118 .await
1119 .unwrap();
1120
1121 // Publish diagnostics
1122 let fake_server = fake_servers.next().await.unwrap();
1123 fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1124 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1125 version: None,
1126 diagnostics: vec![lsp2::Diagnostic {
1127 range: lsp2::Range::new(lsp2::Position::new(0, 0), lsp2::Position::new(0, 0)),
1128 severity: Some(lsp2::DiagnosticSeverity::ERROR),
1129 message: "the message".to_string(),
1130 ..Default::default()
1131 }],
1132 });
1133
1134 cx.executor().run_until_parked();
1135 buffer.update(cx, |buffer, _| {
1136 assert_eq!(
1137 buffer
1138 .snapshot()
1139 .diagnostics_in_range::<_, usize>(0..1, false)
1140 .map(|entry| entry.diagnostic.message.clone())
1141 .collect::<Vec<_>>(),
1142 ["the message".to_string()]
1143 );
1144 });
1145 project.update(cx, |project, cx| {
1146 assert_eq!(
1147 project.diagnostic_summary(cx),
1148 DiagnosticSummary {
1149 error_count: 1,
1150 warning_count: 0,
1151 }
1152 );
1153 });
1154
1155 project.update(cx, |project, cx| {
1156 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1157 });
1158
1159 // The diagnostics are cleared.
1160 cx.executor().run_until_parked();
1161 buffer.update(cx, |buffer, _| {
1162 assert_eq!(
1163 buffer
1164 .snapshot()
1165 .diagnostics_in_range::<_, usize>(0..1, false)
1166 .map(|entry| entry.diagnostic.message.clone())
1167 .collect::<Vec<_>>(),
1168 Vec::<String>::new(),
1169 );
1170 });
1171 project.update(cx, |project, cx| {
1172 assert_eq!(
1173 project.diagnostic_summary(cx),
1174 DiagnosticSummary {
1175 error_count: 0,
1176 warning_count: 0,
1177 }
1178 );
1179 });
1180}
1181
1182#[gpui2::test]
1183async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui2::TestAppContext) {
1184 init_test(cx);
1185
1186 let mut language = Language::new(
1187 LanguageConfig {
1188 path_suffixes: vec!["rs".to_string()],
1189 ..Default::default()
1190 },
1191 None,
1192 );
1193 let mut fake_servers = language
1194 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1195 name: "the-lsp",
1196 ..Default::default()
1197 }))
1198 .await;
1199
1200 let fs = FakeFs::new(cx.executor().clone());
1201 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1202
1203 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1204 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1205
1206 let buffer = project
1207 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1208 .await
1209 .unwrap();
1210
1211 // Before restarting the server, report diagnostics with an unknown buffer version.
1212 let fake_server = fake_servers.next().await.unwrap();
1213 fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1214 uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
1215 version: Some(10000),
1216 diagnostics: Vec::new(),
1217 });
1218 cx.executor().run_until_parked();
1219
1220 project.update(cx, |project, cx| {
1221 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1222 });
1223 let mut fake_server = fake_servers.next().await.unwrap();
1224 let notification = fake_server
1225 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1226 .await
1227 .text_document;
1228 assert_eq!(notification.version, 0);
1229}
1230
1231#[gpui2::test]
1232async fn test_toggling_enable_language_server(cx: &mut gpui2::TestAppContext) {
1233 init_test(cx);
1234
1235 let mut rust = Language::new(
1236 LanguageConfig {
1237 name: Arc::from("Rust"),
1238 path_suffixes: vec!["rs".to_string()],
1239 ..Default::default()
1240 },
1241 None,
1242 );
1243 let mut fake_rust_servers = rust
1244 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1245 name: "rust-lsp",
1246 ..Default::default()
1247 }))
1248 .await;
1249 let mut js = Language::new(
1250 LanguageConfig {
1251 name: Arc::from("JavaScript"),
1252 path_suffixes: vec!["js".to_string()],
1253 ..Default::default()
1254 },
1255 None,
1256 );
1257 let mut fake_js_servers = js
1258 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1259 name: "js-lsp",
1260 ..Default::default()
1261 }))
1262 .await;
1263
1264 let fs = FakeFs::new(cx.executor().clone());
1265 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1266 .await;
1267
1268 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1269 project.update(cx, |project, _| {
1270 project.languages.add(Arc::new(rust));
1271 project.languages.add(Arc::new(js));
1272 });
1273
1274 let _rs_buffer = project
1275 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1276 .await
1277 .unwrap();
1278 let _js_buffer = project
1279 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1280 .await
1281 .unwrap();
1282
1283 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1284 assert_eq!(
1285 fake_rust_server_1
1286 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1287 .await
1288 .text_document
1289 .uri
1290 .as_str(),
1291 "file:///dir/a.rs"
1292 );
1293
1294 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1295 assert_eq!(
1296 fake_js_server
1297 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1298 .await
1299 .text_document
1300 .uri
1301 .as_str(),
1302 "file:///dir/b.js"
1303 );
1304
1305 // Disable Rust language server, ensuring only that server gets stopped.
1306 cx.update(|cx| {
1307 cx.update_global(|settings: &mut SettingsStore, cx| {
1308 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1309 settings.languages.insert(
1310 Arc::from("Rust"),
1311 LanguageSettingsContent {
1312 enable_language_server: Some(false),
1313 ..Default::default()
1314 },
1315 );
1316 });
1317 })
1318 });
1319 fake_rust_server_1
1320 .receive_notification::<lsp2::notification::Exit>()
1321 .await;
1322
1323 // Enable Rust and disable JavaScript language servers, ensuring that the
1324 // former gets started again and that the latter stops.
1325 cx.update(|cx| {
1326 cx.update_global(|settings: &mut SettingsStore, cx| {
1327 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1328 settings.languages.insert(
1329 Arc::from("Rust"),
1330 LanguageSettingsContent {
1331 enable_language_server: Some(true),
1332 ..Default::default()
1333 },
1334 );
1335 settings.languages.insert(
1336 Arc::from("JavaScript"),
1337 LanguageSettingsContent {
1338 enable_language_server: Some(false),
1339 ..Default::default()
1340 },
1341 );
1342 });
1343 })
1344 });
1345 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1346 assert_eq!(
1347 fake_rust_server_2
1348 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1349 .await
1350 .text_document
1351 .uri
1352 .as_str(),
1353 "file:///dir/a.rs"
1354 );
1355 fake_js_server
1356 .receive_notification::<lsp2::notification::Exit>()
1357 .await;
1358}
1359
1360#[gpui2::test(iterations = 3)]
1361async fn test_transforming_diagnostics(cx: &mut gpui2::TestAppContext) {
1362 init_test(cx);
1363
1364 let mut language = Language::new(
1365 LanguageConfig {
1366 name: "Rust".into(),
1367 path_suffixes: vec!["rs".to_string()],
1368 ..Default::default()
1369 },
1370 Some(tree_sitter_rust::language()),
1371 );
1372 let mut fake_servers = language
1373 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1374 disk_based_diagnostics_sources: vec!["disk".into()],
1375 ..Default::default()
1376 }))
1377 .await;
1378
1379 let text = "
1380 fn a() { A }
1381 fn b() { BB }
1382 fn c() { CCC }
1383 "
1384 .unindent();
1385
1386 let fs = FakeFs::new(cx.executor().clone());
1387 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1388
1389 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1390 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1391
1392 let buffer = project
1393 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1394 .await
1395 .unwrap();
1396
1397 let mut fake_server = fake_servers.next().await.unwrap();
1398 let open_notification = fake_server
1399 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1400 .await;
1401
1402 // Edit the buffer, moving the content down
1403 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1404 let change_notification_1 = fake_server
1405 .receive_notification::<lsp2::notification::DidChangeTextDocument>()
1406 .await;
1407 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1408
1409 // Report some diagnostics for the initial version of the buffer
1410 fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1411 uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
1412 version: Some(open_notification.text_document.version),
1413 diagnostics: vec![
1414 lsp2::Diagnostic {
1415 range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
1416 severity: Some(DiagnosticSeverity::ERROR),
1417 message: "undefined variable 'A'".to_string(),
1418 source: Some("disk".to_string()),
1419 ..Default::default()
1420 },
1421 lsp2::Diagnostic {
1422 range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
1423 severity: Some(DiagnosticSeverity::ERROR),
1424 message: "undefined variable 'BB'".to_string(),
1425 source: Some("disk".to_string()),
1426 ..Default::default()
1427 },
1428 lsp2::Diagnostic {
1429 range: lsp2::Range::new(lsp2::Position::new(2, 9), lsp2::Position::new(2, 12)),
1430 severity: Some(DiagnosticSeverity::ERROR),
1431 source: Some("disk".to_string()),
1432 message: "undefined variable 'CCC'".to_string(),
1433 ..Default::default()
1434 },
1435 ],
1436 });
1437
1438 // The diagnostics have moved down since they were created.
1439 cx.executor().run_until_parked();
1440 buffer.update(cx, |buffer, _| {
1441 assert_eq!(
1442 buffer
1443 .snapshot()
1444 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1445 .collect::<Vec<_>>(),
1446 &[
1447 DiagnosticEntry {
1448 range: Point::new(3, 9)..Point::new(3, 11),
1449 diagnostic: Diagnostic {
1450 source: Some("disk".into()),
1451 severity: DiagnosticSeverity::ERROR,
1452 message: "undefined variable 'BB'".to_string(),
1453 is_disk_based: true,
1454 group_id: 1,
1455 is_primary: true,
1456 ..Default::default()
1457 },
1458 },
1459 DiagnosticEntry {
1460 range: Point::new(4, 9)..Point::new(4, 12),
1461 diagnostic: Diagnostic {
1462 source: Some("disk".into()),
1463 severity: DiagnosticSeverity::ERROR,
1464 message: "undefined variable 'CCC'".to_string(),
1465 is_disk_based: true,
1466 group_id: 2,
1467 is_primary: true,
1468 ..Default::default()
1469 }
1470 }
1471 ]
1472 );
1473 assert_eq!(
1474 chunks_with_diagnostics(buffer, 0..buffer.len()),
1475 [
1476 ("\n\nfn a() { ".to_string(), None),
1477 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1478 (" }\nfn b() { ".to_string(), None),
1479 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1480 (" }\nfn c() { ".to_string(), None),
1481 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1482 (" }\n".to_string(), None),
1483 ]
1484 );
1485 assert_eq!(
1486 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1487 [
1488 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1489 (" }\nfn c() { ".to_string(), None),
1490 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1491 ]
1492 );
1493 });
1494
1495 // Ensure overlapping diagnostics are highlighted correctly.
1496 fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1497 uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
1498 version: Some(open_notification.text_document.version),
1499 diagnostics: vec![
1500 lsp2::Diagnostic {
1501 range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
1502 severity: Some(DiagnosticSeverity::ERROR),
1503 message: "undefined variable 'A'".to_string(),
1504 source: Some("disk".to_string()),
1505 ..Default::default()
1506 },
1507 lsp2::Diagnostic {
1508 range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 12)),
1509 severity: Some(DiagnosticSeverity::WARNING),
1510 message: "unreachable statement".to_string(),
1511 source: Some("disk".to_string()),
1512 ..Default::default()
1513 },
1514 ],
1515 });
1516
1517 cx.executor().run_until_parked();
1518 buffer.update(cx, |buffer, _| {
1519 assert_eq!(
1520 buffer
1521 .snapshot()
1522 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1523 .collect::<Vec<_>>(),
1524 &[
1525 DiagnosticEntry {
1526 range: Point::new(2, 9)..Point::new(2, 12),
1527 diagnostic: Diagnostic {
1528 source: Some("disk".into()),
1529 severity: DiagnosticSeverity::WARNING,
1530 message: "unreachable statement".to_string(),
1531 is_disk_based: true,
1532 group_id: 4,
1533 is_primary: true,
1534 ..Default::default()
1535 }
1536 },
1537 DiagnosticEntry {
1538 range: Point::new(2, 9)..Point::new(2, 10),
1539 diagnostic: Diagnostic {
1540 source: Some("disk".into()),
1541 severity: DiagnosticSeverity::ERROR,
1542 message: "undefined variable 'A'".to_string(),
1543 is_disk_based: true,
1544 group_id: 3,
1545 is_primary: true,
1546 ..Default::default()
1547 },
1548 }
1549 ]
1550 );
1551 assert_eq!(
1552 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1553 [
1554 ("fn a() { ".to_string(), None),
1555 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1556 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1557 ("\n".to_string(), None),
1558 ]
1559 );
1560 assert_eq!(
1561 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1562 [
1563 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1564 ("\n".to_string(), None),
1565 ]
1566 );
1567 });
1568
1569 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1570 // changes since the last save.
1571 buffer.update(cx, |buffer, cx| {
1572 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1573 buffer.edit(
1574 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1575 None,
1576 cx,
1577 );
1578 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1579 });
1580 let change_notification_2 = fake_server
1581 .receive_notification::<lsp2::notification::DidChangeTextDocument>()
1582 .await;
1583 assert!(
1584 change_notification_2.text_document.version > change_notification_1.text_document.version
1585 );
1586
1587 // Handle out-of-order diagnostics
1588 fake_server.notify::<lsp2::notification::PublishDiagnostics>(lsp2::PublishDiagnosticsParams {
1589 uri: lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
1590 version: Some(change_notification_2.text_document.version),
1591 diagnostics: vec![
1592 lsp2::Diagnostic {
1593 range: lsp2::Range::new(lsp2::Position::new(1, 9), lsp2::Position::new(1, 11)),
1594 severity: Some(DiagnosticSeverity::ERROR),
1595 message: "undefined variable 'BB'".to_string(),
1596 source: Some("disk".to_string()),
1597 ..Default::default()
1598 },
1599 lsp2::Diagnostic {
1600 range: lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
1601 severity: Some(DiagnosticSeverity::WARNING),
1602 message: "undefined variable 'A'".to_string(),
1603 source: Some("disk".to_string()),
1604 ..Default::default()
1605 },
1606 ],
1607 });
1608
1609 cx.executor().run_until_parked();
1610 buffer.update(cx, |buffer, _| {
1611 assert_eq!(
1612 buffer
1613 .snapshot()
1614 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1615 .collect::<Vec<_>>(),
1616 &[
1617 DiagnosticEntry {
1618 range: Point::new(2, 21)..Point::new(2, 22),
1619 diagnostic: Diagnostic {
1620 source: Some("disk".into()),
1621 severity: DiagnosticSeverity::WARNING,
1622 message: "undefined variable 'A'".to_string(),
1623 is_disk_based: true,
1624 group_id: 6,
1625 is_primary: true,
1626 ..Default::default()
1627 }
1628 },
1629 DiagnosticEntry {
1630 range: Point::new(3, 9)..Point::new(3, 14),
1631 diagnostic: Diagnostic {
1632 source: Some("disk".into()),
1633 severity: DiagnosticSeverity::ERROR,
1634 message: "undefined variable 'BB'".to_string(),
1635 is_disk_based: true,
1636 group_id: 5,
1637 is_primary: true,
1638 ..Default::default()
1639 },
1640 }
1641 ]
1642 );
1643 });
1644}
1645
1646#[gpui2::test]
1647async fn test_empty_diagnostic_ranges(cx: &mut gpui2::TestAppContext) {
1648 init_test(cx);
1649
1650 let text = concat!(
1651 "let one = ;\n", //
1652 "let two = \n",
1653 "let three = 3;\n",
1654 );
1655
1656 let fs = FakeFs::new(cx.executor().clone());
1657 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1658
1659 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1660 let buffer = project
1661 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1662 .await
1663 .unwrap();
1664
1665 project.update(cx, |project, cx| {
1666 project
1667 .update_buffer_diagnostics(
1668 &buffer,
1669 LanguageServerId(0),
1670 None,
1671 vec![
1672 DiagnosticEntry {
1673 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1674 diagnostic: Diagnostic {
1675 severity: DiagnosticSeverity::ERROR,
1676 message: "syntax error 1".to_string(),
1677 ..Default::default()
1678 },
1679 },
1680 DiagnosticEntry {
1681 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1682 diagnostic: Diagnostic {
1683 severity: DiagnosticSeverity::ERROR,
1684 message: "syntax error 2".to_string(),
1685 ..Default::default()
1686 },
1687 },
1688 ],
1689 cx,
1690 )
1691 .unwrap();
1692 });
1693
1694 // An empty range is extended forward to include the following character.
1695 // At the end of a line, an empty range is extended backward to include
1696 // the preceding character.
1697 buffer.update(cx, |buffer, _| {
1698 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1699 assert_eq!(
1700 chunks
1701 .iter()
1702 .map(|(s, d)| (s.as_str(), *d))
1703 .collect::<Vec<_>>(),
1704 &[
1705 ("let one = ", None),
1706 (";", Some(DiagnosticSeverity::ERROR)),
1707 ("\nlet two =", None),
1708 (" ", Some(DiagnosticSeverity::ERROR)),
1709 ("\nlet three = 3;\n", None)
1710 ]
1711 );
1712 });
1713}
1714
1715#[gpui2::test]
1716async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui2::TestAppContext) {
1717 init_test(cx);
1718
1719 let fs = FakeFs::new(cx.executor().clone());
1720 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1721 .await;
1722
1723 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1724
1725 project.update(cx, |project, cx| {
1726 project
1727 .update_diagnostic_entries(
1728 LanguageServerId(0),
1729 Path::new("/dir/a.rs").to_owned(),
1730 None,
1731 vec![DiagnosticEntry {
1732 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1733 diagnostic: Diagnostic {
1734 severity: DiagnosticSeverity::ERROR,
1735 is_primary: true,
1736 message: "syntax error a1".to_string(),
1737 ..Default::default()
1738 },
1739 }],
1740 cx,
1741 )
1742 .unwrap();
1743 project
1744 .update_diagnostic_entries(
1745 LanguageServerId(1),
1746 Path::new("/dir/a.rs").to_owned(),
1747 None,
1748 vec![DiagnosticEntry {
1749 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1750 diagnostic: Diagnostic {
1751 severity: DiagnosticSeverity::ERROR,
1752 is_primary: true,
1753 message: "syntax error b1".to_string(),
1754 ..Default::default()
1755 },
1756 }],
1757 cx,
1758 )
1759 .unwrap();
1760
1761 assert_eq!(
1762 project.diagnostic_summary(cx),
1763 DiagnosticSummary {
1764 error_count: 2,
1765 warning_count: 0,
1766 }
1767 );
1768 });
1769}
1770
1771#[gpui2::test]
1772async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui2::TestAppContext) {
1773 init_test(cx);
1774
1775 let mut language = Language::new(
1776 LanguageConfig {
1777 name: "Rust".into(),
1778 path_suffixes: vec!["rs".to_string()],
1779 ..Default::default()
1780 },
1781 Some(tree_sitter_rust::language()),
1782 );
1783 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1784
1785 let text = "
1786 fn a() {
1787 f1();
1788 }
1789 fn b() {
1790 f2();
1791 }
1792 fn c() {
1793 f3();
1794 }
1795 "
1796 .unindent();
1797
1798 let fs = FakeFs::new(cx.executor().clone());
1799 fs.insert_tree(
1800 "/dir",
1801 json!({
1802 "a.rs": text.clone(),
1803 }),
1804 )
1805 .await;
1806
1807 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1808 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1809 let buffer = project
1810 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1811 .await
1812 .unwrap();
1813
1814 let mut fake_server = fake_servers.next().await.unwrap();
1815 let lsp_document_version = fake_server
1816 .receive_notification::<lsp2::notification::DidOpenTextDocument>()
1817 .await
1818 .text_document
1819 .version;
1820
1821 // Simulate editing the buffer after the language server computes some edits.
1822 buffer.update(cx, |buffer, cx| {
1823 buffer.edit(
1824 [(
1825 Point::new(0, 0)..Point::new(0, 0),
1826 "// above first function\n",
1827 )],
1828 None,
1829 cx,
1830 );
1831 buffer.edit(
1832 [(
1833 Point::new(2, 0)..Point::new(2, 0),
1834 " // inside first function\n",
1835 )],
1836 None,
1837 cx,
1838 );
1839 buffer.edit(
1840 [(
1841 Point::new(6, 4)..Point::new(6, 4),
1842 "// inside second function ",
1843 )],
1844 None,
1845 cx,
1846 );
1847
1848 assert_eq!(
1849 buffer.text(),
1850 "
1851 // above first function
1852 fn a() {
1853 // inside first function
1854 f1();
1855 }
1856 fn b() {
1857 // inside second function f2();
1858 }
1859 fn c() {
1860 f3();
1861 }
1862 "
1863 .unindent()
1864 );
1865 });
1866
1867 let edits = project
1868 .update(cx, |project, cx| {
1869 project.edits_from_lsp(
1870 &buffer,
1871 vec![
1872 // replace body of first function
1873 lsp2::TextEdit {
1874 range: lsp2::Range::new(
1875 lsp2::Position::new(0, 0),
1876 lsp2::Position::new(3, 0),
1877 ),
1878 new_text: "
1879 fn a() {
1880 f10();
1881 }
1882 "
1883 .unindent(),
1884 },
1885 // edit inside second function
1886 lsp2::TextEdit {
1887 range: lsp2::Range::new(
1888 lsp2::Position::new(4, 6),
1889 lsp2::Position::new(4, 6),
1890 ),
1891 new_text: "00".into(),
1892 },
1893 // edit inside third function via two distinct edits
1894 lsp2::TextEdit {
1895 range: lsp2::Range::new(
1896 lsp2::Position::new(7, 5),
1897 lsp2::Position::new(7, 5),
1898 ),
1899 new_text: "4000".into(),
1900 },
1901 lsp2::TextEdit {
1902 range: lsp2::Range::new(
1903 lsp2::Position::new(7, 5),
1904 lsp2::Position::new(7, 6),
1905 ),
1906 new_text: "".into(),
1907 },
1908 ],
1909 LanguageServerId(0),
1910 Some(lsp_document_version),
1911 cx,
1912 )
1913 })
1914 .await
1915 .unwrap();
1916
1917 buffer.update(cx, |buffer, cx| {
1918 for (range, new_text) in edits {
1919 buffer.edit([(range, new_text)], None, cx);
1920 }
1921 assert_eq!(
1922 buffer.text(),
1923 "
1924 // above first function
1925 fn a() {
1926 // inside first function
1927 f10();
1928 }
1929 fn b() {
1930 // inside second function f200();
1931 }
1932 fn c() {
1933 f4000();
1934 }
1935 "
1936 .unindent()
1937 );
1938 });
1939}
1940
1941#[gpui2::test]
1942async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui2::TestAppContext) {
1943 init_test(cx);
1944
1945 let text = "
1946 use a::b;
1947 use a::c;
1948
1949 fn f() {
1950 b();
1951 c();
1952 }
1953 "
1954 .unindent();
1955
1956 let fs = FakeFs::new(cx.executor().clone());
1957 fs.insert_tree(
1958 "/dir",
1959 json!({
1960 "a.rs": text.clone(),
1961 }),
1962 )
1963 .await;
1964
1965 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1966 let buffer = project
1967 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1968 .await
1969 .unwrap();
1970
1971 // Simulate the language server sending us a small edit in the form of a very large diff.
1972 // Rust-analyzer does this when performing a merge-imports code action.
1973 let edits = project
1974 .update(cx, |project, cx| {
1975 project.edits_from_lsp(
1976 &buffer,
1977 [
1978 // Replace the first use statement without editing the semicolon.
1979 lsp2::TextEdit {
1980 range: lsp2::Range::new(
1981 lsp2::Position::new(0, 4),
1982 lsp2::Position::new(0, 8),
1983 ),
1984 new_text: "a::{b, c}".into(),
1985 },
1986 // Reinsert the remainder of the file between the semicolon and the final
1987 // newline of the file.
1988 lsp2::TextEdit {
1989 range: lsp2::Range::new(
1990 lsp2::Position::new(0, 9),
1991 lsp2::Position::new(0, 9),
1992 ),
1993 new_text: "\n\n".into(),
1994 },
1995 lsp2::TextEdit {
1996 range: lsp2::Range::new(
1997 lsp2::Position::new(0, 9),
1998 lsp2::Position::new(0, 9),
1999 ),
2000 new_text: "
2001 fn f() {
2002 b();
2003 c();
2004 }"
2005 .unindent(),
2006 },
2007 // Delete everything after the first newline of the file.
2008 lsp2::TextEdit {
2009 range: lsp2::Range::new(
2010 lsp2::Position::new(1, 0),
2011 lsp2::Position::new(7, 0),
2012 ),
2013 new_text: "".into(),
2014 },
2015 ],
2016 LanguageServerId(0),
2017 None,
2018 cx,
2019 )
2020 })
2021 .await
2022 .unwrap();
2023
2024 buffer.update(cx, |buffer, cx| {
2025 let edits = edits
2026 .into_iter()
2027 .map(|(range, text)| {
2028 (
2029 range.start.to_point(buffer)..range.end.to_point(buffer),
2030 text,
2031 )
2032 })
2033 .collect::<Vec<_>>();
2034
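        // The server's large diff is minimized to two edits: rewriting the first
        // import and deleting the second `use` line.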
2035 assert_eq!(
2036 edits,
2037 [
2038 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2039 (Point::new(1, 0)..Point::new(2, 0), "".into())
2040 ]
2041 );
2042
2043 for (range, new_text) in edits {
2044 buffer.edit([(range, new_text)], None, cx);
2045 }
2046 assert_eq!(
2047 buffer.text(),
2048 "
2049 use a::{b, c};
2050
2051 fn f() {
2052 b();
2053 c();
2054 }
2055 "
2056 .unindent()
2057 );
2058 });
2059}
2060
2061#[gpui2::test]
2062async fn test_invalid_edits_from_lsp2(cx: &mut gpui2::TestAppContext) {
2063 init_test(cx);
2064
2065 let text = "
2066 use a::b;
2067 use a::c;
2068
2069 fn f() {
2070 b();
2071 c();
2072 }
2073 "
2074 .unindent();
2075
2076 let fs = FakeFs::new(cx.executor().clone());
2077 fs.insert_tree(
2078 "/dir",
2079 json!({
2080 "a.rs": text.clone(),
2081 }),
2082 )
2083 .await;
2084
2085 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2086 let buffer = project
2087 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2088 .await
2089 .unwrap();
2090
    // Simulate the language server sending us edits out of order, with ranges
    // that are sometimes inverted or that point outside of the buffer.
2093 let edits = project
2094 .update(cx, |project, cx| {
2095 project.edits_from_lsp(
2096 &buffer,
2097 [
2098 lsp2::TextEdit {
2099 range: lsp2::Range::new(
2100 lsp2::Position::new(0, 9),
2101 lsp2::Position::new(0, 9),
2102 ),
2103 new_text: "\n\n".into(),
2104 },
2105 lsp2::TextEdit {
2106 range: lsp2::Range::new(
2107 lsp2::Position::new(0, 8),
2108 lsp2::Position::new(0, 4),
2109 ),
2110 new_text: "a::{b, c}".into(),
2111 },
2112 lsp2::TextEdit {
2113 range: lsp2::Range::new(
2114 lsp2::Position::new(1, 0),
2115 lsp2::Position::new(99, 0),
2116 ),
2117 new_text: "".into(),
2118 },
2119 lsp2::TextEdit {
2120 range: lsp2::Range::new(
2121 lsp2::Position::new(0, 9),
2122 lsp2::Position::new(0, 9),
2123 ),
2124 new_text: "
2125 fn f() {
2126 b();
2127 c();
2128 }"
2129 .unindent(),
2130 },
2131 ],
2132 LanguageServerId(0),
2133 None,
2134 cx,
2135 )
2136 })
2137 .await
2138 .unwrap();
2139
2140 buffer.update(cx, |buffer, cx| {
2141 let edits = edits
2142 .into_iter()
2143 .map(|(range, text)| {
2144 (
2145 range.start.to_point(buffer)..range.end.to_point(buffer),
2146 text,
2147 )
2148 })
2149 .collect::<Vec<_>>();
2150
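        // Despite the out-of-order and out-of-bounds ranges, the resolved edits
        // match the minimal set from the previous test.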
2151 assert_eq!(
2152 edits,
2153 [
2154 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2155 (Point::new(1, 0)..Point::new(2, 0), "".into())
2156 ]
2157 );
2158
2159 for (range, new_text) in edits {
2160 buffer.edit([(range, new_text)], None, cx);
2161 }
2162 assert_eq!(
2163 buffer.text(),
2164 "
2165 use a::{b, c};
2166
2167 fn f() {
2168 b();
2169 c();
2170 }
2171 "
2172 .unindent()
2173 );
2174 });
2175}
2176
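/// Collects the chunks of `buffer` within `range` into `(text, severity)` pairs,
/// merging adjacent chunks that share the same diagnostic severity.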
2177fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2178 buffer: &Buffer,
2179 range: Range<T>,
2180) -> Vec<(String, Option<DiagnosticSeverity>)> {
2181 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2182 for chunk in buffer.snapshot().chunks(range, true) {
2183 if chunks.last().map_or(false, |prev_chunk| {
2184 prev_chunk.1 == chunk.diagnostic_severity
2185 }) {
2186 chunks.last_mut().unwrap().0.push_str(chunk.text);
2187 } else {
2188 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2189 }
2190 }
2191 chunks
2192}
2193
2194#[gpui2::test(iterations = 10)]
2195async fn test_definition(cx: &mut gpui2::TestAppContext) {
2196 init_test(cx);
2197
2198 let mut language = Language::new(
2199 LanguageConfig {
2200 name: "Rust".into(),
2201 path_suffixes: vec!["rs".to_string()],
2202 ..Default::default()
2203 },
2204 Some(tree_sitter_rust::language()),
2205 );
2206 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2207
2208 let fs = FakeFs::new(cx.executor().clone());
2209 fs.insert_tree(
2210 "/dir",
2211 json!({
2212 "a.rs": "const fn a() { A }",
2213 "b.rs": "const y: i32 = crate::a()",
2214 }),
2215 )
2216 .await;
2217
2218 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2219 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2220
2221 let buffer = project
2222 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2223 .await
2224 .unwrap();
2225
2226 let fake_server = fake_servers.next().await.unwrap();
2227 fake_server.handle_request::<lsp2::request::GotoDefinition, _, _>(|params, _| async move {
2228 let params = params.text_document_position_params;
2229 assert_eq!(
2230 params.text_document.uri.to_file_path().unwrap(),
2231 Path::new("/dir/b.rs"),
2232 );
2233 assert_eq!(params.position, lsp2::Position::new(0, 22));
2234
2235 Ok(Some(lsp2::GotoDefinitionResponse::Scalar(
2236 lsp2::Location::new(
2237 lsp2::Url::from_file_path("/dir/a.rs").unwrap(),
2238 lsp2::Range::new(lsp2::Position::new(0, 9), lsp2::Position::new(0, 10)),
2239 ),
2240 )))
2241 });
2242
2243 let mut definitions = project
2244 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2245 .await
2246 .unwrap();
2247
    // Assert that no new language server was started.
2249 cx.executor().run_until_parked();
2250 assert!(fake_servers.try_next().is_err());
2251
2252 assert_eq!(definitions.len(), 1);
2253 let definition = definitions.pop().unwrap();
2254 cx.update(|cx| {
2255 let target_buffer = definition.target.buffer.read(cx);
2256 assert_eq!(
2257 target_buffer
2258 .file()
2259 .unwrap()
2260 .as_local()
2261 .unwrap()
2262 .abs_path(cx),
2263 Path::new("/dir/a.rs"),
2264 );
2265 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
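        // Opening the definition's target adds a new, non-visible worktree for that file.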
2266 assert_eq!(
2267 list_worktrees(&project, cx),
2268 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2269 );
2270
2271 drop(definition);
2272 });
2273 cx.update(|cx| {
2274 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2275 });
2276
2277 fn list_worktrees<'a>(
2278 project: &'a Handle<Project>,
2279 cx: &'a AppContext,
2280 ) -> Vec<(&'a Path, bool)> {
2281 project
2282 .read(cx)
2283 .worktrees()
2284 .map(|worktree| {
2285 let worktree = worktree.read(cx);
2286 (
2287 worktree.as_local().unwrap().abs_path().as_ref(),
2288 worktree.is_visible(),
2289 )
2290 })
2291 .collect::<Vec<_>>()
2292 }
2293}
2294
2295#[gpui2::test]
2296async fn test_completions_without_edit_ranges(cx: &mut gpui2::TestAppContext) {
2297 init_test(cx);
2298
2299 let mut language = Language::new(
2300 LanguageConfig {
2301 name: "TypeScript".into(),
2302 path_suffixes: vec!["ts".to_string()],
2303 ..Default::default()
2304 },
2305 Some(tree_sitter_typescript::language_typescript()),
2306 );
2307 let mut fake_language_servers = language
2308 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2309 capabilities: lsp2::ServerCapabilities {
2310 completion_provider: Some(lsp2::CompletionOptions {
2311 trigger_characters: Some(vec![":".to_string()]),
2312 ..Default::default()
2313 }),
2314 ..Default::default()
2315 },
2316 ..Default::default()
2317 }))
2318 .await;
2319
2320 let fs = FakeFs::new(cx.executor().clone());
2321 fs.insert_tree(
2322 "/dir",
2323 json!({
2324 "a.ts": "",
2325 }),
2326 )
2327 .await;
2328
2329 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2330 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2331 let buffer = project
2332 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2333 .await
2334 .unwrap();
2335
2336 let fake_server = fake_language_servers.next().await.unwrap();
2337
2338 let text = "let a = b.fqn";
2339 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2340 let completions = project.update(cx, |project, cx| {
2341 project.completions(&buffer, text.len(), cx)
2342 });
2343
2344 fake_server
2345 .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
2346 Ok(Some(lsp2::CompletionResponse::Array(vec![
2347 lsp2::CompletionItem {
2348 label: "fullyQualifiedName?".into(),
2349 insert_text: Some("fullyQualifiedName".into()),
2350 ..Default::default()
2351 },
2352 ])))
2353 })
2354 .next()
2355 .await;
2356 let completions = completions.await.unwrap();
2357 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
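    // With no edit range provided by the server, the word before the cursor ("fqn") is replaced.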
2358 assert_eq!(completions.len(), 1);
2359 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2360 assert_eq!(
2361 completions[0].old_range.to_offset(&snapshot),
2362 text.len() - 3..text.len()
2363 );
2364
2365 let text = "let a = \"atoms/cmp\"";
2366 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2367 let completions = project.update(cx, |project, cx| {
2368 project.completions(&buffer, text.len() - 1, cx)
2369 });
2370
2371 fake_server
2372 .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
2373 Ok(Some(lsp2::CompletionResponse::Array(vec![
2374 lsp2::CompletionItem {
2375 label: "component".into(),
2376 ..Default::default()
2377 },
2378 ])))
2379 })
2380 .next()
2381 .await;
2382 let completions = completions.await.unwrap();
2383 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
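    // When completing inside a string, only the partial word before the cursor ("cmp") is replaced.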
2384 assert_eq!(completions.len(), 1);
2385 assert_eq!(completions[0].new_text, "component");
2386 assert_eq!(
2387 completions[0].old_range.to_offset(&snapshot),
2388 text.len() - 4..text.len() - 1
2389 );
2390}
2391
2392#[gpui2::test]
2393async fn test_completions_with_carriage_returns(cx: &mut gpui2::TestAppContext) {
2394 init_test(cx);
2395
2396 let mut language = Language::new(
2397 LanguageConfig {
2398 name: "TypeScript".into(),
2399 path_suffixes: vec!["ts".to_string()],
2400 ..Default::default()
2401 },
2402 Some(tree_sitter_typescript::language_typescript()),
2403 );
2404 let mut fake_language_servers = language
2405 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2406 capabilities: lsp2::ServerCapabilities {
2407 completion_provider: Some(lsp2::CompletionOptions {
2408 trigger_characters: Some(vec![":".to_string()]),
2409 ..Default::default()
2410 }),
2411 ..Default::default()
2412 },
2413 ..Default::default()
2414 }))
2415 .await;
2416
2417 let fs = FakeFs::new(cx.executor().clone());
2418 fs.insert_tree(
2419 "/dir",
2420 json!({
2421 "a.ts": "",
2422 }),
2423 )
2424 .await;
2425
2426 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2427 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2428 let buffer = project
2429 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2430 .await
2431 .unwrap();
2432
2433 let fake_server = fake_language_servers.next().await.unwrap();
2434
2435 let text = "let a = b.fqn";
2436 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2437 let completions = project.update(cx, |project, cx| {
2438 project.completions(&buffer, text.len(), cx)
2439 });
2440
2441 fake_server
2442 .handle_request::<lsp2::request::Completion, _, _>(|_, _| async move {
2443 Ok(Some(lsp2::CompletionResponse::Array(vec![
2444 lsp2::CompletionItem {
2445 label: "fullyQualifiedName?".into(),
2446 insert_text: Some("fully\rQualified\r\nName".into()),
2447 ..Default::default()
2448 },
2449 ])))
2450 })
2451 .next()
2452 .await;
2453 let completions = completions.await.unwrap();
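    // Carriage returns in the completion text are normalized to newlines.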
2454 assert_eq!(completions.len(), 1);
2455 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2456}
2457
2458#[gpui2::test(iterations = 10)]
2459async fn test_apply_code_actions_with_commands(cx: &mut gpui2::TestAppContext) {
2460 init_test(cx);
2461
2462 let mut language = Language::new(
2463 LanguageConfig {
2464 name: "TypeScript".into(),
2465 path_suffixes: vec!["ts".to_string()],
2466 ..Default::default()
2467 },
2468 None,
2469 );
2470 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2471
2472 let fs = FakeFs::new(cx.executor().clone());
2473 fs.insert_tree(
2474 "/dir",
2475 json!({
2476 "a.ts": "a",
2477 }),
2478 )
2479 .await;
2480
2481 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2482 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2483 let buffer = project
2484 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2485 .await
2486 .unwrap();
2487
2488 let fake_server = fake_language_servers.next().await.unwrap();
2489
    // The language server returns code actions that contain commands rather than edits.
2491 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2492 fake_server
2493 .handle_request::<lsp2::request::CodeActionRequest, _, _>(|_, _| async move {
2494 Ok(Some(vec![
2495 lsp2::CodeActionOrCommand::CodeAction(lsp2::CodeAction {
2496 title: "The code action".into(),
2497 command: Some(lsp2::Command {
2498 title: "The command".into(),
2499 command: "_the/command".into(),
2500 arguments: Some(vec![json!("the-argument")]),
2501 }),
2502 ..Default::default()
2503 }),
2504 lsp2::CodeActionOrCommand::CodeAction(lsp2::CodeAction {
2505 title: "two".into(),
2506 ..Default::default()
2507 }),
2508 ]))
2509 })
2510 .next()
2511 .await;
2512
2513 let action = actions.await.unwrap()[0].clone();
2514 let apply = project.update(cx, |project, cx| {
2515 project.apply_code_action(buffer.clone(), action, true, cx)
2516 });
2517
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2520 fake_server.handle_request::<lsp2::request::CodeActionResolveRequest, _, _>(
2521 |action, _| async move { Ok(action) },
2522 );
2523
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2526 fake_server
2527 .handle_request::<lsp2::request::ExecuteCommand, _, _>({
2528 let fake = fake_server.clone();
2529 move |params, _| {
2530 assert_eq!(params.command, "_the/command");
2531 let fake = fake.clone();
2532 async move {
2533 fake.server
2534 .request::<lsp2::request::ApplyWorkspaceEdit>(
2535 lsp2::ApplyWorkspaceEditParams {
2536 label: None,
2537 edit: lsp2::WorkspaceEdit {
2538 changes: Some(
2539 [(
2540 lsp2::Url::from_file_path("/dir/a.ts").unwrap(),
2541 vec![lsp2::TextEdit {
2542 range: lsp2::Range::new(
2543 lsp2::Position::new(0, 0),
2544 lsp2::Position::new(0, 0),
2545 ),
2546 new_text: "X".into(),
2547 }],
2548 )]
2549 .into_iter()
2550 .collect(),
2551 ),
2552 ..Default::default()
2553 },
2554 },
2555 )
2556 .await
2557 .unwrap();
2558 Ok(Some(json!(null)))
2559 }
2560 }
2561 })
2562 .next()
2563 .await;
2564
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2567 let transaction = apply.await.unwrap();
2568 assert!(transaction.0.contains_key(&buffer));
2569 buffer.update(cx, |buffer, cx| {
2570 assert_eq!(buffer.text(), "Xa");
2571 buffer.undo(cx);
2572 assert_eq!(buffer.text(), "a");
2573 });
2574}
2575
2576#[gpui2::test(iterations = 10)]
2577async fn test_save_file(cx: &mut gpui2::TestAppContext) {
2578 init_test(cx);
2579
2580 let fs = FakeFs::new(cx.executor().clone());
2581 fs.insert_tree(
2582 "/dir",
2583 json!({
2584 "file1": "the old contents",
2585 }),
2586 )
2587 .await;
2588
2589 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2590 let buffer = project
2591 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2592 .await
2593 .unwrap();
2594 buffer.update(cx, |buffer, cx| {
2595 assert_eq!(buffer.text(), "the old contents");
2596 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2597 });
2598
2599 project
2600 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2601 .await
2602 .unwrap();
2603
2604 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2605 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2606}
2607
2608#[gpui2::test]
2609async fn test_save_in_single_file_worktree(cx: &mut gpui2::TestAppContext) {
2610 init_test(cx);
2611
2612 let fs = FakeFs::new(cx.executor().clone());
2613 fs.insert_tree(
2614 "/dir",
2615 json!({
2616 "file1": "the old contents",
2617 }),
2618 )
2619 .await;
2620
2621 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2622 let buffer = project
2623 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2624 .await
2625 .unwrap();
2626 buffer.update(cx, |buffer, cx| {
2627 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2628 });
2629
2630 project
2631 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2632 .await
2633 .unwrap();
2634
2635 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2636 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2637}
2638
2639#[gpui2::test]
2640async fn test_save_as(cx: &mut gpui2::TestAppContext) {
2641 init_test(cx);
2642
2643 let fs = FakeFs::new(cx.executor().clone());
2644 fs.insert_tree("/dir", json!({})).await;
2645
2646 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2647
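    // Register the Rust language so the buffer picks it up after being saved with an `.rs` extension.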
2648 let languages = project.update(cx, |project, _| project.languages().clone());
2649 languages.register(
2650 "/some/path",
2651 LanguageConfig {
2652 name: "Rust".into(),
2653 path_suffixes: vec!["rs".into()],
2654 ..Default::default()
2655 },
2656 tree_sitter_rust::language(),
2657 vec![],
2658 |_| Default::default(),
2659 );
2660
2661 let buffer = project.update(cx, |project, cx| {
2662 project.create_buffer("", None, cx).unwrap()
2663 });
2664 buffer.update(cx, |buffer, cx| {
2665 buffer.edit([(0..0, "abc")], None, cx);
2666 assert!(buffer.is_dirty());
2667 assert!(!buffer.has_conflict());
2668 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2669 });
2670 project
2671 .update(cx, |project, cx| {
2672 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2673 })
2674 .await
2675 .unwrap();
2676 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2677
2678 cx.executor().run_until_parked();
2679 buffer.update(cx, |buffer, cx| {
2680 assert_eq!(
2681 buffer.file().unwrap().full_path(cx),
2682 Path::new("dir/file1.rs")
2683 );
2684 assert!(!buffer.is_dirty());
2685 assert!(!buffer.has_conflict());
2686 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2687 });
2688
2689 let opened_buffer = project
2690 .update(cx, |project, cx| {
2691 project.open_local_buffer("/dir/file1.rs", cx)
2692 })
2693 .await
2694 .unwrap();
2695 assert_eq!(opened_buffer, buffer);
2696}
2697
2698#[gpui2::test(retries = 5)]
2699async fn test_rescan_and_remote_updates(cx: &mut gpui2::TestAppContext) {
2700 init_test(cx);
2701 cx.executor().allow_parking();
2702
2703 let dir = temp_tree(json!({
2704 "a": {
2705 "file1": "",
2706 "file2": "",
2707 "file3": "",
2708 },
2709 "b": {
2710 "c": {
2711 "file4": "",
2712 "file5": "",
2713 }
2714 }
2715 }));
2716
2717 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2718 let rpc = project.update(cx, |p, _| p.client.clone());
2719
2720 let buffer_for_path = |path: &'static str, cx: &mut gpui2::TestAppContext| {
2721 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2722 async move { buffer.await.unwrap() }
2723 };
2724 let id_for_path = |path: &'static str, cx: &mut gpui2::TestAppContext| {
2725 project.update(cx, |project, cx| {
2726 let tree = project.worktrees().next().unwrap();
2727 tree.read(cx)
2728 .entry_for_path(path)
2729 .unwrap_or_else(|| panic!("no entry for path {}", path))
2730 .id
2731 })
2732 };
2733
2734 let buffer2 = buffer_for_path("a/file2", cx).await;
2735 let buffer3 = buffer_for_path("a/file3", cx).await;
2736 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2737 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2738
2739 let file2_id = id_for_path("a/file2", cx);
2740 let file3_id = id_for_path("a/file3", cx);
2741 let file4_id = id_for_path("b/c/file4", cx);
2742
2743 // Create a remote copy of this worktree.
2744 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2745
2746 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2747
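    // Record the local worktree's update stream so it can be replayed on the remote worktree below.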
2748 let updates = Arc::new(Mutex::new(Vec::new()));
2749 tree.update(cx, |tree, cx| {
2750 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2751 let updates = updates.clone();
2752 move |update| {
2753 updates.lock().push(update);
2754 async { true }
2755 }
2756 });
2757 });
2758
2759 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2760 cx.executor().run_until_parked();
2761
2762 cx.update(|cx| {
2763 assert!(!buffer2.read(cx).is_dirty());
2764 assert!(!buffer3.read(cx).is_dirty());
2765 assert!(!buffer4.read(cx).is_dirty());
2766 assert!(!buffer5.read(cx).is_dirty());
2767 });
2768
2769 // Rename and delete files and directories.
2770 tree.flush_fs_events(cx).await;
2771 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2772 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2773 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2774 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2775 tree.flush_fs_events(cx).await;
2776
2777 let expected_paths = vec![
2778 "a",
2779 "a/file1",
2780 "a/file2.new",
2781 "b",
2782 "d",
2783 "d/file3",
2784 "d/file4",
2785 ];
2786
2787 cx.update(|app| {
2788 assert_eq!(
2789 tree.read(app)
2790 .paths()
2791 .map(|p| p.to_str().unwrap())
2792 .collect::<Vec<_>>(),
2793 expected_paths
2794 );
2795 });
2796
2797 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2798 assert_eq!(id_for_path("d/file3", cx), file3_id);
2799 assert_eq!(id_for_path("d/file4", cx), file4_id);
2800
2801 cx.update(|cx| {
2802 assert_eq!(
2803 buffer2.read(cx).file().unwrap().path().as_ref(),
2804 Path::new("a/file2.new")
2805 );
2806 assert_eq!(
2807 buffer3.read(cx).file().unwrap().path().as_ref(),
2808 Path::new("d/file3")
2809 );
2810 assert_eq!(
2811 buffer4.read(cx).file().unwrap().path().as_ref(),
2812 Path::new("d/file4")
2813 );
2814 assert_eq!(
2815 buffer5.read(cx).file().unwrap().path().as_ref(),
2816 Path::new("b/c/file5")
2817 );
2818
2819 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
2820 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
2821 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
2822 assert!(buffer5.read(cx).file().unwrap().is_deleted());
2823 });
2824
2825 // Update the remote worktree. Check that it becomes consistent with the
2826 // local worktree.
2827 cx.executor().run_until_parked();
2828
2829 remote.update(cx, |remote, _| {
2830 for update in updates.lock().drain(..) {
2831 remote.as_remote_mut().unwrap().update_from_remote(update);
2832 }
2833 });
2834 cx.executor().run_until_parked();
2835 remote.update(cx, |remote, _| {
2836 assert_eq!(
2837 remote
2838 .paths()
2839 .map(|p| p.to_str().unwrap())
2840 .collect::<Vec<_>>(),
2841 expected_paths
2842 );
2843 });
2844}
2845
2846#[gpui2::test(iterations = 10)]
2847async fn test_buffer_identity_across_renames(cx: &mut gpui2::TestAppContext) {
2848 init_test(cx);
2849
2850 let fs = FakeFs::new(cx.executor().clone());
2851 fs.insert_tree(
2852 "/dir",
2853 json!({
2854 "a": {
2855 "file1": "",
2856 }
2857 }),
2858 )
2859 .await;
2860
2861 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2862 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2863 let tree_id = tree.update(cx, |tree, _| tree.id());
2864
2865 let id_for_path = |path: &'static str, cx: &mut gpui2::TestAppContext| {
2866 project.update(cx, |project, cx| {
2867 let tree = project.worktrees().next().unwrap();
2868 tree.read(cx)
2869 .entry_for_path(path)
2870 .unwrap_or_else(|| panic!("no entry for path {}", path))
2871 .id
2872 })
2873 };
2874
2875 let dir_id = id_for_path("a", cx);
2876 let file_id = id_for_path("a/file1", cx);
2877 let buffer = project
2878 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2879 .await
2880 .unwrap();
2881 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
2882
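    // Rename the parent directory. Entry ids and the open buffer should be preserved.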
2883 project
2884 .update(cx, |project, cx| {
2885 project.rename_entry(dir_id, Path::new("b"), cx)
2886 })
2887 .unwrap()
2888 .await
2889 .unwrap();
2890 cx.executor().run_until_parked();
2891
2892 assert_eq!(id_for_path("b", cx), dir_id);
2893 assert_eq!(id_for_path("b/file1", cx), file_id);
2894 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
2895}
2896
2897#[gpui2::test]
2898async fn test_buffer_deduping(cx: &mut gpui2::TestAppContext) {
2899 init_test(cx);
2900
2901 let fs = FakeFs::new(cx.executor().clone());
2902 fs.insert_tree(
2903 "/dir",
2904 json!({
2905 "a.txt": "a-contents",
2906 "b.txt": "b-contents",
2907 }),
2908 )
2909 .await;
2910
2911 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2912
2913 // Spawn multiple tasks to open paths, repeating some paths.
2914 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2915 (
2916 p.open_local_buffer("/dir/a.txt", cx),
2917 p.open_local_buffer("/dir/b.txt", cx),
2918 p.open_local_buffer("/dir/a.txt", cx),
2919 )
2920 });
2921
2922 let buffer_a_1 = buffer_a_1.await.unwrap();
2923 let buffer_a_2 = buffer_a_2.await.unwrap();
2924 let buffer_b = buffer_b.await.unwrap();
2925 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
2926 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
2927
2928 // There is only one buffer per path.
2929 let buffer_a_id = buffer_a_1.entity_id();
2930 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
2931
2932 // Open the same path again while it is still open.
2933 drop(buffer_a_1);
2934 let buffer_a_3 = project
2935 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2936 .await
2937 .unwrap();
2938
2939 // There's still only one buffer per path.
2940 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
2941}
2942
2943#[gpui2::test]
2944async fn test_buffer_is_dirty(cx: &mut gpui2::TestAppContext) {
2945 init_test(cx);
2947
2948 let fs = FakeFs::new(cx.executor().clone());
2949 fs.insert_tree(
2950 "/dir",
2951 json!({
2952 "file1": "abc",
2953 "file2": "def",
2954 "file3": "ghi",
2955 }),
2956 )
2957 .await;
2959
2960 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2961
2962 let buffer1 = project
2963 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2964 .await
2965 .unwrap();
2966 let events = Arc::new(Mutex::new(Vec::new()));
2967
2970 // initially, the buffer isn't dirty.
2971 buffer1.update(cx, |buffer, cx| {
2972 cx.subscribe(&buffer1, {
2973 let events = events.clone();
2974 move |_, _, event, _| match event {
2975 BufferEvent::Operation(_) => {}
2976 _ => events.lock().push(event.clone()),
2977 }
2978 })
2979 .detach();
2980
2981 assert!(!buffer.is_dirty());
2982 assert!(events.lock().is_empty());
2983
2984 buffer.edit([(1..2, "")], None, cx);
2985 });
2987
    // after the first edit, the buffer is dirty, and emits a DirtyChanged event.
2989 buffer1.update(cx, |buffer, cx| {
2990 assert!(buffer.text() == "ac");
2991 assert!(buffer.is_dirty());
2992 assert_eq!(
2993 *events.lock(),
2994 &[language2::Event::Edited, language2::Event::DirtyChanged]
2995 );
2996 events.lock().clear();
2997 buffer.did_save(
2998 buffer.version(),
2999 buffer.as_rope().fingerprint(),
3000 buffer.file().unwrap().mtime(),
3001 cx,
3002 );
3003 });
3005
    // after saving, the buffer is not dirty, and emits a Saved event.
3007 buffer1.update(cx, |buffer, cx| {
3008 assert!(!buffer.is_dirty());
3009 assert_eq!(*events.lock(), &[language2::Event::Saved]);
3010 events.lock().clear();
3011
3012 buffer.edit([(1..1, "B")], None, cx);
3013 buffer.edit([(2..2, "D")], None, cx);
3014 });
3015
3018 // after editing again, the buffer is dirty, and emits another dirty event.
3019 buffer1.update(cx, |buffer, cx| {
3020 assert!(buffer.text() == "aBDc");
3021 assert!(buffer.is_dirty());
3022 assert_eq!(
3023 *events.lock(),
3024 &[
3025 language2::Event::Edited,
3026 language2::Event::DirtyChanged,
3027 language2::Event::Edited,
3028 ],
3029 );
3030 events.lock().clear();
3031
3032 // After restoring the buffer to its previously-saved state,
3033 // the buffer is not considered dirty anymore.
3034 buffer.edit([(1..3, "")], None, cx);
3035 assert!(buffer.text() == "ac");
3036 assert!(!buffer.is_dirty());
3037 });
3038
3040 assert_eq!(
3041 *events.lock(),
3042 &[language2::Event::Edited, language2::Event::DirtyChanged]
3043 );
3044
3045 // When a file is deleted, the buffer is considered dirty.
3046 let events = Arc::new(Mutex::new(Vec::new()));
3047 let buffer2 = project
3048 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3049 .await
3050 .unwrap();
3051 buffer2.update(cx, |_, cx| {
3052 cx.subscribe(&buffer2, {
3053 let events = events.clone();
3054 move |_, _, event, _| events.lock().push(event.clone())
3055 })
3056 .detach();
3057 });
3058
3061 fs.remove_file("/dir/file2".as_ref(), Default::default())
3062 .await
3063 .unwrap();
3064 cx.executor().run_until_parked();
3065 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3066 assert_eq!(
3067 *events.lock(),
3068 &[
3069 language2::Event::DirtyChanged,
3070 language2::Event::FileHandleChanged
3071 ]
3072 );
3073
    // When a file that is already dirty is deleted, no DirtyChanged event is emitted.
3075 let events = Arc::new(Mutex::new(Vec::new()));
3076 let buffer3 = project
3077 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3078 .await
3079 .unwrap();
3080 buffer3.update(cx, |_, cx| {
3081 cx.subscribe(&buffer3, {
3082 let events = events.clone();
3083 move |_, _, event, _| events.lock().push(event.clone())
3084 })
3085 .detach();
3086 });
3087
3089 buffer3.update(cx, |buffer, cx| {
3090 buffer.edit([(0..0, "x")], None, cx);
3091 });
3092 events.lock().clear();
3093 fs.remove_file("/dir/file3".as_ref(), Default::default())
3094 .await
3095 .unwrap();
3096 cx.executor().run_until_parked();
3097 assert_eq!(*events.lock(), &[language2::Event::FileHandleChanged]);
3098 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3099}
3100
3101#[gpui2::test]
3102async fn test_buffer_file_changes_on_disk(cx: &mut gpui2::TestAppContext) {
3103 init_test(cx);
3104
3105 let initial_contents = "aaa\nbbbbb\nc\n";
3106 let fs = FakeFs::new(cx.executor().clone());
3107 fs.insert_tree(
3108 "/dir",
3109 json!({
3110 "the-file": initial_contents,
3111 }),
3112 )
3113 .await;
3114 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3115 let buffer = project
3116 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3117 .await
3118 .unwrap();
3119
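    // Place an anchor on each of the first three lines to verify that anchors are preserved across the reload.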
3120 let anchors = (0..3)
3121 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3122 .collect::<Vec<_>>();
3123
3124 // Change the file on disk, adding two new lines of text, and removing
3125 // one line.
3126 buffer.update(cx, |buffer, _| {
3127 assert!(!buffer.is_dirty());
3128 assert!(!buffer.has_conflict());
3129 });
3130 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3131 fs.save(
3132 "/dir/the-file".as_ref(),
3133 &new_contents.into(),
3134 LineEnding::Unix,
3135 )
3136 .await
3137 .unwrap();
3138
3139 // Because the buffer was not modified, it is reloaded from disk. Its
3140 // contents are edited according to the diff between the old and new
3141 // file contents.
3142 cx.executor().run_until_parked();
3143 buffer.update(cx, |buffer, _| {
3144 assert_eq!(buffer.text(), new_contents);
3145 assert!(!buffer.is_dirty());
3146 assert!(!buffer.has_conflict());
3147
3148 let anchor_positions = anchors
3149 .iter()
3150 .map(|anchor| anchor.to_point(&*buffer))
3151 .collect::<Vec<_>>();
3152 assert_eq!(
3153 anchor_positions,
3154 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3155 );
3156 });
3157
3158 // Modify the buffer
3159 buffer.update(cx, |buffer, cx| {
3160 buffer.edit([(0..0, " ")], None, cx);
3161 assert!(buffer.is_dirty());
3162 assert!(!buffer.has_conflict());
3163 });
3164
3165 // Change the file on disk again, adding blank lines to the beginning.
3166 fs.save(
3167 "/dir/the-file".as_ref(),
3168 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3169 LineEnding::Unix,
3170 )
3171 .await
3172 .unwrap();
3173
3174 // Because the buffer is modified, it doesn't reload from disk, but is
3175 // marked as having a conflict.
3176 cx.executor().run_until_parked();
3177 buffer.update(cx, |buffer, _| {
3178 assert!(buffer.has_conflict());
3179 });
3180}
3181
3182#[gpui2::test]
3183async fn test_buffer_line_endings(cx: &mut gpui2::TestAppContext) {
3184 init_test(cx);
3185
3186 let fs = FakeFs::new(cx.executor().clone());
3187 fs.insert_tree(
3188 "/dir",
3189 json!({
3190 "file1": "a\nb\nc\n",
3191 "file2": "one\r\ntwo\r\nthree\r\n",
3192 }),
3193 )
3194 .await;
3195
3196 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3197 let buffer1 = project
3198 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3199 .await
3200 .unwrap();
3201 let buffer2 = project
3202 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3203 .await
3204 .unwrap();
3205
3206 buffer1.update(cx, |buffer, _| {
3207 assert_eq!(buffer.text(), "a\nb\nc\n");
3208 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3209 });
3210 buffer2.update(cx, |buffer, _| {
3211 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3212 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3213 });
3214
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3217 fs.save(
3218 "/dir/file1".as_ref(),
3219 &"aaa\nb\nc\n".into(),
3220 LineEnding::Windows,
3221 )
3222 .await
3223 .unwrap();
3224 cx.executor().run_until_parked();
3225 buffer1.update(cx, |buffer, _| {
3226 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3227 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3228 });
3229
    // Save a file with Windows line endings. The file is written correctly.
3231 buffer2.update(cx, |buffer, cx| {
3232 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3233 });
3234 project
3235 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3236 .await
3237 .unwrap();
3238 assert_eq!(
3239 fs.load("/dir/file2".as_ref()).await.unwrap(),
3240 "one\r\ntwo\r\nthree\r\nfour\r\n",
3241 );
3242}
3243
3244#[gpui2::test]
3245async fn test_grouped_diagnostics(cx: &mut gpui2::TestAppContext) {
3246 init_test(cx);
3247
3248 let fs = FakeFs::new(cx.executor().clone());
3249 fs.insert_tree(
3250 "/the-dir",
3251 json!({
3252 "a.rs": "
3253 fn foo(mut v: Vec<usize>) {
3254 for x in &v {
3255 v.push(1);
3256 }
3257 }
3258 "
3259 .unindent(),
3260 }),
3261 )
3262 .await;
3263
3264 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3265 let buffer = project
3266 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3267 .await
3268 .unwrap();
3269
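    // Publish diagnostics whose related information links hints to their primary errors, so that they end up in the same groups.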
3270 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3271 let message = lsp2::PublishDiagnosticsParams {
3272 uri: buffer_uri.clone(),
3273 diagnostics: vec![
3274 lsp2::Diagnostic {
3275 range: lsp2::Range::new(lsp2::Position::new(1, 8), lsp2::Position::new(1, 9)),
3276 severity: Some(DiagnosticSeverity::WARNING),
3277 message: "error 1".to_string(),
3278 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
3279 location: lsp2::Location {
3280 uri: buffer_uri.clone(),
3281 range: lsp2::Range::new(
3282 lsp2::Position::new(1, 8),
3283 lsp2::Position::new(1, 9),
3284 ),
3285 },
3286 message: "error 1 hint 1".to_string(),
3287 }]),
3288 ..Default::default()
3289 },
3290 lsp2::Diagnostic {
3291 range: lsp2::Range::new(lsp2::Position::new(1, 8), lsp2::Position::new(1, 9)),
3292 severity: Some(DiagnosticSeverity::HINT),
3293 message: "error 1 hint 1".to_string(),
3294 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
3295 location: lsp2::Location {
3296 uri: buffer_uri.clone(),
3297 range: lsp2::Range::new(
3298 lsp2::Position::new(1, 8),
3299 lsp2::Position::new(1, 9),
3300 ),
3301 },
3302 message: "original diagnostic".to_string(),
3303 }]),
3304 ..Default::default()
3305 },
3306 lsp2::Diagnostic {
3307 range: lsp2::Range::new(lsp2::Position::new(2, 8), lsp2::Position::new(2, 17)),
3308 severity: Some(DiagnosticSeverity::ERROR),
3309 message: "error 2".to_string(),
3310 related_information: Some(vec![
3311 lsp2::DiagnosticRelatedInformation {
3312 location: lsp2::Location {
3313 uri: buffer_uri.clone(),
3314 range: lsp2::Range::new(
3315 lsp2::Position::new(1, 13),
3316 lsp2::Position::new(1, 15),
3317 ),
3318 },
3319 message: "error 2 hint 1".to_string(),
3320 },
3321 lsp2::DiagnosticRelatedInformation {
3322 location: lsp2::Location {
3323 uri: buffer_uri.clone(),
3324 range: lsp2::Range::new(
3325 lsp2::Position::new(1, 13),
3326 lsp2::Position::new(1, 15),
3327 ),
3328 },
3329 message: "error 2 hint 2".to_string(),
3330 },
3331 ]),
3332 ..Default::default()
3333 },
3334 lsp2::Diagnostic {
3335 range: lsp2::Range::new(lsp2::Position::new(1, 13), lsp2::Position::new(1, 15)),
3336 severity: Some(DiagnosticSeverity::HINT),
3337 message: "error 2 hint 1".to_string(),
3338 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
3339 location: lsp2::Location {
3340 uri: buffer_uri.clone(),
3341 range: lsp2::Range::new(
3342 lsp2::Position::new(2, 8),
3343 lsp2::Position::new(2, 17),
3344 ),
3345 },
3346 message: "original diagnostic".to_string(),
3347 }]),
3348 ..Default::default()
3349 },
3350 lsp2::Diagnostic {
3351 range: lsp2::Range::new(lsp2::Position::new(1, 13), lsp2::Position::new(1, 15)),
3352 severity: Some(DiagnosticSeverity::HINT),
3353 message: "error 2 hint 2".to_string(),
3354 related_information: Some(vec![lsp2::DiagnosticRelatedInformation {
3355 location: lsp2::Location {
3356 uri: buffer_uri,
3357 range: lsp2::Range::new(
3358 lsp2::Position::new(2, 8),
3359 lsp2::Position::new(2, 17),
3360 ),
3361 },
3362 message: "original diagnostic".to_string(),
3363 }]),
3364 ..Default::default()
3365 },
3366 ],
3367 version: None,
3368 };
3369
3370 project
3371 .update(cx, |p, cx| {
3372 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3373 })
3374 .unwrap();
3375 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3376
3377 assert_eq!(
3378 buffer
3379 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3380 .collect::<Vec<_>>(),
3381 &[
3382 DiagnosticEntry {
3383 range: Point::new(1, 8)..Point::new(1, 9),
3384 diagnostic: Diagnostic {
3385 severity: DiagnosticSeverity::WARNING,
3386 message: "error 1".to_string(),
3387 group_id: 1,
3388 is_primary: true,
3389 ..Default::default()
3390 }
3391 },
3392 DiagnosticEntry {
3393 range: Point::new(1, 8)..Point::new(1, 9),
3394 diagnostic: Diagnostic {
3395 severity: DiagnosticSeverity::HINT,
3396 message: "error 1 hint 1".to_string(),
3397 group_id: 1,
3398 is_primary: false,
3399 ..Default::default()
3400 }
3401 },
3402 DiagnosticEntry {
3403 range: Point::new(1, 13)..Point::new(1, 15),
3404 diagnostic: Diagnostic {
3405 severity: DiagnosticSeverity::HINT,
3406 message: "error 2 hint 1".to_string(),
3407 group_id: 0,
3408 is_primary: false,
3409 ..Default::default()
3410 }
3411 },
3412 DiagnosticEntry {
3413 range: Point::new(1, 13)..Point::new(1, 15),
3414 diagnostic: Diagnostic {
3415 severity: DiagnosticSeverity::HINT,
3416 message: "error 2 hint 2".to_string(),
3417 group_id: 0,
3418 is_primary: false,
3419 ..Default::default()
3420 }
3421 },
3422 DiagnosticEntry {
3423 range: Point::new(2, 8)..Point::new(2, 17),
3424 diagnostic: Diagnostic {
3425 severity: DiagnosticSeverity::ERROR,
3426 message: "error 2".to_string(),
3427 group_id: 0,
3428 is_primary: true,
3429 ..Default::default()
3430 }
3431 }
3432 ]
3433 );
3434
3435 assert_eq!(
3436 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3437 &[
3438 DiagnosticEntry {
3439 range: Point::new(1, 13)..Point::new(1, 15),
3440 diagnostic: Diagnostic {
3441 severity: DiagnosticSeverity::HINT,
3442 message: "error 2 hint 1".to_string(),
3443 group_id: 0,
3444 is_primary: false,
3445 ..Default::default()
3446 }
3447 },
3448 DiagnosticEntry {
3449 range: Point::new(1, 13)..Point::new(1, 15),
3450 diagnostic: Diagnostic {
3451 severity: DiagnosticSeverity::HINT,
3452 message: "error 2 hint 2".to_string(),
3453 group_id: 0,
3454 is_primary: false,
3455 ..Default::default()
3456 }
3457 },
3458 DiagnosticEntry {
3459 range: Point::new(2, 8)..Point::new(2, 17),
3460 diagnostic: Diagnostic {
3461 severity: DiagnosticSeverity::ERROR,
3462 message: "error 2".to_string(),
3463 group_id: 0,
3464 is_primary: true,
3465 ..Default::default()
3466 }
3467 }
3468 ]
3469 );
3470
3471 assert_eq!(
3472 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3473 &[
3474 DiagnosticEntry {
3475 range: Point::new(1, 8)..Point::new(1, 9),
3476 diagnostic: Diagnostic {
3477 severity: DiagnosticSeverity::WARNING,
3478 message: "error 1".to_string(),
3479 group_id: 1,
3480 is_primary: true,
3481 ..Default::default()
3482 }
3483 },
3484 DiagnosticEntry {
3485 range: Point::new(1, 8)..Point::new(1, 9),
3486 diagnostic: Diagnostic {
3487 severity: DiagnosticSeverity::HINT,
3488 message: "error 1 hint 1".to_string(),
3489 group_id: 1,
3490 is_primary: false,
3491 ..Default::default()
3492 }
3493 },
3494 ]
3495 );
3496}
3497
3498#[gpui2::test]
3499async fn test_rename(cx: &mut gpui2::TestAppContext) {
3500 init_test(cx);
3501
3502 let mut language = Language::new(
3503 LanguageConfig {
3504 name: "Rust".into(),
3505 path_suffixes: vec!["rs".to_string()],
3506 ..Default::default()
3507 },
3508 Some(tree_sitter_rust::language()),
3509 );
3510 let mut fake_servers = language
3511 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3512 capabilities: lsp2::ServerCapabilities {
3513 rename_provider: Some(lsp2::OneOf::Right(lsp2::RenameOptions {
3514 prepare_provider: Some(true),
3515 work_done_progress_options: Default::default(),
3516 })),
3517 ..Default::default()
3518 },
3519 ..Default::default()
3520 }))
3521 .await;
3522
3523 let fs = FakeFs::new(cx.executor().clone());
3524 fs.insert_tree(
3525 "/dir",
3526 json!({
3527 "one.rs": "const ONE: usize = 1;",
3528 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3529 }),
3530 )
3531 .await;
3532
3533 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3534 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3535 let buffer = project
3536 .update(cx, |project, cx| {
3537 project.open_local_buffer("/dir/one.rs", cx)
3538 })
3539 .await
3540 .unwrap();
3541
3542 let fake_server = fake_servers.next().await.unwrap();
3543
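    // Prepare the rename to determine the range of the symbol under the cursor.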
3544 let response = project.update(cx, |project, cx| {
3545 project.prepare_rename(buffer.clone(), 7, cx)
3546 });
3547 fake_server
3548 .handle_request::<lsp2::request::PrepareRenameRequest, _, _>(|params, _| async move {
3549 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3550 assert_eq!(params.position, lsp2::Position::new(0, 7));
3551 Ok(Some(lsp2::PrepareRenameResponse::Range(lsp2::Range::new(
3552 lsp2::Position::new(0, 6),
3553 lsp2::Position::new(0, 9),
3554 ))))
3555 })
3556 .next()
3557 .await
3558 .unwrap();
3559 let range = response.await.unwrap().unwrap();
3560 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3561 assert_eq!(range, 6..9);
3562
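    // Perform the rename; the resulting transaction should contain edits to both files.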
3563 let response = project.update(cx, |project, cx| {
3564 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3565 });
3566 fake_server
3567 .handle_request::<lsp2::request::Rename, _, _>(|params, _| async move {
3568 assert_eq!(
3569 params.text_document_position.text_document.uri.as_str(),
3570 "file:///dir/one.rs"
3571 );
3572 assert_eq!(
3573 params.text_document_position.position,
3574 lsp2::Position::new(0, 7)
3575 );
3576 assert_eq!(params.new_name, "THREE");
3577 Ok(Some(lsp2::WorkspaceEdit {
3578 changes: Some(
3579 [
3580 (
3581 lsp2::Url::from_file_path("/dir/one.rs").unwrap(),
3582 vec![lsp2::TextEdit::new(
3583 lsp2::Range::new(
3584 lsp2::Position::new(0, 6),
3585 lsp2::Position::new(0, 9),
3586 ),
3587 "THREE".to_string(),
3588 )],
3589 ),
3590 (
3591 lsp2::Url::from_file_path("/dir/two.rs").unwrap(),
3592 vec![
3593 lsp2::TextEdit::new(
3594 lsp2::Range::new(
3595 lsp2::Position::new(0, 24),
3596 lsp2::Position::new(0, 27),
3597 ),
3598 "THREE".to_string(),
3599 ),
3600 lsp2::TextEdit::new(
3601 lsp2::Range::new(
3602 lsp2::Position::new(0, 35),
3603 lsp2::Position::new(0, 38),
3604 ),
3605 "THREE".to_string(),
3606 ),
3607 ],
3608 ),
3609 ]
3610 .into_iter()
3611 .collect(),
3612 ),
3613 ..Default::default()
3614 }))
3615 })
3616 .next()
3617 .await
3618 .unwrap();
3619 let mut transaction = response.await.unwrap().0;
3620 assert_eq!(transaction.len(), 2);
3621 assert_eq!(
3622 transaction
3623 .remove_entry(&buffer)
3624 .unwrap()
3625 .0
3626 .update(cx, |buffer, _| buffer.text()),
3627 "const THREE: usize = 1;"
3628 );
3629 assert_eq!(
3630 transaction
3631 .into_keys()
3632 .next()
3633 .unwrap()
3634 .update(cx, |buffer, _| buffer.text()),
3635 "const TWO: usize = one::THREE + one::THREE;"
3636 );
3637}
3638
3639#[gpui2::test]
3640async fn test_search(cx: &mut gpui2::TestAppContext) {
3641 init_test(cx);
3642
3643 let fs = FakeFs::new(cx.executor().clone());
3644 fs.insert_tree(
3645 "/dir",
3646 json!({
3647 "one.rs": "const ONE: usize = 1;",
3648 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3649 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3650 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3651 }),
3652 )
3653 .await;
3654 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3655 assert_eq!(
3656 search(
3657 &project,
3658 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
3659 cx
3660 )
3661 .await
3662 .unwrap(),
3663 HashMap::from_iter([
3664 ("two.rs".to_string(), vec![6..9]),
3665 ("three.rs".to_string(), vec![37..40])
3666 ])
3667 );
3668
3669 let buffer_4 = project
3670 .update(cx, |project, cx| {
3671 project.open_local_buffer("/dir/four.rs", cx)
3672 })
3673 .await
3674 .unwrap();
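    // Edit the open buffer; subsequent searches should reflect its unsaved, in-memory contents.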
3675 buffer_4.update(cx, |buffer, cx| {
3676 let text = "two::TWO";
3677 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3678 });
3679
3680 assert_eq!(
3681 search(
3682 &project,
3683 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
3684 cx
3685 )
3686 .await
3687 .unwrap(),
3688 HashMap::from_iter([
3689 ("two.rs".to_string(), vec![6..9]),
3690 ("three.rs".to_string(), vec![37..40]),
3691 ("four.rs".to_string(), vec![25..28, 36..39])
3692 ])
3693 );
3694}
3695
3696#[gpui2::test]
3697async fn test_search_with_inclusions(cx: &mut gpui2::TestAppContext) {
3698 init_test(cx);
3699
3700 let search_query = "file";
3701
3702 let fs = FakeFs::new(cx.executor().clone());
3703 fs.insert_tree(
3704 "/dir",
3705 json!({
3706 "one.rs": r#"// Rust file one"#,
3707 "one.ts": r#"// TypeScript file one"#,
3708 "two.rs": r#"// Rust file two"#,
3709 "two.ts": r#"// TypeScript file two"#,
3710 }),
3711 )
3712 .await;
3713 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3714
3715 assert!(
3716 search(
3717 &project,
3718 SearchQuery::text(
3719 search_query,
3720 false,
3721 true,
3722 vec![PathMatcher::new("*.odd").unwrap()],
3723 Vec::new()
3724 )
3725 .unwrap(),
3726 cx
3727 )
3728 .await
3729 .unwrap()
3730 .is_empty(),
3731 "If no inclusions match, no files should be returned"
3732 );
3733
3734 assert_eq!(
3735 search(
3736 &project,
3737 SearchQuery::text(
3738 search_query,
3739 false,
3740 true,
3741 vec![PathMatcher::new("*.rs").unwrap()],
3742 Vec::new()
3743 )
3744 .unwrap(),
3745 cx
3746 )
3747 .await
3748 .unwrap(),
3749 HashMap::from_iter([
3750 ("one.rs".to_string(), vec![8..12]),
3751 ("two.rs".to_string(), vec![8..12]),
3752 ]),
3753 "Rust only search should give only Rust files"
3754 );
3755
3756 assert_eq!(
3757 search(
3758 &project,
3759 SearchQuery::text(
3760 search_query,
3761 false,
3762 true,
3763 vec![
3764 PathMatcher::new("*.ts").unwrap(),
3765 PathMatcher::new("*.odd").unwrap(),
3766 ],
3767 Vec::new()
3768 ).unwrap(),
3769 cx
3770 )
3771 .await
3772 .unwrap(),
3773 HashMap::from_iter([
3774 ("one.ts".to_string(), vec![14..18]),
3775 ("two.ts".to_string(), vec![14..18]),
3776 ]),
3777 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3778 );
3779
3780 assert_eq!(
3781 search(
3782 &project,
3783 SearchQuery::text(
3784 search_query,
3785 false,
3786 true,
3787 vec![
3788 PathMatcher::new("*.rs").unwrap(),
3789 PathMatcher::new("*.ts").unwrap(),
3790 PathMatcher::new("*.odd").unwrap(),
3791 ],
3792 Vec::new()
3793 ).unwrap(),
3794 cx
3795 )
3796 .await
3797 .unwrap(),
3798 HashMap::from_iter([
3799 ("one.rs".to_string(), vec![8..12]),
3800 ("one.ts".to_string(), vec![14..18]),
3801 ("two.rs".to_string(), vec![8..12]),
3802 ("two.ts".to_string(), vec![14..18]),
3803 ]),
3804 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3805 );
3806}
3807
3808#[gpui2::test]
3809async fn test_search_with_exclusions(cx: &mut gpui2::TestAppContext) {
3810 init_test(cx);
3811
3812 let search_query = "file";
3813
3814 let fs = FakeFs::new(cx.executor().clone());
3815 fs.insert_tree(
3816 "/dir",
3817 json!({
3818 "one.rs": r#"// Rust file one"#,
3819 "one.ts": r#"// TypeScript file one"#,
3820 "two.rs": r#"// Rust file two"#,
3821 "two.ts": r#"// TypeScript file two"#,
3822 }),
3823 )
3824 .await;
3825 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3826
3827 assert_eq!(
3828 search(
3829 &project,
3830 SearchQuery::text(
3831 search_query,
3832 false,
3833 true,
3834 Vec::new(),
3835 vec![PathMatcher::new("*.odd").unwrap()],
3836 )
3837 .unwrap(),
3838 cx
3839 )
3840 .await
3841 .unwrap(),
3842 HashMap::from_iter([
3843 ("one.rs".to_string(), vec![8..12]),
3844 ("one.ts".to_string(), vec![14..18]),
3845 ("two.rs".to_string(), vec![8..12]),
3846 ("two.ts".to_string(), vec![14..18]),
3847 ]),
3848 "If no exclusions match, all files should be returned"
3849 );
3850
3851 assert_eq!(
3852 search(
3853 &project,
3854 SearchQuery::text(
3855 search_query,
3856 false,
3857 true,
3858 Vec::new(),
3859 vec![PathMatcher::new("*.rs").unwrap()],
3860 )
3861 .unwrap(),
3862 cx
3863 )
3864 .await
3865 .unwrap(),
3866 HashMap::from_iter([
3867 ("one.ts".to_string(), vec![14..18]),
3868 ("two.ts".to_string(), vec![14..18]),
3869 ]),
3870 "Rust exclusion search should give only TypeScript files"
3871 );
3872
3873 assert_eq!(
3874 search(
3875 &project,
3876 SearchQuery::text(
3877 search_query,
3878 false,
3879 true,
3880 Vec::new(),
3881 vec![
3882 PathMatcher::new("*.ts").unwrap(),
3883 PathMatcher::new("*.odd").unwrap(),
3884 ],
3885 ).unwrap(),
3886 cx
3887 )
3888 .await
3889 .unwrap(),
3890 HashMap::from_iter([
3891 ("one.rs".to_string(), vec![8..12]),
3892 ("two.rs".to_string(), vec![8..12]),
3893 ]),
3894 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3895 );
3896
3897 assert!(
3898 search(
3899 &project,
3900 SearchQuery::text(
3901 search_query,
3902 false,
3903 true,
3904 Vec::new(),
3905 vec![
3906 PathMatcher::new("*.rs").unwrap(),
3907 PathMatcher::new("*.ts").unwrap(),
3908 PathMatcher::new("*.odd").unwrap(),
3909 ],
3910 ).unwrap(),
3911 cx
3912 )
3913 .await
        .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
3916 );
3917}
3918
3919#[gpui2::test]
3920async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui2::TestAppContext) {
3921 init_test(cx);
3922
3923 let search_query = "file";
3924
3925 let fs = FakeFs::new(cx.executor().clone());
3926 fs.insert_tree(
3927 "/dir",
3928 json!({
3929 "one.rs": r#"// Rust file one"#,
3930 "one.ts": r#"// TypeScript file one"#,
3931 "two.rs": r#"// Rust file two"#,
3932 "two.ts": r#"// TypeScript file two"#,
3933 }),
3934 )
3935 .await;
3936 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3937
3938 assert!(
3939 search(
3940 &project,
3941 SearchQuery::text(
3942 search_query,
3943 false,
3944 true,
3945 vec![PathMatcher::new("*.odd").unwrap()],
3946 vec![PathMatcher::new("*.odd").unwrap()],
3947 )
3948 .unwrap(),
3949 cx
3950 )
3951 .await
3952 .unwrap()
        .is_empty(),
        "If neither the inclusions nor the exclusions match anything, no files should be returned"
3955 );
3956
3957 assert!(
3958 search(
3959 &project,
3960 SearchQuery::text(
3961 search_query,
3962 false,
3963 true,
3964 vec![PathMatcher::new("*.ts").unwrap()],
3965 vec![PathMatcher::new("*.ts").unwrap()],
3966 ).unwrap(),
3967 cx
3968 )
3969 .await
3970 .unwrap()
        .is_empty(),
        "If both the TypeScript inclusions and exclusions match, the exclusions should win and no files should be returned."
3973 );
3974
3975 assert!(
3976 search(
3977 &project,
3978 SearchQuery::text(
3979 search_query,
3980 false,
3981 true,
3982 vec![
3983 PathMatcher::new("*.ts").unwrap(),
3984 PathMatcher::new("*.odd").unwrap()
3985 ],
3986 vec![
3987 PathMatcher::new("*.ts").unwrap(),
3988 PathMatcher::new("*.odd").unwrap()
3989 ],
3990 )
3991 .unwrap(),
3992 cx
3993 )
3994 .await
3995 .unwrap()
        .is_empty(),
        "Adding non-matching inclusions and exclusions should not change the result."
3998 );
3999
4000 assert_eq!(
4001 search(
4002 &project,
4003 SearchQuery::text(
4004 search_query,
4005 false,
4006 true,
4007 vec![
4008 PathMatcher::new("*.ts").unwrap(),
4009 PathMatcher::new("*.odd").unwrap()
4010 ],
4011 vec![
4012 PathMatcher::new("*.rs").unwrap(),
4013 PathMatcher::new("*.odd").unwrap()
4014 ],
4015 )
4016 .unwrap(),
4017 cx
4018 )
4019 .await
4020 .unwrap(),
4021 HashMap::from_iter([
4022 ("one.ts".to_string(), vec![14..18]),
4023 ("two.ts".to_string(), vec![14..18]),
4024 ]),
4025 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4026 );
4027}
4028
4029#[test]
4030fn test_glob_literal_prefix() {
4031 assert_eq!(glob_literal_prefix("**/*.js"), "");
4032 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
4033 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
4034 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
4035}
4036
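/// Runs a project-wide search and returns a map from each matching file's path
/// to the offset ranges of its matches.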
4037async fn search(
4038 project: &Handle<Project>,
4039 query: SearchQuery,
4040 cx: &mut gpui2::TestAppContext,
4041) -> Result<HashMap<String, Vec<Range<usize>>>> {
4042 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4043 let mut result = HashMap::default();
4044 while let Some((buffer, range)) = search_rx.next().await {
4045 result.entry(buffer).or_insert(range);
4046 }
4047 Ok(result
4048 .into_iter()
4049 .map(|(buffer, ranges)| {
4050 buffer.update(cx, |buffer, _| {
4051 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4052 let ranges = ranges
4053 .into_iter()
4054 .map(|range| range.to_offset(buffer))
4055 .collect::<Vec<_>>();
4056 (path, ranges)
4057 })
4058 })
4059 .collect())
4060}
4061
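/// Installs the settings, language, and project globals that the tests in this module rely on.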
4062fn init_test(cx: &mut gpui2::TestAppContext) {
4063 if std::env::var("RUST_LOG").is_ok() {
4064 env_logger::init();
4065 }
4066
4067 cx.update(|cx| {
4068 let settings_store = SettingsStore::test(cx);
4069 cx.set_global(settings_store);
4070 language2::init(cx);
4071 Project::init_settings(cx);
4072 });
4073}