1use crate::{worktree::WorktreeModelHandle, Event, *};
2use fs::{FakeFs, RealFs};
3use futures::{future, StreamExt};
4use gpui::{executor::Deterministic, test::subscribe, AppContext};
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
17
// Install `env_logger` once per test binary (via `ctor`, before any test
// runs), but only when `RUST_LOG` is set — e.g. `RUST_LOG=debug cargo test`.
#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}
25
26#[gpui::test]
27async fn test_symlinks(cx: &mut gpui::TestAppContext) {
28 init_test(cx);
29 cx.foreground().allow_parking();
30
31 let dir = temp_tree(json!({
32 "root": {
33 "apple": "",
34 "banana": {
35 "carrot": {
36 "date": "",
37 "endive": "",
38 }
39 },
40 "fennel": {
41 "grape": "",
42 }
43 }
44 }));
45
46 let root_link_path = dir.path().join("root_link");
47 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
48 unix::fs::symlink(
49 &dir.path().join("root/fennel"),
50 &dir.path().join("root/finnochio"),
51 )
52 .unwrap();
53
54 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
55 project.read_with(cx, |project, cx| {
56 let tree = project.worktrees(cx).next().unwrap().read(cx);
57 assert_eq!(tree.file_count(), 5);
58 assert_eq!(
59 tree.inode_for_path("fennel/grape"),
60 tree.inode_for_path("finnochio/grape")
61 );
62 });
63}
64
// Per-directory settings: a `.zed/settings.json` at the worktree root sets
// `tab_size: 8`, and a nested `b/.zed/settings.json` overrides it with
// `tab_size: 2`. Files resolve their language settings from the nearest
// enclosing settings file.
#[gpui::test]
async fn test_managing_project_specific_settings(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the initial scan finish so both settings files have been loaded.
    deterministic.run_until_parked();
    cx.read(|cx| {
        let tree = worktree.read(cx);

        // Resolve effective settings for one file in each directory.
        let settings_a = language_settings(
            None,
            Some(
                &(File::for_entry(
                    tree.entry_for_path("a/a.rs").unwrap().clone(),
                    worktree.clone(),
                ) as _),
            ),
            cx,
        );
        let settings_b = language_settings(
            None,
            Some(
                &(File::for_entry(
                    tree.entry_for_path("b/b.rs").unwrap().clone(),
                    worktree.clone(),
                ) as _),
            ),
            cx,
        );

        // `a/a.rs` inherits the root settings; `b/b.rs` gets the nested override.
        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });
}
124
// End-to-end lifecycle of language servers for open buffers: server startup
// on first matching buffer, capability-based buffer configuration, routing of
// didChange/didSave/didClose/didOpen notifications, behavior across file
// renames (including renames that change the file's language), server
// restarts, and buffer-close notifications.
#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    // Two languages, each backed by its own fake LSP adapter with distinct
    // completion trigger characters so we can tell which server configured
    // which buffer.
    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so no completion triggers.
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    // A same-extension rename shows up as a close of the old path followed by
    // an open of the new one, on the same (rust) server.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Seed a diagnostic on the buffer so we can verify it is cleared when the
    // buffer later changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two open notifications is unspecified, hence the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
513
// `workspace/didChangeWatchedFiles` support: ignored directories are only
// scanned once a language server registers a watcher inside them, and FS
// mutations are forwarded to the server only when they match its registered
// glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        }))
        .await;

    // `target` is gitignored; its subtrees should not be loaded eagerly.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(language));
    });
    cx.foreground().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory reads triggered by watch registration.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    // Registering the watchers alone must not produce any change events,
    // and should only read the directories needed to cover the new patterns.
    cx.foreground().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.foreground().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
715
716#[gpui::test]
717async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
718 init_test(cx);
719
720 let fs = FakeFs::new(cx.background());
721 fs.insert_tree(
722 "/dir",
723 json!({
724 "a.rs": "let a = 1;",
725 "b.rs": "let b = 2;"
726 }),
727 )
728 .await;
729
730 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
731
732 let buffer_a = project
733 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
734 .await
735 .unwrap();
736 let buffer_b = project
737 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
738 .await
739 .unwrap();
740
741 project.update(cx, |project, cx| {
742 project
743 .update_diagnostics(
744 LanguageServerId(0),
745 lsp::PublishDiagnosticsParams {
746 uri: Url::from_file_path("/dir/a.rs").unwrap(),
747 version: None,
748 diagnostics: vec![lsp::Diagnostic {
749 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
750 severity: Some(lsp::DiagnosticSeverity::ERROR),
751 message: "error 1".to_string(),
752 ..Default::default()
753 }],
754 },
755 &[],
756 cx,
757 )
758 .unwrap();
759 project
760 .update_diagnostics(
761 LanguageServerId(0),
762 lsp::PublishDiagnosticsParams {
763 uri: Url::from_file_path("/dir/b.rs").unwrap(),
764 version: None,
765 diagnostics: vec![lsp::Diagnostic {
766 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
767 severity: Some(lsp::DiagnosticSeverity::WARNING),
768 message: "error 2".to_string(),
769 ..Default::default()
770 }],
771 },
772 &[],
773 cx,
774 )
775 .unwrap();
776 });
777
778 buffer_a.read_with(cx, |buffer, _| {
779 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
780 assert_eq!(
781 chunks
782 .iter()
783 .map(|(s, d)| (s.as_str(), *d))
784 .collect::<Vec<_>>(),
785 &[
786 ("let ", None),
787 ("a", Some(DiagnosticSeverity::ERROR)),
788 (" = 1;", None),
789 ]
790 );
791 });
792 buffer_b.read_with(cx, |buffer, _| {
793 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
794 assert_eq!(
795 chunks
796 .iter()
797 .map(|(s, d)| (s.as_str(), *d))
798 .collect::<Vec<_>>(),
799 &[
800 ("let ", None),
801 ("b", Some(DiagnosticSeverity::WARNING)),
802 (" = 2;", None),
803 ]
804 );
805 });
806}
807
808#[gpui::test]
809async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.background());
813 fs.insert_tree(
814 "/root",
815 json!({
816 "dir": {
817 ".git": {
818 "HEAD": "ref: refs/heads/main",
819 },
820 ".gitignore": "b.rs",
821 "a.rs": "let a = 1;",
822 "b.rs": "let b = 2;",
823 },
824 "other.rs": "let b = c;"
825 }),
826 )
827 .await;
828
829 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
830 let (worktree, _) = project
831 .update(cx, |project, cx| {
832 project.find_or_create_local_worktree("/root/dir", true, cx)
833 })
834 .await
835 .unwrap();
836 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
837
838 let (worktree, _) = project
839 .update(cx, |project, cx| {
840 project.find_or_create_local_worktree("/root/other.rs", false, cx)
841 })
842 .await
843 .unwrap();
844 let other_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
845
846 let server_id = LanguageServerId(0);
847 project.update(cx, |project, cx| {
848 project
849 .update_diagnostics(
850 server_id,
851 lsp::PublishDiagnosticsParams {
852 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
853 version: None,
854 diagnostics: vec![lsp::Diagnostic {
855 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
856 severity: Some(lsp::DiagnosticSeverity::ERROR),
857 message: "unused variable 'b'".to_string(),
858 ..Default::default()
859 }],
860 },
861 &[],
862 cx,
863 )
864 .unwrap();
865 project
866 .update_diagnostics(
867 server_id,
868 lsp::PublishDiagnosticsParams {
869 uri: Url::from_file_path("/root/other.rs").unwrap(),
870 version: None,
871 diagnostics: vec![lsp::Diagnostic {
872 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
873 severity: Some(lsp::DiagnosticSeverity::ERROR),
874 message: "unknown variable 'c'".to_string(),
875 ..Default::default()
876 }],
877 },
878 &[],
879 cx,
880 )
881 .unwrap();
882 });
883
884 let main_ignored_buffer = project
885 .update(cx, |project, cx| {
886 project.open_buffer((main_worktree_id, "b.rs"), cx)
887 })
888 .await
889 .unwrap();
890 main_ignored_buffer.read_with(cx, |buffer, _| {
891 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
892 assert_eq!(
893 chunks
894 .iter()
895 .map(|(s, d)| (s.as_str(), *d))
896 .collect::<Vec<_>>(),
897 &[
898 ("let ", None),
899 ("b", Some(DiagnosticSeverity::ERROR)),
900 (" = 2;", None),
901 ],
902 "Gigitnored buffers should still get in-buffer diagnostics",
903 );
904 });
905 let other_buffer = project
906 .update(cx, |project, cx| {
907 project.open_buffer((other_worktree_id, ""), cx)
908 })
909 .await
910 .unwrap();
911 other_buffer.read_with(cx, |buffer, _| {
912 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
913 assert_eq!(
914 chunks
915 .iter()
916 .map(|(s, d)| (s.as_str(), *d))
917 .collect::<Vec<_>>(),
918 &[
919 ("let b = ", None),
920 ("c", Some(DiagnosticSeverity::ERROR)),
921 (";", None),
922 ],
923 "Buffers from hidden projects should still get in-buffer diagnostics"
924 );
925 });
926
927 project.read_with(cx, |project, cx| {
928 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
929 assert_eq!(
930 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
931 vec![(
932 ProjectPath {
933 worktree_id: main_worktree_id,
934 path: Arc::from(Path::new("b.rs")),
935 },
936 server_id,
937 DiagnosticSummary {
938 error_count: 1,
939 warning_count: 0,
940 }
941 )]
942 );
943 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
944 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
945 });
946}
947
// Disk-based diagnostics progress reporting: a server's $/progress messages
// carrying the configured progress token translate into project
// `DiskBasedDiagnosticsStarted`/`Finished` events, publishing diagnostics
// emits `DiagnosticsUpdated`, and re-publishing an identical empty set does
// not emit a redundant event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Beginning progress with the disk-based token starts the diagnostics cycle.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress token finishes the diagnostics cycle.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the affected file shows the published diagnostic in the buffer.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    // No further event should be pending after the duplicate empty publish.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1080
// Restarting a language server while its disk-based diagnostics are still in
// progress: the replacement server (id 1) drives a fresh diagnostics cycle,
// and finishing it clears the "running" state even though the old server's
// progress token was never ended.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            // Empty: no server should still be reported as running diagnostics.
            [LanguageServerId(0); 0]
        );
    });
}
1162
1163#[gpui::test]
1164async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1165 init_test(cx);
1166
1167 let mut language = Language::new(
1168 LanguageConfig {
1169 path_suffixes: vec!["rs".to_string()],
1170 ..Default::default()
1171 },
1172 None,
1173 );
1174 let mut fake_servers = language
1175 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1176 ..Default::default()
1177 }))
1178 .await;
1179
1180 let fs = FakeFs::new(cx.background());
1181 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1182
1183 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1184 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1185
1186 let buffer = project
1187 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1188 .await
1189 .unwrap();
1190
1191 // Publish diagnostics
1192 let fake_server = fake_servers.next().await.unwrap();
1193 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1194 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1195 version: None,
1196 diagnostics: vec![lsp::Diagnostic {
1197 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1198 severity: Some(lsp::DiagnosticSeverity::ERROR),
1199 message: "the message".to_string(),
1200 ..Default::default()
1201 }],
1202 });
1203
1204 cx.foreground().run_until_parked();
1205 buffer.read_with(cx, |buffer, _| {
1206 assert_eq!(
1207 buffer
1208 .snapshot()
1209 .diagnostics_in_range::<_, usize>(0..1, false)
1210 .map(|entry| entry.diagnostic.message.clone())
1211 .collect::<Vec<_>>(),
1212 ["the message".to_string()]
1213 );
1214 });
1215 project.read_with(cx, |project, cx| {
1216 assert_eq!(
1217 project.diagnostic_summary(false, cx),
1218 DiagnosticSummary {
1219 error_count: 1,
1220 warning_count: 0,
1221 }
1222 );
1223 });
1224
1225 project.update(cx, |project, cx| {
1226 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1227 });
1228
1229 // The diagnostics are cleared.
1230 cx.foreground().run_until_parked();
1231 buffer.read_with(cx, |buffer, _| {
1232 assert_eq!(
1233 buffer
1234 .snapshot()
1235 .diagnostics_in_range::<_, usize>(0..1, false)
1236 .map(|entry| entry.diagnostic.message.clone())
1237 .collect::<Vec<_>>(),
1238 Vec::<String>::new(),
1239 );
1240 });
1241 project.read_with(cx, |project, cx| {
1242 assert_eq!(
1243 project.diagnostic_summary(false, cx),
1244 DiagnosticSummary {
1245 error_count: 0,
1246 warning_count: 0,
1247 }
1248 );
1249 });
1250}
1251
1252#[gpui::test]
1253async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1254 init_test(cx);
1255
1256 let mut language = Language::new(
1257 LanguageConfig {
1258 path_suffixes: vec!["rs".to_string()],
1259 ..Default::default()
1260 },
1261 None,
1262 );
1263 let mut fake_servers = language
1264 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1265 name: "the-lsp",
1266 ..Default::default()
1267 }))
1268 .await;
1269
1270 let fs = FakeFs::new(cx.background());
1271 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1272
1273 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1274 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1275
1276 let buffer = project
1277 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1278 .await
1279 .unwrap();
1280
1281 // Before restarting the server, report diagnostics with an unknown buffer version.
1282 let fake_server = fake_servers.next().await.unwrap();
1283 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1284 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1285 version: Some(10000),
1286 diagnostics: Vec::new(),
1287 });
1288 cx.foreground().run_until_parked();
1289
1290 project.update(cx, |project, cx| {
1291 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1292 });
1293 let mut fake_server = fake_servers.next().await.unwrap();
1294 let notification = fake_server
1295 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1296 .await
1297 .text_document;
1298 assert_eq!(notification.version, 0);
1299}
1300
// Verifies that the per-language `enable_language_server` setting starts and
// stops only the matching server: disabling Rust exits the Rust server while
// the JavaScript server keeps running, and flipping both settings swaps which
// server is alive.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    // Opening one buffer per language starts one server per language.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives an Exit; the JS server is untouched.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance comes up and re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1429
// Verifies that diagnostics published against an older document version are
// translated through the buffer edits made since that version: positions
// shift with insertions, overlapping diagnostics highlight correctly, and
// out-of-order ranges are handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    // (i.e. before the two blank lines were inserted).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // Rows 1 and 2 in the old version now correspond to rows 3 and 4.
    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // Chunk iteration reflects the translated diagnostic ranges.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A range that starts/ends mid-diagnostic yields partial chunks.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // This warning's range fully contains the error's range above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe one wins.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    // (the row-1 diagnostic is reported before the row-0 one).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1718
1719#[gpui::test]
1720async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1721 init_test(cx);
1722
1723 let text = concat!(
1724 "let one = ;\n", //
1725 "let two = \n",
1726 "let three = 3;\n",
1727 );
1728
1729 let fs = FakeFs::new(cx.background());
1730 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1731
1732 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1733 let buffer = project
1734 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1735 .await
1736 .unwrap();
1737
1738 project.update(cx, |project, cx| {
1739 project
1740 .update_buffer_diagnostics(
1741 &buffer,
1742 LanguageServerId(0),
1743 None,
1744 vec![
1745 DiagnosticEntry {
1746 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1747 diagnostic: Diagnostic {
1748 severity: DiagnosticSeverity::ERROR,
1749 message: "syntax error 1".to_string(),
1750 ..Default::default()
1751 },
1752 },
1753 DiagnosticEntry {
1754 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1755 diagnostic: Diagnostic {
1756 severity: DiagnosticSeverity::ERROR,
1757 message: "syntax error 2".to_string(),
1758 ..Default::default()
1759 },
1760 },
1761 ],
1762 cx,
1763 )
1764 .unwrap();
1765 });
1766
1767 // An empty range is extended forward to include the following character.
1768 // At the end of a line, an empty range is extended backward to include
1769 // the preceding character.
1770 buffer.read_with(cx, |buffer, _| {
1771 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1772 assert_eq!(
1773 chunks
1774 .iter()
1775 .map(|(s, d)| (s.as_str(), *d))
1776 .collect::<Vec<_>>(),
1777 &[
1778 ("let one = ", None),
1779 (";", Some(DiagnosticSeverity::ERROR)),
1780 ("\nlet two =", None),
1781 (" ", Some(DiagnosticSeverity::ERROR)),
1782 ("\nlet three = 3;\n", None)
1783 ]
1784 );
1785 });
1786}
1787
1788#[gpui::test]
1789async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1790 init_test(cx);
1791
1792 let fs = FakeFs::new(cx.background());
1793 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1794 .await;
1795
1796 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1797
1798 project.update(cx, |project, cx| {
1799 project
1800 .update_diagnostic_entries(
1801 LanguageServerId(0),
1802 Path::new("/dir/a.rs").to_owned(),
1803 None,
1804 vec![DiagnosticEntry {
1805 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1806 diagnostic: Diagnostic {
1807 severity: DiagnosticSeverity::ERROR,
1808 is_primary: true,
1809 message: "syntax error a1".to_string(),
1810 ..Default::default()
1811 },
1812 }],
1813 cx,
1814 )
1815 .unwrap();
1816 project
1817 .update_diagnostic_entries(
1818 LanguageServerId(1),
1819 Path::new("/dir/a.rs").to_owned(),
1820 None,
1821 vec![DiagnosticEntry {
1822 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1823 diagnostic: Diagnostic {
1824 severity: DiagnosticSeverity::ERROR,
1825 is_primary: true,
1826 message: "syntax error b1".to_string(),
1827 ..Default::default()
1828 },
1829 }],
1830 cx,
1831 )
1832 .unwrap();
1833
1834 assert_eq!(
1835 project.diagnostic_summary(false, cx),
1836 DiagnosticSummary {
1837 error_count: 2,
1838 warning_count: 0,
1839 }
1840 );
1841 });
1842}
1843
// Verifies that `edits_from_lsp` interprets edit coordinates in the document
// version the server computed them against: the buffer is edited after the
// server's snapshot, and the resulting ranges must land where the old content
// moved to.
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server saw when the buffer was opened;
    // the LSP edits below are expressed against this (soon-to-be-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit coordinates below refer to the original (pre-edit) text.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the interleaved manual edits
    // (the inserted comments survive alongside the server's changes).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2001
// Verifies that a large "rewrite the whole file" style diff from a language
// server is minimized by `edits_from_lsp` into the small set of real changes.
#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve the returned anchor ranges to concrete points for comparison.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The giant diff collapses to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2109
2110#[gpui::test]
2111async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
2112 init_test(cx);
2113
2114 let text = "
2115 use a::b;
2116 use a::c;
2117
2118 fn f() {
2119 b();
2120 c();
2121 }
2122 "
2123 .unindent();
2124
2125 let fs = FakeFs::new(cx.background());
2126 fs.insert_tree(
2127 "/dir",
2128 json!({
2129 "a.rs": text.clone(),
2130 }),
2131 )
2132 .await;
2133
2134 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2135 let buffer = project
2136 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2137 .await
2138 .unwrap();
2139
2140 // Simulate the language server sending us edits in a non-ordered fashion,
2141 // with ranges sometimes being inverted or pointing to invalid locations.
2142 let edits = project
2143 .update(cx, |project, cx| {
2144 project.edits_from_lsp(
2145 &buffer,
2146 [
2147 lsp::TextEdit {
2148 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2149 new_text: "\n\n".into(),
2150 },
2151 lsp::TextEdit {
2152 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2153 new_text: "a::{b, c}".into(),
2154 },
2155 lsp::TextEdit {
2156 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2157 new_text: "".into(),
2158 },
2159 lsp::TextEdit {
2160 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2161 new_text: "
2162 fn f() {
2163 b();
2164 c();
2165 }"
2166 .unindent(),
2167 },
2168 ],
2169 LanguageServerId(0),
2170 None,
2171 cx,
2172 )
2173 })
2174 .await
2175 .unwrap();
2176
2177 buffer.update(cx, |buffer, cx| {
2178 let edits = edits
2179 .into_iter()
2180 .map(|(range, text)| {
2181 (
2182 range.start.to_point(buffer)..range.end.to_point(buffer),
2183 text,
2184 )
2185 })
2186 .collect::<Vec<_>>();
2187
2188 assert_eq!(
2189 edits,
2190 [
2191 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2192 (Point::new(1, 0)..Point::new(2, 0), "".into())
2193 ]
2194 );
2195
2196 for (range, new_text) in edits {
2197 buffer.edit([(range, new_text)], None, cx);
2198 }
2199 assert_eq!(
2200 buffer.text(),
2201 "
2202 use a::{b, c};
2203
2204 fn f() {
2205 b();
2206 c();
2207 }
2208 "
2209 .unindent()
2210 );
2211 });
2212}
2213
2214fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2215 buffer: &Buffer,
2216 range: Range<T>,
2217) -> Vec<(String, Option<DiagnosticSeverity>)> {
2218 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2219 for chunk in buffer.snapshot().chunks(range, true) {
2220 if chunks.last().map_or(false, |prev_chunk| {
2221 prev_chunk.1 == chunk.diagnostic_severity
2222 }) {
2223 chunks.last_mut().unwrap().0.push_str(chunk.text);
2224 } else {
2225 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2226 }
2227 }
2228 chunks
2229}
2230
// Verifies go-to-definition into a file outside the project: the target is
// opened in an invisible worktree that is released once the last reference to
// the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.foreground().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was opened via a second, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.read(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a ModelHandle<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2331
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // TypeScript language with a fake server that advertises completion
    // support but will return items carrying no explicit text edit range.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing at the end of an identifier ("fqn"). With no edit
    // range from the server, the replaced range should be inferred as the
    // partial word preceding the cursor.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` wins over `label`, and the old range covers "fqn".
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, just before the closing
    // quote. The inferred range should cover "cmp" but not the quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With no `insert_text` either, the label itself is used as the new text.
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2428
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // TypeScript language backed by a fake server advertising completions.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_typescript::language_typescript()),
    );
    let mut fake_language_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The server responds with insert text containing both a bare `\r` and a
    // `\r\n` sequence.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Carriage returns are normalized to plain newlines in the applied text.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2494
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // TypeScript language (no grammar needed) with a scripted fake server.
    let mut language = Language::new(
        LanguageConfig {
            name: "TypeScript".into(),
            path_suffixes: vec!["ts".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    command: Some(lsp::Command {
                        title: "The command".into(),
                        command: "_the/command".into(),
                        arguments: Some(vec![json!("the-argument")]),
                    }),
                    ..Default::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..Default::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying a command).
    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |action, _| async move { Ok(action) },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        // The edit was applied, and it is undoable as a single transaction.
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2612
2613#[gpui::test(iterations = 10)]
2614async fn test_save_file(cx: &mut gpui::TestAppContext) {
2615 init_test(cx);
2616
2617 let fs = FakeFs::new(cx.background());
2618 fs.insert_tree(
2619 "/dir",
2620 json!({
2621 "file1": "the old contents",
2622 }),
2623 )
2624 .await;
2625
2626 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2627 let buffer = project
2628 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2629 .await
2630 .unwrap();
2631 buffer.update(cx, |buffer, cx| {
2632 assert_eq!(buffer.text(), "the old contents");
2633 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2634 });
2635
2636 project
2637 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2638 .await
2639 .unwrap();
2640
2641 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2642 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2643}
2644
2645#[gpui::test]
2646async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2647 init_test(cx);
2648
2649 let fs = FakeFs::new(cx.background());
2650 fs.insert_tree(
2651 "/dir",
2652 json!({
2653 "file1": "the old contents",
2654 }),
2655 )
2656 .await;
2657
2658 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2659 let buffer = project
2660 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2661 .await
2662 .unwrap();
2663 buffer.update(cx, |buffer, cx| {
2664 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2665 });
2666
2667 project
2668 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2669 .await
2670 .unwrap();
2671
2672 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2673 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2674}
2675
2676// #[gpui::test]
2677// async fn test_save_as(cx: &mut gpui::TestAppContext) {
2678// init_test(cx);
2679
2680// let fs = FakeFs::new(cx.background());
2681// fs.insert_tree("/dir", json!({})).await;
2682
2683// let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2684
2685// let languages = project.read_with(cx, |project, _| project.languages().clone());
2686// languages.register(
2687// "/some/path",
2688// LanguageConfig {
2689// name: "Rust".into(),
2690// path_suffixes: vec!["rs".into()],
2691// ..Default::default()
2692// },
2693// tree_sitter_rust::language(),
2694// vec![],
2695// |_| Default::default(),
2696// );
2697
2698// let buffer = project.update(cx, |project, cx| {
2699// project.create_buffer("", None, cx).unwrap()
2700// });
2701// buffer.update(cx, |buffer, cx| {
2702// buffer.edit([(0..0, "abc")], None, cx);
2703// assert!(buffer.is_dirty());
2704// assert!(!buffer.has_conflict());
2705// assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2706// });
2707// project
2708// .update(cx, |project, cx| {
2709// project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2710// })
2711// .await
2712// .unwrap();
2713// assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2714
2715// cx.foreground().run_until_parked();
2716// buffer.read_with(cx, |buffer, cx| {
2717// assert_eq!(
2718// buffer.file().unwrap().full_path(cx),
2719// Path::new("dir/file1.rs")
2720// );
2721// assert!(!buffer.is_dirty());
2722// assert!(!buffer.has_conflict());
2723// assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2724// });
2725
2726// let opened_buffer = project
2727// .update(cx, |project, cx| {
2728// project.open_local_buffer("/dir/file1.rs", cx)
2729// })
2730// .await
2731// .unwrap();
2732// assert_eq!(opened_buffer, buffer);
2733// }
2734
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    cx.foreground().allow_parking();

    // Real filesystem layout, so genuine fs events drive the rescan.
    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
    let rpc = project.read_with(cx, |p, _| p.client.clone());

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up a worktree entry's stable id by path.
    let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Record entry ids before any renames so identity can be checked later.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());

    let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());

    // Capture every update the local worktree would stream to collaborators.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
    deterministic.run_until_parked();

    cx.read(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.read(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );

        // Entry ids survive renames (including a rename of a parent dir).
        assert_eq!(id_for_path("a/file2.new", cx), file2_id);
        assert_eq!(id_for_path("d/file3", cx), file3_id);
        assert_eq!(id_for_path("d/file4", cx), file4_id);

        // Open buffers track their files across renames...
        assert_eq!(
            buffer2.read(app).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(app).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(app).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // ...and only the deleted file's buffer is marked deleted.
        assert!(!buffer2.read(app).file().unwrap().is_deleted());
        assert!(!buffer3.read(app).file().unwrap().is_deleted());
        assert!(!buffer4.read(app).file().unwrap().is_deleted());
        assert!(buffer5.read(app).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    deterministic.run_until_parked();
    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    deterministic.run_until_parked();
    remote.read_with(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
2882
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    // Helper: look up a worktree entry's stable id by path.
    let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
        project.read_with(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory "a" -> "b" through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .unwrap();
    deterministic.run_until_parked();
    // Entry ids are stable across the rename, and the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
2935
#[gpui::test]
async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.txt": "a-contents",
            "b.txt": "b-contents",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Spawn multiple tasks to open paths, repeating some paths.
    let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
        (
            p.open_local_buffer("/dir/a.txt", cx),
            p.open_local_buffer("/dir/b.txt", cx),
            p.open_local_buffer("/dir/a.txt", cx),
        )
    });

    let buffer_a_1 = buffer_a_1.await.unwrap();
    let buffer_a_2 = buffer_a_2.await.unwrap();
    let buffer_b = buffer_b.await.unwrap();
    assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
    assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");

    // There is only one buffer per path.
    let buffer_a_id = buffer_a_1.id();
    assert_eq!(buffer_a_2.id(), buffer_a_id);

    // Open the same path again while it is still open.
    drop(buffer_a_1);
    let buffer_a_3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
        .await
        .unwrap();

    // There's still only one buffer per path.
    assert_eq!(buffer_a_3.id(), buffer_a_id);
}
2981
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Collect every non-operation event the buffer emits.
    let events = Rc::new(RefCell::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.borrow_mut().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.borrow().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.borrow(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.borrow_mut().clear();
        // Simulate a save by reporting the current version/fingerprint/mtime.
        buffer.did_save(
            buffer.version(),
            buffer.as_rope().fingerprint(),
            buffer.file().unwrap().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.borrow(), &[language::Event::Saved]);
        events.borrow_mut().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first post-save edit flips the dirty bit; the second edit
        // emits Edited alone.
        assert_eq!(
            *events.borrow(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.borrow_mut().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.borrow(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();
    buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.borrow(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Rc::new(RefCell::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.borrow_mut().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete the underlying file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.borrow_mut().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.foreground().run_until_parked();
    assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
    cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3127
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so anchor
    // resolution can be checked after the on-disk reload below.
    let anchors = (0..3)
        .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.read_with(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.foreground().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors shift along with the diffed edits rather than resetting.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3208
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();

    // Line endings are detected at load time, and the buffer text itself is
    // normalized to "\n" regardless of the detected ending.
    buffer1.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        "/dir/file1".as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.foreground().run_until_parked();
    buffer1.read_with(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load("/dir/file2".as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3270
3271#[gpui::test]
3272async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3273 init_test(cx);
3274
3275 let fs = FakeFs::new(cx.background());
3276 fs.insert_tree(
3277 "/the-dir",
3278 json!({
3279 "a.rs": "
3280 fn foo(mut v: Vec<usize>) {
3281 for x in &v {
3282 v.push(1);
3283 }
3284 }
3285 "
3286 .unindent(),
3287 }),
3288 )
3289 .await;
3290
3291 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3292 let buffer = project
3293 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3294 .await
3295 .unwrap();
3296
3297 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3298 let message = lsp::PublishDiagnosticsParams {
3299 uri: buffer_uri.clone(),
3300 diagnostics: vec![
3301 lsp::Diagnostic {
3302 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3303 severity: Some(DiagnosticSeverity::WARNING),
3304 message: "error 1".to_string(),
3305 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3306 location: lsp::Location {
3307 uri: buffer_uri.clone(),
3308 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3309 },
3310 message: "error 1 hint 1".to_string(),
3311 }]),
3312 ..Default::default()
3313 },
3314 lsp::Diagnostic {
3315 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3316 severity: Some(DiagnosticSeverity::HINT),
3317 message: "error 1 hint 1".to_string(),
3318 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3319 location: lsp::Location {
3320 uri: buffer_uri.clone(),
3321 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3322 },
3323 message: "original diagnostic".to_string(),
3324 }]),
3325 ..Default::default()
3326 },
3327 lsp::Diagnostic {
3328 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3329 severity: Some(DiagnosticSeverity::ERROR),
3330 message: "error 2".to_string(),
3331 related_information: Some(vec![
3332 lsp::DiagnosticRelatedInformation {
3333 location: lsp::Location {
3334 uri: buffer_uri.clone(),
3335 range: lsp::Range::new(
3336 lsp::Position::new(1, 13),
3337 lsp::Position::new(1, 15),
3338 ),
3339 },
3340 message: "error 2 hint 1".to_string(),
3341 },
3342 lsp::DiagnosticRelatedInformation {
3343 location: lsp::Location {
3344 uri: buffer_uri.clone(),
3345 range: lsp::Range::new(
3346 lsp::Position::new(1, 13),
3347 lsp::Position::new(1, 15),
3348 ),
3349 },
3350 message: "error 2 hint 2".to_string(),
3351 },
3352 ]),
3353 ..Default::default()
3354 },
3355 lsp::Diagnostic {
3356 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3357 severity: Some(DiagnosticSeverity::HINT),
3358 message: "error 2 hint 1".to_string(),
3359 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3360 location: lsp::Location {
3361 uri: buffer_uri.clone(),
3362 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3363 },
3364 message: "original diagnostic".to_string(),
3365 }]),
3366 ..Default::default()
3367 },
3368 lsp::Diagnostic {
3369 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3370 severity: Some(DiagnosticSeverity::HINT),
3371 message: "error 2 hint 2".to_string(),
3372 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3373 location: lsp::Location {
3374 uri: buffer_uri,
3375 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3376 },
3377 message: "original diagnostic".to_string(),
3378 }]),
3379 ..Default::default()
3380 },
3381 ],
3382 version: None,
3383 };
3384
3385 project
3386 .update(cx, |p, cx| {
3387 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3388 })
3389 .unwrap();
3390 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3391
3392 assert_eq!(
3393 buffer
3394 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3395 .collect::<Vec<_>>(),
3396 &[
3397 DiagnosticEntry {
3398 range: Point::new(1, 8)..Point::new(1, 9),
3399 diagnostic: Diagnostic {
3400 severity: DiagnosticSeverity::WARNING,
3401 message: "error 1".to_string(),
3402 group_id: 1,
3403 is_primary: true,
3404 ..Default::default()
3405 }
3406 },
3407 DiagnosticEntry {
3408 range: Point::new(1, 8)..Point::new(1, 9),
3409 diagnostic: Diagnostic {
3410 severity: DiagnosticSeverity::HINT,
3411 message: "error 1 hint 1".to_string(),
3412 group_id: 1,
3413 is_primary: false,
3414 ..Default::default()
3415 }
3416 },
3417 DiagnosticEntry {
3418 range: Point::new(1, 13)..Point::new(1, 15),
3419 diagnostic: Diagnostic {
3420 severity: DiagnosticSeverity::HINT,
3421 message: "error 2 hint 1".to_string(),
3422 group_id: 0,
3423 is_primary: false,
3424 ..Default::default()
3425 }
3426 },
3427 DiagnosticEntry {
3428 range: Point::new(1, 13)..Point::new(1, 15),
3429 diagnostic: Diagnostic {
3430 severity: DiagnosticSeverity::HINT,
3431 message: "error 2 hint 2".to_string(),
3432 group_id: 0,
3433 is_primary: false,
3434 ..Default::default()
3435 }
3436 },
3437 DiagnosticEntry {
3438 range: Point::new(2, 8)..Point::new(2, 17),
3439 diagnostic: Diagnostic {
3440 severity: DiagnosticSeverity::ERROR,
3441 message: "error 2".to_string(),
3442 group_id: 0,
3443 is_primary: true,
3444 ..Default::default()
3445 }
3446 }
3447 ]
3448 );
3449
3450 assert_eq!(
3451 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3452 &[
3453 DiagnosticEntry {
3454 range: Point::new(1, 13)..Point::new(1, 15),
3455 diagnostic: Diagnostic {
3456 severity: DiagnosticSeverity::HINT,
3457 message: "error 2 hint 1".to_string(),
3458 group_id: 0,
3459 is_primary: false,
3460 ..Default::default()
3461 }
3462 },
3463 DiagnosticEntry {
3464 range: Point::new(1, 13)..Point::new(1, 15),
3465 diagnostic: Diagnostic {
3466 severity: DiagnosticSeverity::HINT,
3467 message: "error 2 hint 2".to_string(),
3468 group_id: 0,
3469 is_primary: false,
3470 ..Default::default()
3471 }
3472 },
3473 DiagnosticEntry {
3474 range: Point::new(2, 8)..Point::new(2, 17),
3475 diagnostic: Diagnostic {
3476 severity: DiagnosticSeverity::ERROR,
3477 message: "error 2".to_string(),
3478 group_id: 0,
3479 is_primary: true,
3480 ..Default::default()
3481 }
3482 }
3483 ]
3484 );
3485
3486 assert_eq!(
3487 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3488 &[
3489 DiagnosticEntry {
3490 range: Point::new(1, 8)..Point::new(1, 9),
3491 diagnostic: Diagnostic {
3492 severity: DiagnosticSeverity::WARNING,
3493 message: "error 1".to_string(),
3494 group_id: 1,
3495 is_primary: true,
3496 ..Default::default()
3497 }
3498 },
3499 DiagnosticEntry {
3500 range: Point::new(1, 8)..Point::new(1, 9),
3501 diagnostic: Diagnostic {
3502 severity: DiagnosticSeverity::HINT,
3503 message: "error 1 hint 1".to_string(),
3504 group_id: 1,
3505 is_primary: false,
3506 ..Default::default()
3507 }
3508 },
3509 ]
3510 );
3511}
3512
// Exercises the two-phase LSP rename flow against a fake language server:
// `prepare_rename` resolves the symbol range under the cursor, then
// `perform_rename` applies a multi-file `WorkspaceEdit` returned by the server.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A Rust language whose fake LSP adapter advertises rename support,
    // including `prepareProvider`, so the project will issue PrepareRename.
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    // `one.rs` defines ONE; `two.rs` references it twice, so a rename of ONE
    // must edit both files.
    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    // Opening the buffer starts the fake language server for this language.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Kick off the request first, then service it from the fake server;
    // `handle_request` yields once per handled request, so awaiting `.next()`
    // confirms the server actually received PrepareRename.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    // The server's LSP range (0,6)-(0,9) maps to buffer offsets 6..9 ("ONE").
    let range = response.await.unwrap().unwrap();
    let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Same pattern for the actual rename: start the request, then answer it
    // with a WorkspaceEdit touching both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both edited buffers: the one we opened
    // (one.rs) plus two.rs, which the rename opened implicitly.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .read_with(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .read_with(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3650
3651#[gpui::test]
3652async fn test_search(cx: &mut gpui::TestAppContext) {
3653 init_test(cx);
3654
3655 let fs = FakeFs::new(cx.background());
3656 fs.insert_tree(
3657 "/dir",
3658 json!({
3659 "one.rs": "const ONE: usize = 1;",
3660 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3661 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3662 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3663 }),
3664 )
3665 .await;
3666 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3667 assert_eq!(
3668 search(
3669 &project,
3670 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3671 cx
3672 )
3673 .await
3674 .unwrap(),
3675 HashMap::from_iter([
3676 ("two.rs".to_string(), vec![6..9]),
3677 ("three.rs".to_string(), vec![37..40])
3678 ])
3679 );
3680
3681 let buffer_4 = project
3682 .update(cx, |project, cx| {
3683 project.open_local_buffer("/dir/four.rs", cx)
3684 })
3685 .await
3686 .unwrap();
3687 buffer_4.update(cx, |buffer, cx| {
3688 let text = "two::TWO";
3689 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3690 });
3691
3692 assert_eq!(
3693 search(
3694 &project,
3695 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3696 cx
3697 )
3698 .await
3699 .unwrap(),
3700 HashMap::from_iter([
3701 ("two.rs".to_string(), vec![6..9]),
3702 ("three.rs".to_string(), vec![37..40]),
3703 ("four.rs".to_string(), vec![25..28, 36..39])
3704 ])
3705 );
3706}
3707
3708#[gpui::test]
3709async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3710 init_test(cx);
3711
3712 let search_query = "file";
3713
3714 let fs = FakeFs::new(cx.background());
3715 fs.insert_tree(
3716 "/dir",
3717 json!({
3718 "one.rs": r#"// Rust file one"#,
3719 "one.ts": r#"// TypeScript file one"#,
3720 "two.rs": r#"// Rust file two"#,
3721 "two.ts": r#"// TypeScript file two"#,
3722 }),
3723 )
3724 .await;
3725 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3726
3727 assert!(
3728 search(
3729 &project,
3730 SearchQuery::text(
3731 search_query,
3732 false,
3733 true,
3734 false,
3735 vec![PathMatcher::new("*.odd").unwrap()],
3736 Vec::new()
3737 )
3738 .unwrap(),
3739 cx
3740 )
3741 .await
3742 .unwrap()
3743 .is_empty(),
3744 "If no inclusions match, no files should be returned"
3745 );
3746
3747 assert_eq!(
3748 search(
3749 &project,
3750 SearchQuery::text(
3751 search_query,
3752 false,
3753 true,
3754 false,
3755 vec![PathMatcher::new("*.rs").unwrap()],
3756 Vec::new()
3757 )
3758 .unwrap(),
3759 cx
3760 )
3761 .await
3762 .unwrap(),
3763 HashMap::from_iter([
3764 ("one.rs".to_string(), vec![8..12]),
3765 ("two.rs".to_string(), vec![8..12]),
3766 ]),
3767 "Rust only search should give only Rust files"
3768 );
3769
3770 assert_eq!(
3771 search(
3772 &project,
3773 SearchQuery::text(
3774 search_query,
3775 false,
3776 true,
3777 false,
3778 vec![
3779 PathMatcher::new("*.ts").unwrap(),
3780 PathMatcher::new("*.odd").unwrap(),
3781 ],
3782 Vec::new()
3783 ).unwrap(),
3784 cx
3785 )
3786 .await
3787 .unwrap(),
3788 HashMap::from_iter([
3789 ("one.ts".to_string(), vec![14..18]),
3790 ("two.ts".to_string(), vec![14..18]),
3791 ]),
3792 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3793 );
3794
3795 assert_eq!(
3796 search(
3797 &project,
3798 SearchQuery::text(
3799 search_query,
3800 false,
3801 true,
3802 false,
3803 vec![
3804 PathMatcher::new("*.rs").unwrap(),
3805 PathMatcher::new("*.ts").unwrap(),
3806 PathMatcher::new("*.odd").unwrap(),
3807 ],
3808 Vec::new()
3809 ).unwrap(),
3810 cx
3811 )
3812 .await
3813 .unwrap(),
3814 HashMap::from_iter([
3815 ("one.rs".to_string(), vec![8..12]),
3816 ("one.ts".to_string(), vec![14..18]),
3817 ("two.rs".to_string(), vec![8..12]),
3818 ("two.ts".to_string(), vec![14..18]),
3819 ]),
3820 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3821 );
3822}
3823
3824#[gpui::test]
3825async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3826 init_test(cx);
3827
3828 let search_query = "file";
3829
3830 let fs = FakeFs::new(cx.background());
3831 fs.insert_tree(
3832 "/dir",
3833 json!({
3834 "one.rs": r#"// Rust file one"#,
3835 "one.ts": r#"// TypeScript file one"#,
3836 "two.rs": r#"// Rust file two"#,
3837 "two.ts": r#"// TypeScript file two"#,
3838 }),
3839 )
3840 .await;
3841 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3842
3843 assert_eq!(
3844 search(
3845 &project,
3846 SearchQuery::text(
3847 search_query,
3848 false,
3849 true,
3850 false,
3851 Vec::new(),
3852 vec![PathMatcher::new("*.odd").unwrap()],
3853 )
3854 .unwrap(),
3855 cx
3856 )
3857 .await
3858 .unwrap(),
3859 HashMap::from_iter([
3860 ("one.rs".to_string(), vec![8..12]),
3861 ("one.ts".to_string(), vec![14..18]),
3862 ("two.rs".to_string(), vec![8..12]),
3863 ("two.ts".to_string(), vec![14..18]),
3864 ]),
3865 "If no exclusions match, all files should be returned"
3866 );
3867
3868 assert_eq!(
3869 search(
3870 &project,
3871 SearchQuery::text(
3872 search_query,
3873 false,
3874 true,
3875 false,
3876 Vec::new(),
3877 vec![PathMatcher::new("*.rs").unwrap()],
3878 )
3879 .unwrap(),
3880 cx
3881 )
3882 .await
3883 .unwrap(),
3884 HashMap::from_iter([
3885 ("one.ts".to_string(), vec![14..18]),
3886 ("two.ts".to_string(), vec![14..18]),
3887 ]),
3888 "Rust exclusion search should give only TypeScript files"
3889 );
3890
3891 assert_eq!(
3892 search(
3893 &project,
3894 SearchQuery::text(
3895 search_query,
3896 false,
3897 true,
3898 false,
3899 Vec::new(),
3900 vec![
3901 PathMatcher::new("*.ts").unwrap(),
3902 PathMatcher::new("*.odd").unwrap(),
3903 ],
3904 ).unwrap(),
3905 cx
3906 )
3907 .await
3908 .unwrap(),
3909 HashMap::from_iter([
3910 ("one.rs".to_string(), vec![8..12]),
3911 ("two.rs".to_string(), vec![8..12]),
3912 ]),
3913 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3914 );
3915
3916 assert!(
3917 search(
3918 &project,
3919 SearchQuery::text(
3920 search_query,
3921 false,
3922 true,
3923 false,
3924 Vec::new(),
3925 vec![
3926 PathMatcher::new("*.rs").unwrap(),
3927 PathMatcher::new("*.ts").unwrap(),
3928 PathMatcher::new("*.odd").unwrap(),
3929 ],
3930 ).unwrap(),
3931 cx
3932 )
3933 .await
3934 .unwrap().is_empty(),
3935 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
3936 );
3937}
3938
3939#[gpui::test]
3940async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3941 init_test(cx);
3942
3943 let search_query = "file";
3944
3945 let fs = FakeFs::new(cx.background());
3946 fs.insert_tree(
3947 "/dir",
3948 json!({
3949 "one.rs": r#"// Rust file one"#,
3950 "one.ts": r#"// TypeScript file one"#,
3951 "two.rs": r#"// Rust file two"#,
3952 "two.ts": r#"// TypeScript file two"#,
3953 }),
3954 )
3955 .await;
3956 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3957
3958 assert!(
3959 search(
3960 &project,
3961 SearchQuery::text(
3962 search_query,
3963 false,
3964 true,
3965 false,
3966 vec![PathMatcher::new("*.odd").unwrap()],
3967 vec![PathMatcher::new("*.odd").unwrap()],
3968 )
3969 .unwrap(),
3970 cx
3971 )
3972 .await
3973 .unwrap()
3974 .is_empty(),
3975 "If both no exclusions and inclusions match, exclusions should win and return nothing"
3976 );
3977
3978 assert!(
3979 search(
3980 &project,
3981 SearchQuery::text(
3982 search_query,
3983 false,
3984 true,
3985 false,
3986 vec![PathMatcher::new("*.ts").unwrap()],
3987 vec![PathMatcher::new("*.ts").unwrap()],
3988 ).unwrap(),
3989 cx
3990 )
3991 .await
3992 .unwrap()
3993 .is_empty(),
3994 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
3995 );
3996
3997 assert!(
3998 search(
3999 &project,
4000 SearchQuery::text(
4001 search_query,
4002 false,
4003 true,
4004 false,
4005 vec![
4006 PathMatcher::new("*.ts").unwrap(),
4007 PathMatcher::new("*.odd").unwrap()
4008 ],
4009 vec![
4010 PathMatcher::new("*.ts").unwrap(),
4011 PathMatcher::new("*.odd").unwrap()
4012 ],
4013 )
4014 .unwrap(),
4015 cx
4016 )
4017 .await
4018 .unwrap()
4019 .is_empty(),
4020 "Non-matching inclusions and exclusions should not change that."
4021 );
4022
4023 assert_eq!(
4024 search(
4025 &project,
4026 SearchQuery::text(
4027 search_query,
4028 false,
4029 true,
4030 false,
4031 vec![
4032 PathMatcher::new("*.ts").unwrap(),
4033 PathMatcher::new("*.odd").unwrap()
4034 ],
4035 vec![
4036 PathMatcher::new("*.rs").unwrap(),
4037 PathMatcher::new("*.odd").unwrap()
4038 ],
4039 )
4040 .unwrap(),
4041 cx
4042 )
4043 .await
4044 .unwrap(),
4045 HashMap::from_iter([
4046 ("one.ts".to_string(), vec![14..18]),
4047 ("two.ts".to_string(), vec![14..18]),
4048 ]),
4049 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4050 );
4051}
4052
4053#[gpui::test]
4054async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4055 init_test(cx);
4056
4057 let fs = FakeFs::new(cx.background());
4058 fs.insert_tree(
4059 "/dir",
4060 json!({
4061 ".git": {},
4062 ".gitignore": "**/target\n/node_modules\n",
4063 "target": {
4064 "index.txt": "index_key:index_value"
4065 },
4066 "node_modules": {
4067 "eslint": {
4068 "index.ts": "const eslint_key = 'eslint value'",
4069 "package.json": r#"{ "some_key": "some value" }"#,
4070 },
4071 "prettier": {
4072 "index.ts": "const prettier_key = 'prettier value'",
4073 "package.json": r#"{ "other_key": "other value" }"#,
4074 },
4075 },
4076 "package.json": r#"{ "main_key": "main value" }"#,
4077 }),
4078 )
4079 .await;
4080 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4081
4082 let query = "key";
4083 assert_eq!(
4084 search(
4085 &project,
4086 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4087 cx
4088 )
4089 .await
4090 .unwrap(),
4091 HashMap::from_iter([("package.json".to_string(), vec![8..11])]),
4092 "Only one non-ignored file should have the query"
4093 );
4094
4095 assert_eq!(
4096 search(
4097 &project,
4098 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4099 cx
4100 )
4101 .await
4102 .unwrap(),
4103 HashMap::from_iter([
4104 ("package.json".to_string(), vec![8..11]),
4105 ("target/index.txt".to_string(), vec![6..9]),
4106 (
4107 "node_modules/prettier/package.json".to_string(),
4108 vec![9..12]
4109 ),
4110 ("node_modules/prettier/index.ts".to_string(), vec![15..18]),
4111 ("node_modules/eslint/index.ts".to_string(), vec![13..16]),
4112 ("node_modules/eslint/package.json".to_string(), vec![8..11]),
4113 ]),
4114 "Unrestricted search with ignored directories should find every file with the query"
4115 );
4116
4117 assert_eq!(
4118 search(
4119 &project,
4120 SearchQuery::text(
4121 query,
4122 false,
4123 false,
4124 true,
4125 vec![PathMatcher::new("node_modules/prettier/**").unwrap()],
4126 vec![PathMatcher::new("*.ts").unwrap()],
4127 )
4128 .unwrap(),
4129 cx
4130 )
4131 .await
4132 .unwrap(),
4133 HashMap::from_iter([(
4134 "node_modules/prettier/package.json".to_string(),
4135 vec![9..12]
4136 )]),
4137 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4138 );
4139}
4140
// Each glob should map to its longest leading run of literal (metacharacter-
// free) path components; a fully literal path is its own prefix.
#[test]
fn test_glob_literal_prefix() {
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected_prefix) in cases {
        assert_eq!(
            glob_literal_prefix(glob),
            expected_prefix,
            "literal prefix of {:?}",
            glob
        );
    }
}
4148
4149async fn search(
4150 project: &ModelHandle<Project>,
4151 query: SearchQuery,
4152 cx: &mut gpui::TestAppContext,
4153) -> Result<HashMap<String, Vec<Range<usize>>>> {
4154 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4155 let mut result = HashMap::default();
4156 while let Some((buffer, range)) = search_rx.next().await {
4157 result.entry(buffer).or_insert(range);
4158 }
4159 Ok(result
4160 .into_iter()
4161 .map(|(buffer, ranges)| {
4162 buffer.read_with(cx, |buffer, _| {
4163 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
4164 let ranges = ranges
4165 .into_iter()
4166 .map(|range| range.to_offset(buffer))
4167 .collect::<Vec<_>>();
4168 (path, ranges)
4169 })
4170 })
4171 .collect())
4172}
4173
/// Shared setup for the tests in this file: forbids parking on the foreground
/// executor (so an accidental blocking wait panics instead of hanging the
/// deterministic test scheduler) and installs the test settings globals.
fn init_test(cx: &mut gpui::TestAppContext) {
    cx.foreground().forbid_parking();

    cx.update(|cx| {
        // Register the settings store first; `language` and `Project`
        // initialization below presumably read from it — order preserved
        // deliberately.
        cx.set_global(SettingsStore::test(cx));
        language::init(cx);
        Project::init_settings(cx);
    });
}