1use crate::{search::PathMatcher, worktree::WorktreeHandle, Event, *};
2use fs::{FakeFs, LineEnding, RealFs};
3use futures::{future, StreamExt};
4use gpui::{executor::Deterministic, test::subscribe, AppContext};
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
15use unindent::Unindent as _;
16use util::{assert_set_eq, test::temp_tree};
17
18#[cfg(test)]
19#[ctor::ctor]
20fn init_logger() {
21 if std::env::var("RUST_LOG").is_ok() {
22 env_logger::init();
23 }
24}
25
26#[gpui::test]
27async fn test_symlinks(cx: &mut gpui::TestAppContext) {
28 init_test(cx);
29 cx.foreground().allow_parking();
30
31 let dir = temp_tree(json!({
32 "root": {
33 "apple": "",
34 "banana": {
35 "carrot": {
36 "date": "",
37 "endive": "",
38 }
39 },
40 "fennel": {
41 "grape": "",
42 }
43 }
44 }));
45
46 let root_link_path = dir.path().join("root_link");
47 unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
48 unix::fs::symlink(
49 &dir.path().join("root/fennel"),
50 &dir.path().join("root/finnochio"),
51 )
52 .unwrap();
53
54 let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
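    // The "finnochio" symlink mirrors the "fennel" directory, so its contents are
    // included in the worktree and resolve to the same inodes.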
55 project.read_with(cx, |project, cx| {
56 let tree = project.worktrees(cx).next().unwrap().read(cx);
57 assert_eq!(tree.file_count(), 5);
58 assert_eq!(
59 tree.inode_for_path("fennel/grape"),
60 tree.inode_for_path("finnochio/grape")
61 );
62 });
63}
64
65#[gpui::test]
66async fn test_managing_project_specific_settings(
67 deterministic: Arc<Deterministic>,
68 cx: &mut gpui::TestAppContext,
69) {
70 init_test(cx);
71
72 let fs = FakeFs::new(cx.background());
73 fs.insert_tree(
74 "/the-root",
75 json!({
76 ".zed": {
77 "settings.json": r#"{ "tab_size": 8 }"#
78 },
79 "a": {
80 "a.rs": "fn a() {\n A\n}"
81 },
82 "b": {
83 ".zed": {
84 "settings.json": r#"{ "tab_size": 2 }"#
85 },
86 "b.rs": "fn b() {\n B\n}"
87 }
88 }),
89 )
90 .await;
91
92 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
93 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
94
95 deterministic.run_until_parked();
96 cx.read(|cx| {
97 let tree = worktree.read(cx);
98
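        // Settings are resolved per file: `a/a.rs` only sees the root `.zed/settings.json`,
        // while `b/b.rs` is overridden by `b/.zed/settings.json`.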
99 let settings_a = language_settings(
100 None,
101 Some(
102 &(File::for_entry(
103 tree.entry_for_path("a/a.rs").unwrap().clone(),
104 worktree.clone(),
105 ) as _),
106 ),
107 cx,
108 );
109 let settings_b = language_settings(
110 None,
111 Some(
112 &(File::for_entry(
113 tree.entry_for_path("b/b.rs").unwrap().clone(),
114 worktree.clone(),
115 ) as _),
116 ),
117 cx,
118 );
119
120 assert_eq!(settings_a.tab_size.get(), 8);
121 assert_eq!(settings_b.tab_size.get(), 2);
122 });
123}
124
125#[gpui::test]
126async fn test_managing_language_servers(
127 deterministic: Arc<Deterministic>,
128 cx: &mut gpui::TestAppContext,
129) {
130 init_test(cx);
131
132 let mut rust_language = Language::new(
133 LanguageConfig {
134 name: "Rust".into(),
135 path_suffixes: vec!["rs".to_string()],
136 ..Default::default()
137 },
138 Some(tree_sitter_rust::language()),
139 );
140 let mut json_language = Language::new(
141 LanguageConfig {
142 name: "JSON".into(),
143 path_suffixes: vec!["json".to_string()],
144 ..Default::default()
145 },
146 None,
147 );
148 let mut fake_rust_servers = rust_language
149 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
150 name: "the-rust-language-server",
151 capabilities: lsp::ServerCapabilities {
152 completion_provider: Some(lsp::CompletionOptions {
153 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
154 ..Default::default()
155 }),
156 ..Default::default()
157 },
158 ..Default::default()
159 }))
160 .await;
161 let mut fake_json_servers = json_language
162 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
163 name: "the-json-language-server",
164 capabilities: lsp::ServerCapabilities {
165 completion_provider: Some(lsp::CompletionOptions {
166 trigger_characters: Some(vec![":".to_string()]),
167 ..Default::default()
168 }),
169 ..Default::default()
170 },
171 ..Default::default()
172 }))
173 .await;
174
175 let fs = FakeFs::new(cx.background());
176 fs.insert_tree(
177 "/the-root",
178 json!({
179 "test.rs": "const A: i32 = 1;",
180 "test2.rs": "",
181 "Cargo.toml": "a = 1",
182 "package.json": "{\"a\": 1}",
183 }),
184 )
185 .await;
186
187 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
188
189 // Open a buffer without an associated language server.
190 let toml_buffer = project
191 .update(cx, |project, cx| {
192 project.open_local_buffer("/the-root/Cargo.toml", cx)
193 })
194 .await
195 .unwrap();
196
    // Open a buffer that will have an associated language server, before that language has been registered with the project.
198 let rust_buffer = project
199 .update(cx, |project, cx| {
200 project.open_local_buffer("/the-root/test.rs", cx)
201 })
202 .await
203 .unwrap();
204 rust_buffer.read_with(cx, |buffer, _| {
205 assert_eq!(buffer.language().map(|l| l.name()), None);
206 });
207
208 // Now we add the languages to the project, and ensure they get assigned to all
209 // the relevant open buffers.
210 project.update(cx, |project, _| {
211 project.languages.add(Arc::new(json_language));
212 project.languages.add(Arc::new(rust_language));
213 });
214 deterministic.run_until_parked();
215 rust_buffer.read_with(cx, |buffer, _| {
216 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
217 });
218
219 // A server is started up, and it is notified about Rust files.
220 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
221 assert_eq!(
222 fake_rust_server
223 .receive_notification::<lsp::notification::DidOpenTextDocument>()
224 .await
225 .text_document,
226 lsp::TextDocumentItem {
227 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
228 version: 0,
229 text: "const A: i32 = 1;".to_string(),
230 language_id: Default::default()
231 }
232 );
233
234 // The buffer is configured based on the language server's capabilities.
235 rust_buffer.read_with(cx, |buffer, _| {
236 assert_eq!(
237 buffer.completion_triggers(),
238 &[".".to_string(), "::".to_string()]
239 );
240 });
241 toml_buffer.read_with(cx, |buffer, _| {
242 assert!(buffer.completion_triggers().is_empty());
243 });
244
245 // Edit a buffer. The changes are reported to the language server.
246 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
247 assert_eq!(
248 fake_rust_server
249 .receive_notification::<lsp::notification::DidChangeTextDocument>()
250 .await
251 .text_document,
252 lsp::VersionedTextDocumentIdentifier::new(
253 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
254 1
255 )
256 );
257
258 // Open a third buffer with a different associated language server.
259 let json_buffer = project
260 .update(cx, |project, cx| {
261 project.open_local_buffer("/the-root/package.json", cx)
262 })
263 .await
264 .unwrap();
265
    // A JSON language server is started and is notified only about the JSON buffer.
267 let mut fake_json_server = fake_json_servers.next().await.unwrap();
268 assert_eq!(
269 fake_json_server
270 .receive_notification::<lsp::notification::DidOpenTextDocument>()
271 .await
272 .text_document,
273 lsp::TextDocumentItem {
274 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
275 version: 0,
276 text: "{\"a\": 1}".to_string(),
277 language_id: Default::default()
278 }
279 );
280
281 // This buffer is configured based on the second language server's
282 // capabilities.
283 json_buffer.read_with(cx, |buffer, _| {
284 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
285 });
286
287 // When opening another buffer whose language server is already running,
288 // it is also configured based on the existing language server's capabilities.
289 let rust_buffer2 = project
290 .update(cx, |project, cx| {
291 project.open_local_buffer("/the-root/test2.rs", cx)
292 })
293 .await
294 .unwrap();
295 rust_buffer2.read_with(cx, |buffer, _| {
296 assert_eq!(
297 buffer.completion_triggers(),
298 &[".".to_string(), "::".to_string()]
299 );
300 });
301
302 // Changes are reported only to servers matching the buffer's language.
303 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
304 rust_buffer2.update(cx, |buffer, cx| {
305 buffer.edit([(0..0, "let x = 1;")], None, cx)
306 });
307 assert_eq!(
308 fake_rust_server
309 .receive_notification::<lsp::notification::DidChangeTextDocument>()
310 .await
311 .text_document,
312 lsp::VersionedTextDocumentIdentifier::new(
313 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
314 1
315 )
316 );
317
318 // Save notifications are reported to all servers.
319 project
320 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
321 .await
322 .unwrap();
323 assert_eq!(
324 fake_rust_server
325 .receive_notification::<lsp::notification::DidSaveTextDocument>()
326 .await
327 .text_document,
328 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
329 );
330 assert_eq!(
331 fake_json_server
332 .receive_notification::<lsp::notification::DidSaveTextDocument>()
333 .await
334 .text_document,
335 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
336 );
337
338 // Renames are reported only to servers matching the buffer's language.
339 fs.rename(
340 Path::new("/the-root/test2.rs"),
341 Path::new("/the-root/test3.rs"),
342 Default::default(),
343 )
344 .await
345 .unwrap();
346 assert_eq!(
347 fake_rust_server
348 .receive_notification::<lsp::notification::DidCloseTextDocument>()
349 .await
350 .text_document,
351 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
352 );
353 assert_eq!(
354 fake_rust_server
355 .receive_notification::<lsp::notification::DidOpenTextDocument>()
356 .await
357 .text_document,
358 lsp::TextDocumentItem {
359 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
360 version: 0,
361 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
362 language_id: Default::default()
363 },
364 );
365
366 rust_buffer2.update(cx, |buffer, cx| {
367 buffer.update_diagnostics(
368 LanguageServerId(0),
369 DiagnosticSet::from_sorted_entries(
370 vec![DiagnosticEntry {
371 diagnostic: Default::default(),
372 range: Anchor::MIN..Anchor::MAX,
373 }],
374 &buffer.snapshot(),
375 ),
376 cx,
377 );
378 assert_eq!(
379 buffer
380 .snapshot()
381 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
382 .count(),
383 1
384 );
385 });
386
387 // When the rename changes the extension of the file, the buffer gets closed on the old
388 // language server and gets opened on the new one.
389 fs.rename(
390 Path::new("/the-root/test3.rs"),
391 Path::new("/the-root/test3.json"),
392 Default::default(),
393 )
394 .await
395 .unwrap();
396 assert_eq!(
397 fake_rust_server
398 .receive_notification::<lsp::notification::DidCloseTextDocument>()
399 .await
400 .text_document,
401 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
402 );
403 assert_eq!(
404 fake_json_server
405 .receive_notification::<lsp::notification::DidOpenTextDocument>()
406 .await
407 .text_document,
408 lsp::TextDocumentItem {
409 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
410 version: 0,
411 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
412 language_id: Default::default()
413 },
414 );
415
416 // We clear the diagnostics, since the language has changed.
417 rust_buffer2.read_with(cx, |buffer, _| {
418 assert_eq!(
419 buffer
420 .snapshot()
421 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
422 .count(),
423 0
424 );
425 });
426
    // The renamed file's version resets after changing language servers.
428 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
429 assert_eq!(
430 fake_json_server
431 .receive_notification::<lsp::notification::DidChangeTextDocument>()
432 .await
433 .text_document,
434 lsp::VersionedTextDocumentIdentifier::new(
435 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
436 1
437 )
438 );
439
440 // Restart language servers
441 project.update(cx, |project, cx| {
442 project.restart_language_servers_for_buffers(
443 vec![rust_buffer.clone(), json_buffer.clone()],
444 cx,
445 );
446 });
447
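    // Wait for the old Rust and JSON servers to each receive a shutdown request
    // before the replacement servers start.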
448 let mut rust_shutdown_requests = fake_rust_server
449 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
450 let mut json_shutdown_requests = fake_json_server
451 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
452 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
453
454 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
455 let mut fake_json_server = fake_json_servers.next().await.unwrap();
456
    // Ensure the Rust document is reopened in the new Rust language server.
458 assert_eq!(
459 fake_rust_server
460 .receive_notification::<lsp::notification::DidOpenTextDocument>()
461 .await
462 .text_document,
463 lsp::TextDocumentItem {
464 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
465 version: 0,
466 text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
467 language_id: Default::default()
468 }
469 );
470
    // Ensure the JSON documents are reopened in the new JSON language server.
472 assert_set_eq!(
473 [
474 fake_json_server
475 .receive_notification::<lsp::notification::DidOpenTextDocument>()
476 .await
477 .text_document,
478 fake_json_server
479 .receive_notification::<lsp::notification::DidOpenTextDocument>()
480 .await
481 .text_document,
482 ],
483 [
484 lsp::TextDocumentItem {
485 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
486 version: 0,
487 text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
488 language_id: Default::default()
489 },
490 lsp::TextDocumentItem {
491 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
492 version: 0,
493 text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
494 language_id: Default::default()
495 }
496 ]
497 );
498
499 // Close notifications are reported only to servers matching the buffer's language.
500 cx.update(|_| drop(json_buffer));
501 let close_message = lsp::DidCloseTextDocumentParams {
502 text_document: lsp::TextDocumentIdentifier::new(
503 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
504 ),
505 };
506 assert_eq!(
507 fake_json_server
508 .receive_notification::<lsp::notification::DidCloseTextDocument>()
509 .await,
510 close_message,
511 );
512}
513
514#[gpui::test]
515async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
516 init_test(cx);
517
518 let mut language = Language::new(
519 LanguageConfig {
520 name: "Rust".into(),
521 path_suffixes: vec!["rs".to_string()],
522 ..Default::default()
523 },
524 Some(tree_sitter_rust::language()),
525 );
526 let mut fake_servers = language
527 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
528 name: "the-language-server",
529 ..Default::default()
530 }))
531 .await;
532
533 let fs = FakeFs::new(cx.background());
534 fs.insert_tree(
535 "/the-root",
536 json!({
537 ".gitignore": "target\n",
538 "src": {
539 "a.rs": "",
540 "b.rs": "",
541 },
542 "target": {
543 "x": {
544 "out": {
545 "x.rs": ""
546 }
547 },
548 "y": {
549 "out": {
550 "y.rs": "",
551 }
552 },
553 "z": {
554 "out": {
555 "z.rs": ""
556 }
557 }
558 }
559 }),
560 )
561 .await;
562
563 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
564 project.update(cx, |project, _| {
565 project.languages.add(Arc::new(language));
566 });
567 cx.foreground().run_until_parked();
568
569 // Start the language server by opening a buffer with a compatible file extension.
570 let _buffer = project
571 .update(cx, |project, cx| {
572 project.open_local_buffer("/the-root/src/a.rs", cx)
573 })
574 .await
575 .unwrap();
576
577 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
578 project.read_with(cx, |project, cx| {
579 let worktree = project.worktrees(cx).next().unwrap();
580 assert_eq!(
581 worktree
582 .read(cx)
583 .snapshot()
584 .entries(true)
585 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
586 .collect::<Vec<_>>(),
587 &[
588 (Path::new(""), false),
589 (Path::new(".gitignore"), false),
590 (Path::new("src"), false),
591 (Path::new("src/a.rs"), false),
592 (Path::new("src/b.rs"), false),
593 (Path::new("target"), true),
594 ]
595 );
596 });
597
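    // Record the number of directory scans performed so far, so we can verify how many
    // additional scans the watch registration below triggers.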
598 let prev_read_dir_count = fs.read_dir_call_count();
599
600 // Keep track of the FS events reported to the language server.
601 let fake_server = fake_servers.next().await.unwrap();
602 let file_changes = Arc::new(Mutex::new(Vec::new()));
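    // Simulate the server dynamically registering a `workspace/didChangeWatchedFiles`
    // watcher via `client/registerCapability`.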
603 fake_server
604 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
605 registrations: vec![lsp::Registration {
606 id: Default::default(),
607 method: "workspace/didChangeWatchedFiles".to_string(),
608 register_options: serde_json::to_value(
609 lsp::DidChangeWatchedFilesRegistrationOptions {
610 watchers: vec![
611 lsp::FileSystemWatcher {
612 glob_pattern: lsp::GlobPattern::String(
613 "/the-root/Cargo.toml".to_string(),
614 ),
615 kind: None,
616 },
617 lsp::FileSystemWatcher {
618 glob_pattern: lsp::GlobPattern::String(
619 "/the-root/src/*.{rs,c}".to_string(),
620 ),
621 kind: None,
622 },
623 lsp::FileSystemWatcher {
624 glob_pattern: lsp::GlobPattern::String(
625 "/the-root/target/y/**/*.rs".to_string(),
626 ),
627 kind: None,
628 },
629 ],
630 },
631 )
632 .ok(),
633 }],
634 })
635 .await
636 .unwrap();
637 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
638 let file_changes = file_changes.clone();
639 move |params, _| {
640 let mut file_changes = file_changes.lock();
641 file_changes.extend(params.changes);
642 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
643 }
644 });
645
646 cx.foreground().run_until_parked();
647 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
648 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
649
650 // Now the language server has asked us to watch an ignored directory path,
651 // so we recursively load it.
652 project.read_with(cx, |project, cx| {
653 let worktree = project.worktrees(cx).next().unwrap();
654 assert_eq!(
655 worktree
656 .read(cx)
657 .snapshot()
658 .entries(true)
659 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
660 .collect::<Vec<_>>(),
661 &[
662 (Path::new(""), false),
663 (Path::new(".gitignore"), false),
664 (Path::new("src"), false),
665 (Path::new("src/a.rs"), false),
666 (Path::new("src/b.rs"), false),
667 (Path::new("target"), true),
668 (Path::new("target/x"), true),
669 (Path::new("target/y"), true),
670 (Path::new("target/y/out"), true),
671 (Path::new("target/y/out/y.rs"), true),
672 (Path::new("target/z"), true),
673 ]
674 );
675 });
676
    // Perform some file system mutations. Three of them match the watched patterns
    // (creating `src/c.rs`, deleting `src/b.rs`, and creating `target/y/out/y2.rs`),
    // and two do not (`src/d.txt` and `target/x/out/x2.rs`).
679 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
680 .await
681 .unwrap();
682 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
683 .await
684 .unwrap();
685 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
686 .await
687 .unwrap();
688 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
689 .await
690 .unwrap();
691 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
692 .await
693 .unwrap();
694
695 // The language server receives events for the FS mutations that match its watch patterns.
696 cx.foreground().run_until_parked();
697 assert_eq!(
698 &*file_changes.lock(),
699 &[
700 lsp::FileEvent {
701 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
702 typ: lsp::FileChangeType::DELETED,
703 },
704 lsp::FileEvent {
705 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
706 typ: lsp::FileChangeType::CREATED,
707 },
708 lsp::FileEvent {
709 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
710 typ: lsp::FileChangeType::CREATED,
711 },
712 ]
713 );
714}
715
716#[gpui::test]
717async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
718 init_test(cx);
719
720 let fs = FakeFs::new(cx.background());
721 fs.insert_tree(
722 "/dir",
723 json!({
724 "a.rs": "let a = 1;",
725 "b.rs": "let b = 2;"
726 }),
727 )
728 .await;
729
730 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
731
732 let buffer_a = project
733 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
734 .await
735 .unwrap();
736 let buffer_b = project
737 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
738 .await
739 .unwrap();
740
741 project.update(cx, |project, cx| {
742 project
743 .update_diagnostics(
744 LanguageServerId(0),
745 lsp::PublishDiagnosticsParams {
746 uri: Url::from_file_path("/dir/a.rs").unwrap(),
747 version: None,
748 diagnostics: vec![lsp::Diagnostic {
749 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
750 severity: Some(lsp::DiagnosticSeverity::ERROR),
751 message: "error 1".to_string(),
752 ..Default::default()
753 }],
754 },
755 &[],
756 cx,
757 )
758 .unwrap();
759 project
760 .update_diagnostics(
761 LanguageServerId(0),
762 lsp::PublishDiagnosticsParams {
763 uri: Url::from_file_path("/dir/b.rs").unwrap(),
764 version: None,
765 diagnostics: vec![lsp::Diagnostic {
766 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
767 severity: Some(lsp::DiagnosticSeverity::WARNING),
768 message: "error 2".to_string(),
769 ..Default::default()
770 }],
771 },
772 &[],
773 cx,
774 )
775 .unwrap();
776 });
777
778 buffer_a.read_with(cx, |buffer, _| {
779 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
780 assert_eq!(
781 chunks
782 .iter()
783 .map(|(s, d)| (s.as_str(), *d))
784 .collect::<Vec<_>>(),
785 &[
786 ("let ", None),
787 ("a", Some(DiagnosticSeverity::ERROR)),
788 (" = 1;", None),
789 ]
790 );
791 });
792 buffer_b.read_with(cx, |buffer, _| {
793 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
794 assert_eq!(
795 chunks
796 .iter()
797 .map(|(s, d)| (s.as_str(), *d))
798 .collect::<Vec<_>>(),
799 &[
800 ("let ", None),
801 ("b", Some(DiagnosticSeverity::WARNING)),
802 (" = 2;", None),
803 ]
804 );
805 });
806}
807
808#[gpui::test]
809async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.background());
813 fs.insert_tree(
814 "/root",
815 json!({
816 "dir": {
817 "a.rs": "let a = 1;",
818 },
819 "other.rs": "let b = c;"
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
825
826 let (worktree, _) = project
827 .update(cx, |project, cx| {
828 project.find_or_create_local_worktree("/root/other.rs", false, cx)
829 })
830 .await
831 .unwrap();
832 let worktree_id = worktree.read_with(cx, |tree, _| tree.id());
833
834 project.update(cx, |project, cx| {
835 project
836 .update_diagnostics(
837 LanguageServerId(0),
838 lsp::PublishDiagnosticsParams {
839 uri: Url::from_file_path("/root/other.rs").unwrap(),
840 version: None,
841 diagnostics: vec![lsp::Diagnostic {
842 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
843 severity: Some(lsp::DiagnosticSeverity::ERROR),
844 message: "unknown variable 'c'".to_string(),
845 ..Default::default()
846 }],
847 },
848 &[],
849 cx,
850 )
851 .unwrap();
852 });
853
854 let buffer = project
855 .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
856 .await
857 .unwrap();
858 buffer.read_with(cx, |buffer, _| {
859 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
860 assert_eq!(
861 chunks
862 .iter()
863 .map(|(s, d)| (s.as_str(), *d))
864 .collect::<Vec<_>>(),
865 &[
866 ("let b = ", None),
867 ("c", Some(DiagnosticSeverity::ERROR)),
868 (";", None),
869 ]
870 );
871 });
872
873 project.read_with(cx, |project, cx| {
874 assert_eq!(project.diagnostic_summaries(cx).next(), None);
875 assert_eq!(project.diagnostic_summary(cx).error_count, 0);
876 });
877}
878
879#[gpui::test]
880async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
881 init_test(cx);
882
883 let progress_token = "the-progress-token";
884 let mut language = Language::new(
885 LanguageConfig {
886 name: "Rust".into(),
887 path_suffixes: vec!["rs".to_string()],
888 ..Default::default()
889 },
890 Some(tree_sitter_rust::language()),
891 );
892 let mut fake_servers = language
893 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
894 disk_based_diagnostics_progress_token: Some(progress_token.into()),
895 disk_based_diagnostics_sources: vec!["disk".into()],
896 ..Default::default()
897 }))
898 .await;
899
900 let fs = FakeFs::new(cx.background());
901 fs.insert_tree(
902 "/dir",
903 json!({
904 "a.rs": "fn a() { A }",
905 "b.rs": "const y: i32 = 1",
906 }),
907 )
908 .await;
909
910 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
911 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
912 let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
913
    // Cause the worktree to start the fake language server.
915 let _buffer = project
916 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
917 .await
918 .unwrap();
919
920 let mut events = subscribe(&project, cx);
921
922 let fake_server = fake_servers.next().await.unwrap();
923 assert_eq!(
924 events.next().await.unwrap(),
925 Event::LanguageServerAdded(LanguageServerId(0)),
926 );
927
928 fake_server
929 .start_progress(format!("{}/0", progress_token))
930 .await;
931 assert_eq!(
932 events.next().await.unwrap(),
933 Event::DiskBasedDiagnosticsStarted {
934 language_server_id: LanguageServerId(0),
935 }
936 );
937
938 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
939 uri: Url::from_file_path("/dir/a.rs").unwrap(),
940 version: None,
941 diagnostics: vec![lsp::Diagnostic {
942 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
943 severity: Some(lsp::DiagnosticSeverity::ERROR),
944 message: "undefined variable 'A'".to_string(),
945 ..Default::default()
946 }],
947 });
948 assert_eq!(
949 events.next().await.unwrap(),
950 Event::DiagnosticsUpdated {
951 language_server_id: LanguageServerId(0),
952 path: (worktree_id, Path::new("a.rs")).into()
953 }
954 );
955
956 fake_server.end_progress(format!("{}/0", progress_token));
957 assert_eq!(
958 events.next().await.unwrap(),
959 Event::DiskBasedDiagnosticsFinished {
960 language_server_id: LanguageServerId(0)
961 }
962 );
963
964 let buffer = project
965 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
966 .await
967 .unwrap();
968
969 buffer.read_with(cx, |buffer, _| {
970 let snapshot = buffer.snapshot();
971 let diagnostics = snapshot
972 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
973 .collect::<Vec<_>>();
974 assert_eq!(
975 diagnostics,
976 &[DiagnosticEntry {
977 range: Point::new(0, 9)..Point::new(0, 10),
978 diagnostic: Diagnostic {
979 severity: lsp::DiagnosticSeverity::ERROR,
980 message: "undefined variable 'A'".to_string(),
981 group_id: 0,
982 is_primary: true,
983 ..Default::default()
984 }
985 }]
986 )
987 });
988
989 // Ensure publishing empty diagnostics twice only results in one update event.
990 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
991 uri: Url::from_file_path("/dir/a.rs").unwrap(),
992 version: None,
993 diagnostics: Default::default(),
994 });
995 assert_eq!(
996 events.next().await.unwrap(),
997 Event::DiagnosticsUpdated {
998 language_server_id: LanguageServerId(0),
999 path: (worktree_id, Path::new("a.rs")).into()
1000 }
1001 );
1002
1003 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1004 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1005 version: None,
1006 diagnostics: Default::default(),
1007 });
1008 cx.foreground().run_until_parked();
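    // The second, identical empty publish produces no additional event.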
1009 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1010}
1011
1012#[gpui::test]
1013async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1014 init_test(cx);
1015
1016 let progress_token = "the-progress-token";
1017 let mut language = Language::new(
1018 LanguageConfig {
1019 path_suffixes: vec!["rs".to_string()],
1020 ..Default::default()
1021 },
1022 None,
1023 );
1024 let mut fake_servers = language
1025 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1026 disk_based_diagnostics_sources: vec!["disk".into()],
1027 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1028 ..Default::default()
1029 }))
1030 .await;
1031
1032 let fs = FakeFs::new(cx.background());
1033 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1034
1035 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1036 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1037
1038 let buffer = project
1039 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1040 .await
1041 .unwrap();
1042
1043 // Simulate diagnostics starting to update.
1044 let fake_server = fake_servers.next().await.unwrap();
1045 fake_server.start_progress(progress_token).await;
1046
1047 // Restart the server before the diagnostics finish updating.
1048 project.update(cx, |project, cx| {
1049 project.restart_language_servers_for_buffers([buffer], cx);
1050 });
1051 let mut events = subscribe(&project, cx);
1052
1053 // Simulate the newly started server sending more diagnostics.
1054 let fake_server = fake_servers.next().await.unwrap();
1055 assert_eq!(
1056 events.next().await.unwrap(),
1057 Event::LanguageServerAdded(LanguageServerId(1))
1058 );
1059 fake_server.start_progress(progress_token).await;
1060 assert_eq!(
1061 events.next().await.unwrap(),
1062 Event::DiskBasedDiagnosticsStarted {
1063 language_server_id: LanguageServerId(1)
1064 }
1065 );
1066 project.read_with(cx, |project, _| {
1067 assert_eq!(
1068 project
1069 .language_servers_running_disk_based_diagnostics()
1070 .collect::<Vec<_>>(),
1071 [LanguageServerId(1)]
1072 );
1073 });
1074
1075 // All diagnostics are considered done, despite the old server's diagnostic
1076 // task never completing.
1077 fake_server.end_progress(progress_token);
1078 assert_eq!(
1079 events.next().await.unwrap(),
1080 Event::DiskBasedDiagnosticsFinished {
1081 language_server_id: LanguageServerId(1)
1082 }
1083 );
1084 project.read_with(cx, |project, _| {
1085 assert_eq!(
1086 project
1087 .language_servers_running_disk_based_diagnostics()
1088 .collect::<Vec<_>>(),
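            // An empty array: no servers are still running disk-based diagnostics.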
1089 [LanguageServerId(0); 0]
1090 );
1091 });
1092}
1093
1094#[gpui::test]
1095async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1096 init_test(cx);
1097
1098 let mut language = Language::new(
1099 LanguageConfig {
1100 path_suffixes: vec!["rs".to_string()],
1101 ..Default::default()
1102 },
1103 None,
1104 );
1105 let mut fake_servers = language
1106 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1107 ..Default::default()
1108 }))
1109 .await;
1110
1111 let fs = FakeFs::new(cx.background());
1112 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1113
1114 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1115 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1116
1117 let buffer = project
1118 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1119 .await
1120 .unwrap();
1121
1122 // Publish diagnostics
1123 let fake_server = fake_servers.next().await.unwrap();
1124 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1125 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1126 version: None,
1127 diagnostics: vec![lsp::Diagnostic {
1128 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1129 severity: Some(lsp::DiagnosticSeverity::ERROR),
1130 message: "the message".to_string(),
1131 ..Default::default()
1132 }],
1133 });
1134
1135 cx.foreground().run_until_parked();
1136 buffer.read_with(cx, |buffer, _| {
1137 assert_eq!(
1138 buffer
1139 .snapshot()
1140 .diagnostics_in_range::<_, usize>(0..1, false)
1141 .map(|entry| entry.diagnostic.message.clone())
1142 .collect::<Vec<_>>(),
1143 ["the message".to_string()]
1144 );
1145 });
1146 project.read_with(cx, |project, cx| {
1147 assert_eq!(
1148 project.diagnostic_summary(cx),
1149 DiagnosticSummary {
1150 error_count: 1,
1151 warning_count: 0,
1152 }
1153 );
1154 });
1155
1156 project.update(cx, |project, cx| {
1157 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1158 });
1159
1160 // The diagnostics are cleared.
1161 cx.foreground().run_until_parked();
1162 buffer.read_with(cx, |buffer, _| {
1163 assert_eq!(
1164 buffer
1165 .snapshot()
1166 .diagnostics_in_range::<_, usize>(0..1, false)
1167 .map(|entry| entry.diagnostic.message.clone())
1168 .collect::<Vec<_>>(),
1169 Vec::<String>::new(),
1170 );
1171 });
1172 project.read_with(cx, |project, cx| {
1173 assert_eq!(
1174 project.diagnostic_summary(cx),
1175 DiagnosticSummary {
1176 error_count: 0,
1177 warning_count: 0,
1178 }
1179 );
1180 });
1181}
1182
1183#[gpui::test]
1184async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1185 init_test(cx);
1186
1187 let mut language = Language::new(
1188 LanguageConfig {
1189 path_suffixes: vec!["rs".to_string()],
1190 ..Default::default()
1191 },
1192 None,
1193 );
1194 let mut fake_servers = language
1195 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1196 name: "the-lsp",
1197 ..Default::default()
1198 }))
1199 .await;
1200
1201 let fs = FakeFs::new(cx.background());
1202 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1203
1204 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1205 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1206
1207 let buffer = project
1208 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1209 .await
1210 .unwrap();
1211
1212 // Before restarting the server, report diagnostics with an unknown buffer version.
1213 let fake_server = fake_servers.next().await.unwrap();
1214 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1215 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1216 version: Some(10000),
1217 diagnostics: Vec::new(),
1218 });
1219 cx.foreground().run_until_parked();
1220
1221 project.update(cx, |project, cx| {
1222 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1223 });
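    // The restarted server reopens the buffer with its version reset to zero.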
1224 let mut fake_server = fake_servers.next().await.unwrap();
1225 let notification = fake_server
1226 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1227 .await
1228 .text_document;
1229 assert_eq!(notification.version, 0);
1230}
1231
1232#[gpui::test]
1233async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1234 init_test(cx);
1235
1236 let mut rust = Language::new(
1237 LanguageConfig {
1238 name: Arc::from("Rust"),
1239 path_suffixes: vec!["rs".to_string()],
1240 ..Default::default()
1241 },
1242 None,
1243 );
1244 let mut fake_rust_servers = rust
1245 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1246 name: "rust-lsp",
1247 ..Default::default()
1248 }))
1249 .await;
1250 let mut js = Language::new(
1251 LanguageConfig {
1252 name: Arc::from("JavaScript"),
1253 path_suffixes: vec!["js".to_string()],
1254 ..Default::default()
1255 },
1256 None,
1257 );
1258 let mut fake_js_servers = js
1259 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1260 name: "js-lsp",
1261 ..Default::default()
1262 }))
1263 .await;
1264
1265 let fs = FakeFs::new(cx.background());
1266 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1267 .await;
1268
1269 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1270 project.update(cx, |project, _| {
1271 project.languages.add(Arc::new(rust));
1272 project.languages.add(Arc::new(js));
1273 });
1274
1275 let _rs_buffer = project
1276 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1277 .await
1278 .unwrap();
1279 let _js_buffer = project
1280 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1281 .await
1282 .unwrap();
1283
1284 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1285 assert_eq!(
1286 fake_rust_server_1
1287 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1288 .await
1289 .text_document
1290 .uri
1291 .as_str(),
1292 "file:///dir/a.rs"
1293 );
1294
1295 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1296 assert_eq!(
1297 fake_js_server
1298 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1299 .await
1300 .text_document
1301 .uri
1302 .as_str(),
1303 "file:///dir/b.js"
1304 );
1305
    // Disable the Rust language server, ensuring only that server gets stopped.
1307 cx.update(|cx| {
1308 cx.update_global(|settings: &mut SettingsStore, cx| {
1309 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1310 settings.languages.insert(
1311 Arc::from("Rust"),
1312 LanguageSettingsContent {
1313 enable_language_server: Some(false),
1314 ..Default::default()
1315 },
1316 );
1317 });
1318 })
1319 });
1320 fake_rust_server_1
1321 .receive_notification::<lsp::notification::Exit>()
1322 .await;
1323
1324 // Enable Rust and disable JavaScript language servers, ensuring that the
1325 // former gets started again and that the latter stops.
1326 cx.update(|cx| {
1327 cx.update_global(|settings: &mut SettingsStore, cx| {
1328 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1329 settings.languages.insert(
1330 Arc::from("Rust"),
1331 LanguageSettingsContent {
1332 enable_language_server: Some(true),
1333 ..Default::default()
1334 },
1335 );
1336 settings.languages.insert(
1337 Arc::from("JavaScript"),
1338 LanguageSettingsContent {
1339 enable_language_server: Some(false),
1340 ..Default::default()
1341 },
1342 );
1343 });
1344 })
1345 });
1346 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1347 assert_eq!(
1348 fake_rust_server_2
1349 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1350 .await
1351 .text_document
1352 .uri
1353 .as_str(),
1354 "file:///dir/a.rs"
1355 );
1356 fake_js_server
1357 .receive_notification::<lsp::notification::Exit>()
1358 .await;
1359}
1360
1361#[gpui::test(iterations = 3)]
1362async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1363 init_test(cx);
1364
1365 let mut language = Language::new(
1366 LanguageConfig {
1367 name: "Rust".into(),
1368 path_suffixes: vec!["rs".to_string()],
1369 ..Default::default()
1370 },
1371 Some(tree_sitter_rust::language()),
1372 );
1373 let mut fake_servers = language
1374 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
1375 disk_based_diagnostics_sources: vec!["disk".into()],
1376 ..Default::default()
1377 }))
1378 .await;
1379
1380 let text = "
1381 fn a() { A }
1382 fn b() { BB }
1383 fn c() { CCC }
1384 "
1385 .unindent();
1386
1387 let fs = FakeFs::new(cx.background());
1388 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1389
1390 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1391 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1392
1393 let buffer = project
1394 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1395 .await
1396 .unwrap();
1397
1398 let mut fake_server = fake_servers.next().await.unwrap();
1399 let open_notification = fake_server
1400 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1401 .await;
1402
1403 // Edit the buffer, moving the content down
1404 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1405 let change_notification_1 = fake_server
1406 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1407 .await;
1408 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1409
1410 // Report some diagnostics for the initial version of the buffer
1411 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1412 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1413 version: Some(open_notification.text_document.version),
1414 diagnostics: vec![
1415 lsp::Diagnostic {
1416 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1417 severity: Some(DiagnosticSeverity::ERROR),
1418 message: "undefined variable 'A'".to_string(),
1419 source: Some("disk".to_string()),
1420 ..Default::default()
1421 },
1422 lsp::Diagnostic {
1423 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1424 severity: Some(DiagnosticSeverity::ERROR),
1425 message: "undefined variable 'BB'".to_string(),
1426 source: Some("disk".to_string()),
1427 ..Default::default()
1428 },
1429 lsp::Diagnostic {
1430 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1431 severity: Some(DiagnosticSeverity::ERROR),
1432 source: Some("disk".to_string()),
1433 message: "undefined variable 'CCC'".to_string(),
1434 ..Default::default()
1435 },
1436 ],
1437 });
1438
1439 // The diagnostics have moved down since they were created.
1440 buffer.next_notification(cx).await;
1441 cx.foreground().run_until_parked();
1442 buffer.read_with(cx, |buffer, _| {
1443 assert_eq!(
1444 buffer
1445 .snapshot()
1446 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1447 .collect::<Vec<_>>(),
1448 &[
1449 DiagnosticEntry {
1450 range: Point::new(3, 9)..Point::new(3, 11),
1451 diagnostic: Diagnostic {
1452 source: Some("disk".into()),
1453 severity: DiagnosticSeverity::ERROR,
1454 message: "undefined variable 'BB'".to_string(),
1455 is_disk_based: true,
1456 group_id: 1,
1457 is_primary: true,
1458 ..Default::default()
1459 },
1460 },
1461 DiagnosticEntry {
1462 range: Point::new(4, 9)..Point::new(4, 12),
1463 diagnostic: Diagnostic {
1464 source: Some("disk".into()),
1465 severity: DiagnosticSeverity::ERROR,
1466 message: "undefined variable 'CCC'".to_string(),
1467 is_disk_based: true,
1468 group_id: 2,
1469 is_primary: true,
1470 ..Default::default()
1471 }
1472 }
1473 ]
1474 );
1475 assert_eq!(
1476 chunks_with_diagnostics(buffer, 0..buffer.len()),
1477 [
1478 ("\n\nfn a() { ".to_string(), None),
1479 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1480 (" }\nfn b() { ".to_string(), None),
1481 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1482 (" }\nfn c() { ".to_string(), None),
1483 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1484 (" }\n".to_string(), None),
1485 ]
1486 );
1487 assert_eq!(
1488 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1489 [
1490 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1491 (" }\nfn c() { ".to_string(), None),
1492 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1493 ]
1494 );
1495 });
1496
1497 // Ensure overlapping diagnostics are highlighted correctly.
1498 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1499 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1500 version: Some(open_notification.text_document.version),
1501 diagnostics: vec![
1502 lsp::Diagnostic {
1503 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1504 severity: Some(DiagnosticSeverity::ERROR),
1505 message: "undefined variable 'A'".to_string(),
1506 source: Some("disk".to_string()),
1507 ..Default::default()
1508 },
1509 lsp::Diagnostic {
1510 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1511 severity: Some(DiagnosticSeverity::WARNING),
1512 message: "unreachable statement".to_string(),
1513 source: Some("disk".to_string()),
1514 ..Default::default()
1515 },
1516 ],
1517 });
1518
1519 buffer.next_notification(cx).await;
1520 cx.foreground().run_until_parked();
1521 buffer.read_with(cx, |buffer, _| {
1522 assert_eq!(
1523 buffer
1524 .snapshot()
1525 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1526 .collect::<Vec<_>>(),
1527 &[
1528 DiagnosticEntry {
1529 range: Point::new(2, 9)..Point::new(2, 12),
1530 diagnostic: Diagnostic {
1531 source: Some("disk".into()),
1532 severity: DiagnosticSeverity::WARNING,
1533 message: "unreachable statement".to_string(),
1534 is_disk_based: true,
1535 group_id: 4,
1536 is_primary: true,
1537 ..Default::default()
1538 }
1539 },
1540 DiagnosticEntry {
1541 range: Point::new(2, 9)..Point::new(2, 10),
1542 diagnostic: Diagnostic {
1543 source: Some("disk".into()),
1544 severity: DiagnosticSeverity::ERROR,
1545 message: "undefined variable 'A'".to_string(),
1546 is_disk_based: true,
1547 group_id: 3,
1548 is_primary: true,
1549 ..Default::default()
1550 },
1551 }
1552 ]
1553 );
1554 assert_eq!(
1555 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1556 [
1557 ("fn a() { ".to_string(), None),
1558 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1559 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1560 ("\n".to_string(), None),
1561 ]
1562 );
1563 assert_eq!(
1564 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1565 [
1566 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1567 ("\n".to_string(), None),
1568 ]
1569 );
1570 });
1571
1572 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1573 // changes since the last save.
1574 buffer.update(cx, |buffer, cx| {
1575 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1576 buffer.edit(
1577 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1578 None,
1579 cx,
1580 );
1581 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1582 });
1583 let change_notification_2 = fake_server
1584 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1585 .await;
1586 assert!(
1587 change_notification_2.text_document.version > change_notification_1.text_document.version
1588 );
1589
1590 // Handle out-of-order diagnostics
1591 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1592 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1593 version: Some(change_notification_2.text_document.version),
1594 diagnostics: vec![
1595 lsp::Diagnostic {
1596 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1597 severity: Some(DiagnosticSeverity::ERROR),
1598 message: "undefined variable 'BB'".to_string(),
1599 source: Some("disk".to_string()),
1600 ..Default::default()
1601 },
1602 lsp::Diagnostic {
1603 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1604 severity: Some(DiagnosticSeverity::WARNING),
1605 message: "undefined variable 'A'".to_string(),
1606 source: Some("disk".to_string()),
1607 ..Default::default()
1608 },
1609 ],
1610 });
1611
1612 buffer.next_notification(cx).await;
1613 cx.foreground().run_until_parked();
1614 buffer.read_with(cx, |buffer, _| {
1615 assert_eq!(
1616 buffer
1617 .snapshot()
1618 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1619 .collect::<Vec<_>>(),
1620 &[
1621 DiagnosticEntry {
1622 range: Point::new(2, 21)..Point::new(2, 22),
1623 diagnostic: Diagnostic {
1624 source: Some("disk".into()),
1625 severity: DiagnosticSeverity::WARNING,
1626 message: "undefined variable 'A'".to_string(),
1627 is_disk_based: true,
1628 group_id: 6,
1629 is_primary: true,
1630 ..Default::default()
1631 }
1632 },
1633 DiagnosticEntry {
1634 range: Point::new(3, 9)..Point::new(3, 14),
1635 diagnostic: Diagnostic {
1636 source: Some("disk".into()),
1637 severity: DiagnosticSeverity::ERROR,
1638 message: "undefined variable 'BB'".to_string(),
1639 is_disk_based: true,
1640 group_id: 5,
1641 is_primary: true,
1642 ..Default::default()
1643 },
1644 }
1645 ]
1646 );
1647 });
1648}
1649
1650#[gpui::test]
1651async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1652 init_test(cx);
1653
1654 let text = concat!(
1655 "let one = ;\n", //
1656 "let two = \n",
1657 "let three = 3;\n",
1658 );
1659
1660 let fs = FakeFs::new(cx.background());
1661 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1662
1663 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1664 let buffer = project
1665 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1666 .await
1667 .unwrap();
1668
1669 project.update(cx, |project, cx| {
1670 project
1671 .update_buffer_diagnostics(
1672 &buffer,
1673 LanguageServerId(0),
1674 None,
1675 vec![
1676 DiagnosticEntry {
1677 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1678 diagnostic: Diagnostic {
1679 severity: DiagnosticSeverity::ERROR,
1680 message: "syntax error 1".to_string(),
1681 ..Default::default()
1682 },
1683 },
1684 DiagnosticEntry {
1685 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1686 diagnostic: Diagnostic {
1687 severity: DiagnosticSeverity::ERROR,
1688 message: "syntax error 2".to_string(),
1689 ..Default::default()
1690 },
1691 },
1692 ],
1693 cx,
1694 )
1695 .unwrap();
1696 });
1697
1698 // An empty range is extended forward to include the following character.
1699 // At the end of a line, an empty range is extended backward to include
1700 // the preceding character.
1701 buffer.read_with(cx, |buffer, _| {
1702 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1703 assert_eq!(
1704 chunks
1705 .iter()
1706 .map(|(s, d)| (s.as_str(), *d))
1707 .collect::<Vec<_>>(),
1708 &[
1709 ("let one = ", None),
1710 (";", Some(DiagnosticSeverity::ERROR)),
1711 ("\nlet two =", None),
1712 (" ", Some(DiagnosticSeverity::ERROR)),
1713 ("\nlet three = 3;\n", None)
1714 ]
1715 );
1716 });
1717}
1718
1719#[gpui::test]
1720async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1721 init_test(cx);
1722
1723 let fs = FakeFs::new(cx.background());
1724 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1725 .await;
1726
1727 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1728
1729 project.update(cx, |project, cx| {
1730 project
1731 .update_diagnostic_entries(
1732 LanguageServerId(0),
1733 Path::new("/dir/a.rs").to_owned(),
1734 None,
1735 vec![DiagnosticEntry {
1736 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1737 diagnostic: Diagnostic {
1738 severity: DiagnosticSeverity::ERROR,
1739 is_primary: true,
1740 message: "syntax error a1".to_string(),
1741 ..Default::default()
1742 },
1743 }],
1744 cx,
1745 )
1746 .unwrap();
1747 project
1748 .update_diagnostic_entries(
1749 LanguageServerId(1),
1750 Path::new("/dir/a.rs").to_owned(),
1751 None,
1752 vec![DiagnosticEntry {
1753 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1754 diagnostic: Diagnostic {
1755 severity: DiagnosticSeverity::ERROR,
1756 is_primary: true,
1757 message: "syntax error b1".to_string(),
1758 ..Default::default()
1759 },
1760 }],
1761 cx,
1762 )
1763 .unwrap();
1764
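        // Each server's diagnostics are tracked separately, so the same range reported
        // by two servers counts as two errors.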
1765 assert_eq!(
1766 project.diagnostic_summary(cx),
1767 DiagnosticSummary {
1768 error_count: 2,
1769 warning_count: 0,
1770 }
1771 );
1772 });
1773}
1774
1775#[gpui::test]
1776async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
1777 init_test(cx);
1778
1779 let mut language = Language::new(
1780 LanguageConfig {
1781 name: "Rust".into(),
1782 path_suffixes: vec!["rs".to_string()],
1783 ..Default::default()
1784 },
1785 Some(tree_sitter_rust::language()),
1786 );
1787 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
1788
1789 let text = "
1790 fn a() {
1791 f1();
1792 }
1793 fn b() {
1794 f2();
1795 }
1796 fn c() {
1797 f3();
1798 }
1799 "
1800 .unindent();
1801
1802 let fs = FakeFs::new(cx.background());
1803 fs.insert_tree(
1804 "/dir",
1805 json!({
1806 "a.rs": text.clone(),
1807 }),
1808 )
1809 .await;
1810
1811 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1812 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
1813 let buffer = project
1814 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1815 .await
1816 .unwrap();
1817
1818 let mut fake_server = fake_servers.next().await.unwrap();
1819 let lsp_document_version = fake_server
1820 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1821 .await
1822 .text_document
1823 .version;
1824
1825 // Simulate editing the buffer after the language server computes some edits.
1826 buffer.update(cx, |buffer, cx| {
1827 buffer.edit(
1828 [(
1829 Point::new(0, 0)..Point::new(0, 0),
1830 "// above first function\n",
1831 )],
1832 None,
1833 cx,
1834 );
1835 buffer.edit(
1836 [(
1837 Point::new(2, 0)..Point::new(2, 0),
1838 " // inside first function\n",
1839 )],
1840 None,
1841 cx,
1842 );
1843 buffer.edit(
1844 [(
1845 Point::new(6, 4)..Point::new(6, 4),
1846 "// inside second function ",
1847 )],
1848 None,
1849 cx,
1850 );
1851
1852 assert_eq!(
1853 buffer.text(),
1854 "
1855 // above first function
1856 fn a() {
1857 // inside first function
1858 f1();
1859 }
1860 fn b() {
1861 // inside second function f2();
1862 }
1863 fn c() {
1864 f3();
1865 }
1866 "
1867 .unindent()
1868 );
1869 });
1870
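    // These edits are expressed against the document version the server last saw
    // (captured above), so they must be transformed onto the current buffer contents.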
1871 let edits = project
1872 .update(cx, |project, cx| {
1873 project.edits_from_lsp(
1874 &buffer,
1875 vec![
1876 // replace body of first function
1877 lsp::TextEdit {
1878 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1879 new_text: "
1880 fn a() {
1881 f10();
1882 }
1883 "
1884 .unindent(),
1885 },
1886 // edit inside second function
1887 lsp::TextEdit {
1888 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1889 new_text: "00".into(),
1890 },
1891 // edit inside third function via two distinct edits
1892 lsp::TextEdit {
1893 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1894 new_text: "4000".into(),
1895 },
1896 lsp::TextEdit {
1897 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1898 new_text: "".into(),
1899 },
1900 ],
1901 LanguageServerId(0),
1902 Some(lsp_document_version),
1903 cx,
1904 )
1905 })
1906 .await
1907 .unwrap();
1908
1909 buffer.update(cx, |buffer, cx| {
1910 for (range, new_text) in edits {
1911 buffer.edit([(range, new_text)], None, cx);
1912 }
1913 assert_eq!(
1914 buffer.text(),
1915 "
1916 // above first function
1917 fn a() {
1918 // inside first function
1919 f10();
1920 }
1921 fn b() {
1922 // inside second function f200();
1923 }
1924 fn c() {
1925 f4000();
1926 }
1927 "
1928 .unindent()
1929 );
1930 });
1931}
1932
1933#[gpui::test]
1934async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1935 init_test(cx);
1936
1937 let text = "
1938 use a::b;
1939 use a::c;
1940
1941 fn f() {
1942 b();
1943 c();
1944 }
1945 "
1946 .unindent();
1947
1948 let fs = FakeFs::new(cx.background());
1949 fs.insert_tree(
1950 "/dir",
1951 json!({
1952 "a.rs": text.clone(),
1953 }),
1954 )
1955 .await;
1956
1957 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1958 let buffer = project
1959 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1960 .await
1961 .unwrap();
1962
1963 // Simulate the language server sending us a small edit in the form of a very large diff.
1964 // Rust-analyzer does this when performing a merge-imports code action.
1965 let edits = project
1966 .update(cx, |project, cx| {
1967 project.edits_from_lsp(
1968 &buffer,
1969 [
1970 // Replace the first use statement without editing the semicolon.
1971 lsp::TextEdit {
1972 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1973 new_text: "a::{b, c}".into(),
1974 },
1975 // Reinsert the remainder of the file between the semicolon and the final
1976 // newline of the file.
1977 lsp::TextEdit {
1978 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1979 new_text: "\n\n".into(),
1980 },
1981 lsp::TextEdit {
1982 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1983 new_text: "
1984 fn f() {
1985 b();
1986 c();
1987 }"
1988 .unindent(),
1989 },
1990 // Delete everything after the first newline of the file.
1991 lsp::TextEdit {
1992 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1993 new_text: "".into(),
1994 },
1995 ],
1996 LanguageServerId(0),
1997 None,
1998 cx,
1999 )
2000 })
2001 .await
2002 .unwrap();
2003
2004 buffer.update(cx, |buffer, cx| {
2005 let edits = edits
2006 .into_iter()
2007 .map(|(range, text)| {
2008 (
2009 range.start.to_point(buffer)..range.end.to_point(buffer),
2010 text,
2011 )
2012 })
2013 .collect::<Vec<_>>();
2014
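        // The single large LSP edit should be minimized into two small edits: one
        // rewriting the first import and one deleting the second import line.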
2015 assert_eq!(
2016 edits,
2017 [
2018 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2019 (Point::new(1, 0)..Point::new(2, 0), "".into())
2020 ]
2021 );
2022
2023 for (range, new_text) in edits {
2024 buffer.edit([(range, new_text)], None, cx);
2025 }
2026 assert_eq!(
2027 buffer.text(),
2028 "
2029 use a::{b, c};
2030
2031 fn f() {
2032 b();
2033 c();
2034 }
2035 "
2036 .unindent()
2037 );
2038 });
2039}
2040
2041#[gpui::test]
2042async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
2043 init_test(cx);
2044
2045 let text = "
2046 use a::b;
2047 use a::c;
2048
2049 fn f() {
2050 b();
2051 c();
2052 }
2053 "
2054 .unindent();
2055
2056 let fs = FakeFs::new(cx.background());
2057 fs.insert_tree(
2058 "/dir",
2059 json!({
2060 "a.rs": text.clone(),
2061 }),
2062 )
2063 .await;
2064
2065 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2066 let buffer = project
2067 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2068 .await
2069 .unwrap();
2070
    // Simulate the language server sending us edits out of order, with ranges
    // that are sometimes inverted or that point to invalid locations.
2073 let edits = project
2074 .update(cx, |project, cx| {
2075 project.edits_from_lsp(
2076 &buffer,
2077 [
2078 lsp::TextEdit {
2079 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2080 new_text: "\n\n".into(),
2081 },
2082 lsp::TextEdit {
2083 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2084 new_text: "a::{b, c}".into(),
2085 },
2086 lsp::TextEdit {
2087 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2088 new_text: "".into(),
2089 },
2090 lsp::TextEdit {
2091 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2092 new_text: "
2093 fn f() {
2094 b();
2095 c();
2096 }"
2097 .unindent(),
2098 },
2099 ],
2100 LanguageServerId(0),
2101 None,
2102 cx,
2103 )
2104 })
2105 .await
2106 .unwrap();
2107
2108 buffer.update(cx, |buffer, cx| {
2109 let edits = edits
2110 .into_iter()
2111 .map(|(range, text)| {
2112 (
2113 range.start.to_point(buffer)..range.end.to_point(buffer),
2114 text,
2115 )
2116 })
2117 .collect::<Vec<_>>();
2118
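        // Despite the inverted and out-of-bounds ranges, the edits should resolve
        // to the same two minimal edits as in the previous test.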
2119 assert_eq!(
2120 edits,
2121 [
2122 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2123 (Point::new(1, 0)..Point::new(2, 0), "".into())
2124 ]
2125 );
2126
2127 for (range, new_text) in edits {
2128 buffer.edit([(range, new_text)], None, cx);
2129 }
2130 assert_eq!(
2131 buffer.text(),
2132 "
2133 use a::{b, c};
2134
2135 fn f() {
2136 b();
2137 c();
2138 }
2139 "
2140 .unindent()
2141 );
2142 });
2143}
2144
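// Collects the buffer's chunks in the given range, merging adjacent chunks that
// share the same diagnostic severity.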
2145fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2146 buffer: &Buffer,
2147 range: Range<T>,
2148) -> Vec<(String, Option<DiagnosticSeverity>)> {
2149 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2150 for chunk in buffer.snapshot().chunks(range, true) {
2151 if chunks.last().map_or(false, |prev_chunk| {
2152 prev_chunk.1 == chunk.diagnostic_severity
2153 }) {
2154 chunks.last_mut().unwrap().0.push_str(chunk.text);
2155 } else {
2156 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2157 }
2158 }
2159 chunks
2160}
2161
2162#[gpui::test(iterations = 10)]
2163async fn test_definition(cx: &mut gpui::TestAppContext) {
2164 init_test(cx);
2165
2166 let mut language = Language::new(
2167 LanguageConfig {
2168 name: "Rust".into(),
2169 path_suffixes: vec!["rs".to_string()],
2170 ..Default::default()
2171 },
2172 Some(tree_sitter_rust::language()),
2173 );
2174 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2175
2176 let fs = FakeFs::new(cx.background());
2177 fs.insert_tree(
2178 "/dir",
2179 json!({
2180 "a.rs": "const fn a() { A }",
2181 "b.rs": "const y: i32 = crate::a()",
2182 }),
2183 )
2184 .await;
2185
2186 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2187 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2188
2189 let buffer = project
2190 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2191 .await
2192 .unwrap();
2193
2194 let fake_server = fake_servers.next().await.unwrap();
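    // Respond to the definition request with a location in `a.rs`, which lies
    // outside the project's single-file worktree.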
2195 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2196 let params = params.text_document_position_params;
2197 assert_eq!(
2198 params.text_document.uri.to_file_path().unwrap(),
2199 Path::new("/dir/b.rs"),
2200 );
2201 assert_eq!(params.position, lsp::Position::new(0, 22));
2202
2203 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2204 lsp::Location::new(
2205 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2206 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2207 ),
2208 )))
2209 });
2210
2211 let mut definitions = project
2212 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2213 .await
2214 .unwrap();
2215
    // Assert that no new language server was started.
2217 cx.foreground().run_until_parked();
2218 assert!(fake_servers.try_next().is_err());
2219
2220 assert_eq!(definitions.len(), 1);
2221 let definition = definitions.pop().unwrap();
2222 cx.update(|cx| {
2223 let target_buffer = definition.target.buffer.read(cx);
2224 assert_eq!(
2225 target_buffer
2226 .file()
2227 .unwrap()
2228 .as_local()
2229 .unwrap()
2230 .abs_path(cx),
2231 Path::new("/dir/a.rs"),
2232 );
2233 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2234 assert_eq!(
2235 list_worktrees(&project, cx),
2236 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2237 );
2238
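        // Once the definition (and the buffer handle it holds) is dropped, the
        // invisible worktree for `a.rs` should be released.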
2239 drop(definition);
2240 });
2241 cx.read(|cx| {
2242 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2243 });
2244
2245 fn list_worktrees<'a>(
2246 project: &'a ModelHandle<Project>,
2247 cx: &'a AppContext,
2248 ) -> Vec<(&'a Path, bool)> {
2249 project
2250 .read(cx)
2251 .worktrees(cx)
2252 .map(|worktree| {
2253 let worktree = worktree.read(cx);
2254 (
2255 worktree.as_local().unwrap().abs_path().as_ref(),
2256 worktree.is_visible(),
2257 )
2258 })
2259 .collect::<Vec<_>>()
2260 }
2261}
2262
2263#[gpui::test]
2264async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2265 init_test(cx);
2266
2267 let mut language = Language::new(
2268 LanguageConfig {
2269 name: "TypeScript".into(),
2270 path_suffixes: vec!["ts".to_string()],
2271 ..Default::default()
2272 },
2273 Some(tree_sitter_typescript::language_typescript()),
2274 );
2275 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2276
2277 let fs = FakeFs::new(cx.background());
2278 fs.insert_tree(
2279 "/dir",
2280 json!({
2281 "a.ts": "",
2282 }),
2283 )
2284 .await;
2285
2286 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2287 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2288 let buffer = project
2289 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2290 .await
2291 .unwrap();
2292
2293 let fake_server = fake_language_servers.next().await.unwrap();
2294
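    // The server's completion items carry no edit ranges, so the range to replace
    // should be inferred from the word preceding the cursor ("fqn").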
2295 let text = "let a = b.fqn";
2296 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2297 let completions = project.update(cx, |project, cx| {
2298 project.completions(&buffer, text.len(), cx)
2299 });
2300
2301 fake_server
2302 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2303 Ok(Some(lsp::CompletionResponse::Array(vec![
2304 lsp::CompletionItem {
2305 label: "fullyQualifiedName?".into(),
2306 insert_text: Some("fullyQualifiedName".into()),
2307 ..Default::default()
2308 },
2309 ])))
2310 })
2311 .next()
2312 .await;
2313 let completions = completions.await.unwrap();
2314 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2315 assert_eq!(completions.len(), 1);
2316 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2317 assert_eq!(
2318 completions[0].old_range.to_offset(&snapshot),
2319 text.len() - 3..text.len()
2320 );
2321
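    // When completing within a string, the inferred range should cover only the
    // partial word preceding the cursor ("cmp").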
2322 let text = "let a = \"atoms/cmp\"";
2323 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2324 let completions = project.update(cx, |project, cx| {
2325 project.completions(&buffer, text.len() - 1, cx)
2326 });
2327
2328 fake_server
2329 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2330 Ok(Some(lsp::CompletionResponse::Array(vec![
2331 lsp::CompletionItem {
2332 label: "component".into(),
2333 ..Default::default()
2334 },
2335 ])))
2336 })
2337 .next()
2338 .await;
2339 let completions = completions.await.unwrap();
2340 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2341 assert_eq!(completions.len(), 1);
2342 assert_eq!(completions[0].new_text, "component");
2343 assert_eq!(
2344 completions[0].old_range.to_offset(&snapshot),
2345 text.len() - 4..text.len() - 1
2346 );
2347}
2348
2349#[gpui::test]
2350async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2351 init_test(cx);
2352
2353 let mut language = Language::new(
2354 LanguageConfig {
2355 name: "TypeScript".into(),
2356 path_suffixes: vec!["ts".to_string()],
2357 ..Default::default()
2358 },
2359 Some(tree_sitter_typescript::language_typescript()),
2360 );
2361 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2362
2363 let fs = FakeFs::new(cx.background());
2364 fs.insert_tree(
2365 "/dir",
2366 json!({
2367 "a.ts": "",
2368 }),
2369 )
2370 .await;
2371
2372 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2373 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2374 let buffer = project
2375 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2376 .await
2377 .unwrap();
2378
2379 let fake_server = fake_language_servers.next().await.unwrap();
2380
2381 let text = "let a = b.fqn";
2382 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2383 let completions = project.update(cx, |project, cx| {
2384 project.completions(&buffer, text.len(), cx)
2385 });
2386
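    // The completion's insert text contains carriage returns; they should be
    // normalized to plain newlines in the resulting completion.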
2387 fake_server
2388 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2389 Ok(Some(lsp::CompletionResponse::Array(vec![
2390 lsp::CompletionItem {
2391 label: "fullyQualifiedName?".into(),
2392 insert_text: Some("fully\rQualified\r\nName".into()),
2393 ..Default::default()
2394 },
2395 ])))
2396 })
2397 .next()
2398 .await;
2399 let completions = completions.await.unwrap();
2400 assert_eq!(completions.len(), 1);
2401 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2402}
2403
2404#[gpui::test(iterations = 10)]
2405async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2406 init_test(cx);
2407
2408 let mut language = Language::new(
2409 LanguageConfig {
2410 name: "TypeScript".into(),
2411 path_suffixes: vec!["ts".to_string()],
2412 ..Default::default()
2413 },
2414 None,
2415 );
2416 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2417
2418 let fs = FakeFs::new(cx.background());
2419 fs.insert_tree(
2420 "/dir",
2421 json!({
2422 "a.ts": "a",
2423 }),
2424 )
2425 .await;
2426
2427 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2428 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2429 let buffer = project
2430 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2431 .await
2432 .unwrap();
2433
2434 let fake_server = fake_language_servers.next().await.unwrap();
2435
    // The language server returns code actions that contain commands rather than edits.
2437 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2438 fake_server
2439 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2440 Ok(Some(vec![
2441 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2442 title: "The code action".into(),
2443 command: Some(lsp::Command {
2444 title: "The command".into(),
2445 command: "_the/command".into(),
2446 arguments: Some(vec![json!("the-argument")]),
2447 }),
2448 ..Default::default()
2449 }),
2450 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2451 title: "two".into(),
2452 ..Default::default()
2453 }),
2454 ]))
2455 })
2456 .next()
2457 .await;
2458
2459 let action = actions.await.unwrap()[0].clone();
2460 let apply = project.update(cx, |project, cx| {
2461 project.apply_code_action(buffer.clone(), action, true, cx)
2462 });
2463
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the action's command.
2466 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2467 |action, _| async move { Ok(action) },
2468 );
2469
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2472 fake_server
2473 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2474 let fake = fake_server.clone();
2475 move |params, _| {
2476 assert_eq!(params.command, "_the/command");
2477 let fake = fake.clone();
2478 async move {
2479 fake.server
2480 .request::<lsp::request::ApplyWorkspaceEdit>(
2481 lsp::ApplyWorkspaceEditParams {
2482 label: None,
2483 edit: lsp::WorkspaceEdit {
2484 changes: Some(
2485 [(
2486 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2487 vec![lsp::TextEdit {
2488 range: lsp::Range::new(
2489 lsp::Position::new(0, 0),
2490 lsp::Position::new(0, 0),
2491 ),
2492 new_text: "X".into(),
2493 }],
2494 )]
2495 .into_iter()
2496 .collect(),
2497 ),
2498 ..Default::default()
2499 },
2500 },
2501 )
2502 .await
2503 .unwrap();
2504 Ok(Some(json!(null)))
2505 }
2506 }
2507 })
2508 .next()
2509 .await;
2510
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2513 let transaction = apply.await.unwrap();
2514 assert!(transaction.0.contains_key(&buffer));
2515 buffer.update(cx, |buffer, cx| {
2516 assert_eq!(buffer.text(), "Xa");
2517 buffer.undo(cx);
2518 assert_eq!(buffer.text(), "a");
2519 });
2520}
2521
2522#[gpui::test(iterations = 10)]
2523async fn test_save_file(cx: &mut gpui::TestAppContext) {
2524 init_test(cx);
2525
2526 let fs = FakeFs::new(cx.background());
2527 fs.insert_tree(
2528 "/dir",
2529 json!({
2530 "file1": "the old contents",
2531 }),
2532 )
2533 .await;
2534
2535 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2536 let buffer = project
2537 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2538 .await
2539 .unwrap();
2540 buffer.update(cx, |buffer, cx| {
2541 assert_eq!(buffer.text(), "the old contents");
2542 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2543 });
2544
2545 project
2546 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2547 .await
2548 .unwrap();
2549
2550 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2551 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2552}
2553
2554#[gpui::test]
2555async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2556 init_test(cx);
2557
2558 let fs = FakeFs::new(cx.background());
2559 fs.insert_tree(
2560 "/dir",
2561 json!({
2562 "file1": "the old contents",
2563 }),
2564 )
2565 .await;
2566
2567 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2568 let buffer = project
2569 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2570 .await
2571 .unwrap();
2572 buffer.update(cx, |buffer, cx| {
2573 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2574 });
2575
2576 project
2577 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2578 .await
2579 .unwrap();
2580
2581 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2582 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2583}
2584
2585#[gpui::test]
2586async fn test_save_as(cx: &mut gpui::TestAppContext) {
2587 init_test(cx);
2588
2589 let fs = FakeFs::new(cx.background());
2590 fs.insert_tree("/dir", json!({})).await;
2591
2592 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2593
2594 let languages = project.read_with(cx, |project, _| project.languages().clone());
2595 languages.register(
2596 "/some/path",
2597 LanguageConfig {
2598 name: "Rust".into(),
2599 path_suffixes: vec!["rs".into()],
2600 ..Default::default()
2601 },
2602 tree_sitter_rust::language(),
2603 vec![],
2604 |_| Default::default(),
2605 );
2606
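    // Create an untitled buffer. Until it is saved with a path, it should be
    // treated as Plain Text.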
2607 let buffer = project.update(cx, |project, cx| {
2608 project.create_buffer("", None, cx).unwrap()
2609 });
2610 buffer.update(cx, |buffer, cx| {
2611 buffer.edit([(0..0, "abc")], None, cx);
2612 assert!(buffer.is_dirty());
2613 assert!(!buffer.has_conflict());
2614 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2615 });
2616 project
2617 .update(cx, |project, cx| {
2618 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2619 })
2620 .await
2621 .unwrap();
2622 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2623
2624 cx.foreground().run_until_parked();
2625 buffer.read_with(cx, |buffer, cx| {
2626 assert_eq!(
2627 buffer.file().unwrap().full_path(cx),
2628 Path::new("dir/file1.rs")
2629 );
2630 assert!(!buffer.is_dirty());
2631 assert!(!buffer.has_conflict());
2632 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2633 });
2634
2635 let opened_buffer = project
2636 .update(cx, |project, cx| {
2637 project.open_local_buffer("/dir/file1.rs", cx)
2638 })
2639 .await
2640 .unwrap();
2641 assert_eq!(opened_buffer, buffer);
2642}
2643
2644#[gpui::test(retries = 5)]
2645async fn test_rescan_and_remote_updates(
2646 deterministic: Arc<Deterministic>,
2647 cx: &mut gpui::TestAppContext,
2648) {
2649 init_test(cx);
2650 cx.foreground().allow_parking();
2651
2652 let dir = temp_tree(json!({
2653 "a": {
2654 "file1": "",
2655 "file2": "",
2656 "file3": "",
2657 },
2658 "b": {
2659 "c": {
2660 "file4": "",
2661 "file5": "",
2662 }
2663 }
2664 }));
2665
2666 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2667 let rpc = project.read_with(cx, |p, _| p.client.clone());
2668
2669 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2670 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2671 async move { buffer.await.unwrap() }
2672 };
2673 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2674 project.read_with(cx, |project, cx| {
2675 let tree = project.worktrees(cx).next().unwrap();
2676 tree.read(cx)
2677 .entry_for_path(path)
2678 .unwrap_or_else(|| panic!("no entry for path {}", path))
2679 .id
2680 })
2681 };
2682
2683 let buffer2 = buffer_for_path("a/file2", cx).await;
2684 let buffer3 = buffer_for_path("a/file3", cx).await;
2685 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2686 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2687
2688 let file2_id = id_for_path("a/file2", cx);
2689 let file3_id = id_for_path("a/file3", cx);
2690 let file4_id = id_for_path("b/c/file4", cx);
2691
2692 // Create a remote copy of this worktree.
2693 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2694
2695 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2696
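    // Collect the update messages produced by the local worktree so they can be
    // applied to the remote worktree below.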
2697 let updates = Arc::new(Mutex::new(Vec::new()));
2698 tree.update(cx, |tree, cx| {
2699 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2700 let updates = updates.clone();
2701 move |update| {
2702 updates.lock().push(update);
2703 async { true }
2704 }
2705 });
2706 });
2707
2708 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2709 deterministic.run_until_parked();
2710
2711 cx.read(|cx| {
2712 assert!(!buffer2.read(cx).is_dirty());
2713 assert!(!buffer3.read(cx).is_dirty());
2714 assert!(!buffer4.read(cx).is_dirty());
2715 assert!(!buffer5.read(cx).is_dirty());
2716 });
2717
2718 // Rename and delete files and directories.
2719 tree.flush_fs_events(cx).await;
2720 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2721 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2722 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2723 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2724 tree.flush_fs_events(cx).await;
2725
2726 let expected_paths = vec![
2727 "a",
2728 "a/file1",
2729 "a/file2.new",
2730 "b",
2731 "d",
2732 "d/file3",
2733 "d/file4",
2734 ];
2735
2736 cx.read(|app| {
2737 assert_eq!(
2738 tree.read(app)
2739 .paths()
2740 .map(|p| p.to_str().unwrap())
2741 .collect::<Vec<_>>(),
2742 expected_paths
2743 );
2744
2745 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2746 assert_eq!(id_for_path("d/file3", cx), file3_id);
2747 assert_eq!(id_for_path("d/file4", cx), file4_id);
2748
2749 assert_eq!(
2750 buffer2.read(app).file().unwrap().path().as_ref(),
2751 Path::new("a/file2.new")
2752 );
2753 assert_eq!(
2754 buffer3.read(app).file().unwrap().path().as_ref(),
2755 Path::new("d/file3")
2756 );
2757 assert_eq!(
2758 buffer4.read(app).file().unwrap().path().as_ref(),
2759 Path::new("d/file4")
2760 );
2761 assert_eq!(
2762 buffer5.read(app).file().unwrap().path().as_ref(),
2763 Path::new("b/c/file5")
2764 );
2765
2766 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2767 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2768 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2769 assert!(buffer5.read(app).file().unwrap().is_deleted());
2770 });
2771
2772 // Update the remote worktree. Check that it becomes consistent with the
2773 // local worktree.
2774 deterministic.run_until_parked();
2775 remote.update(cx, |remote, _| {
2776 for update in updates.lock().drain(..) {
2777 remote.as_remote_mut().unwrap().update_from_remote(update);
2778 }
2779 });
2780 deterministic.run_until_parked();
2781 remote.read_with(cx, |remote, _| {
2782 assert_eq!(
2783 remote
2784 .paths()
2785 .map(|p| p.to_str().unwrap())
2786 .collect::<Vec<_>>(),
2787 expected_paths
2788 );
2789 });
2790}
2791
2792#[gpui::test(iterations = 10)]
2793async fn test_buffer_identity_across_renames(
2794 deterministic: Arc<Deterministic>,
2795 cx: &mut gpui::TestAppContext,
2796) {
2797 init_test(cx);
2798
2799 let fs = FakeFs::new(cx.background());
2800 fs.insert_tree(
2801 "/dir",
2802 json!({
2803 "a": {
2804 "file1": "",
2805 }
2806 }),
2807 )
2808 .await;
2809
2810 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2811 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2812 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2813
2814 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2815 project.read_with(cx, |project, cx| {
2816 let tree = project.worktrees(cx).next().unwrap();
2817 tree.read(cx)
2818 .entry_for_path(path)
2819 .unwrap_or_else(|| panic!("no entry for path {}", path))
2820 .id
2821 })
2822 };
2823
2824 let dir_id = id_for_path("a", cx);
2825 let file_id = id_for_path("a/file1", cx);
2826 let buffer = project
2827 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2828 .await
2829 .unwrap();
2830 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2831
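    // Rename the directory containing the buffer's file. Entry ids and the
    // buffer's identity should survive the rename.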
2832 project
2833 .update(cx, |project, cx| {
2834 project.rename_entry(dir_id, Path::new("b"), cx)
2835 })
2836 .unwrap()
2837 .await
2838 .unwrap();
2839 deterministic.run_until_parked();
2840 assert_eq!(id_for_path("b", cx), dir_id);
2841 assert_eq!(id_for_path("b/file1", cx), file_id);
2842 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2843}
2844
2845#[gpui::test]
2846async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2847 init_test(cx);
2848
2849 let fs = FakeFs::new(cx.background());
2850 fs.insert_tree(
2851 "/dir",
2852 json!({
2853 "a.txt": "a-contents",
2854 "b.txt": "b-contents",
2855 }),
2856 )
2857 .await;
2858
2859 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2860
2861 // Spawn multiple tasks to open paths, repeating some paths.
2862 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2863 (
2864 p.open_local_buffer("/dir/a.txt", cx),
2865 p.open_local_buffer("/dir/b.txt", cx),
2866 p.open_local_buffer("/dir/a.txt", cx),
2867 )
2868 });
2869
2870 let buffer_a_1 = buffer_a_1.await.unwrap();
2871 let buffer_a_2 = buffer_a_2.await.unwrap();
2872 let buffer_b = buffer_b.await.unwrap();
2873 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2874 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2875
2876 // There is only one buffer per path.
2877 let buffer_a_id = buffer_a_1.id();
2878 assert_eq!(buffer_a_2.id(), buffer_a_id);
2879
2880 // Open the same path again while it is still open.
2881 drop(buffer_a_1);
2882 let buffer_a_3 = project
2883 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2884 .await
2885 .unwrap();
2886
2887 // There's still only one buffer per path.
2888 assert_eq!(buffer_a_3.id(), buffer_a_id);
2889}
2890
2891#[gpui::test]
2892async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2893 init_test(cx);
2894
2895 let fs = FakeFs::new(cx.background());
2896 fs.insert_tree(
2897 "/dir",
2898 json!({
2899 "file1": "abc",
2900 "file2": "def",
2901 "file3": "ghi",
2902 }),
2903 )
2904 .await;
2905
2906 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2907
2908 let buffer1 = project
2909 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2910 .await
2911 .unwrap();
2912 let events = Rc::new(RefCell::new(Vec::new()));
2913
    // Initially, the buffer isn't dirty.
2915 buffer1.update(cx, |buffer, cx| {
2916 cx.subscribe(&buffer1, {
2917 let events = events.clone();
2918 move |_, _, event, _| match event {
2919 BufferEvent::Operation(_) => {}
2920 _ => events.borrow_mut().push(event.clone()),
2921 }
2922 })
2923 .detach();
2924
2925 assert!(!buffer.is_dirty());
2926 assert!(events.borrow().is_empty());
2927
2928 buffer.edit([(1..2, "")], None, cx);
2929 });
2930
    // After the first edit, the buffer is dirty and emits a dirty-changed event.
2932 buffer1.update(cx, |buffer, cx| {
2933 assert!(buffer.text() == "ac");
2934 assert!(buffer.is_dirty());
2935 assert_eq!(
2936 *events.borrow(),
2937 &[language::Event::Edited, language::Event::DirtyChanged]
2938 );
2939 events.borrow_mut().clear();
2940 buffer.did_save(
2941 buffer.version(),
2942 buffer.as_rope().fingerprint(),
2943 buffer.file().unwrap().mtime(),
2944 cx,
2945 );
2946 });
2947
    // After saving, the buffer is no longer dirty and emits a saved event.
2949 buffer1.update(cx, |buffer, cx| {
2950 assert!(!buffer.is_dirty());
2951 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2952 events.borrow_mut().clear();
2953
2954 buffer.edit([(1..1, "B")], None, cx);
2955 buffer.edit([(2..2, "D")], None, cx);
2956 });
2957
    // After editing again, the buffer is dirty and emits another dirty-changed event.
2959 buffer1.update(cx, |buffer, cx| {
2960 assert!(buffer.text() == "aBDc");
2961 assert!(buffer.is_dirty());
2962 assert_eq!(
2963 *events.borrow(),
2964 &[
2965 language::Event::Edited,
2966 language::Event::DirtyChanged,
2967 language::Event::Edited,
2968 ],
2969 );
2970 events.borrow_mut().clear();
2971
2972 // After restoring the buffer to its previously-saved state,
2973 // the buffer is not considered dirty anymore.
2974 buffer.edit([(1..3, "")], None, cx);
2975 assert!(buffer.text() == "ac");
2976 assert!(!buffer.is_dirty());
2977 });
2978
2979 assert_eq!(
2980 *events.borrow(),
2981 &[language::Event::Edited, language::Event::DirtyChanged]
2982 );
2983
2984 // When a file is deleted, the buffer is considered dirty.
2985 let events = Rc::new(RefCell::new(Vec::new()));
2986 let buffer2 = project
2987 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2988 .await
2989 .unwrap();
2990 buffer2.update(cx, |_, cx| {
2991 cx.subscribe(&buffer2, {
2992 let events = events.clone();
2993 move |_, _, event, _| events.borrow_mut().push(event.clone())
2994 })
2995 .detach();
2996 });
2997
2998 fs.remove_file("/dir/file2".as_ref(), Default::default())
2999 .await
3000 .unwrap();
3001 cx.foreground().run_until_parked();
3002 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
3003 assert_eq!(
3004 *events.borrow(),
3005 &[
3006 language::Event::DirtyChanged,
3007 language::Event::FileHandleChanged
3008 ]
3009 );
3010
    // When a buffer is already dirty and its file is deleted, we don't emit another dirty-changed event.
3012 let events = Rc::new(RefCell::new(Vec::new()));
3013 let buffer3 = project
3014 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3015 .await
3016 .unwrap();
3017 buffer3.update(cx, |_, cx| {
3018 cx.subscribe(&buffer3, {
3019 let events = events.clone();
3020 move |_, _, event, _| events.borrow_mut().push(event.clone())
3021 })
3022 .detach();
3023 });
3024
3025 buffer3.update(cx, |buffer, cx| {
3026 buffer.edit([(0..0, "x")], None, cx);
3027 });
3028 events.borrow_mut().clear();
3029 fs.remove_file("/dir/file3".as_ref(), Default::default())
3030 .await
3031 .unwrap();
3032 cx.foreground().run_until_parked();
3033 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3034 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3035}
3036
3037#[gpui::test]
3038async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3039 init_test(cx);
3040
3041 let initial_contents = "aaa\nbbbbb\nc\n";
3042 let fs = FakeFs::new(cx.background());
3043 fs.insert_tree(
3044 "/dir",
3045 json!({
3046 "the-file": initial_contents,
3047 }),
3048 )
3049 .await;
3050 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3051 let buffer = project
3052 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3053 .await
3054 .unwrap();
3055
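    // Create an anchor within each of the first three lines so we can verify how
    // anchors move when the file is reloaded from disk.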
3056 let anchors = (0..3)
3057 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3058 .collect::<Vec<_>>();
3059
3060 // Change the file on disk, adding two new lines of text, and removing
3061 // one line.
3062 buffer.read_with(cx, |buffer, _| {
3063 assert!(!buffer.is_dirty());
3064 assert!(!buffer.has_conflict());
3065 });
3066 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3067 fs.save(
3068 "/dir/the-file".as_ref(),
3069 &new_contents.into(),
3070 LineEnding::Unix,
3071 )
3072 .await
3073 .unwrap();
3074
3075 // Because the buffer was not modified, it is reloaded from disk. Its
3076 // contents are edited according to the diff between the old and new
3077 // file contents.
3078 cx.foreground().run_until_parked();
3079 buffer.update(cx, |buffer, _| {
3080 assert_eq!(buffer.text(), new_contents);
3081 assert!(!buffer.is_dirty());
3082 assert!(!buffer.has_conflict());
3083
3084 let anchor_positions = anchors
3085 .iter()
3086 .map(|anchor| anchor.to_point(&*buffer))
3087 .collect::<Vec<_>>();
3088 assert_eq!(
3089 anchor_positions,
3090 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3091 );
3092 });
3093
3094 // Modify the buffer
3095 buffer.update(cx, |buffer, cx| {
3096 buffer.edit([(0..0, " ")], None, cx);
3097 assert!(buffer.is_dirty());
3098 assert!(!buffer.has_conflict());
3099 });
3100
3101 // Change the file on disk again, adding blank lines to the beginning.
3102 fs.save(
3103 "/dir/the-file".as_ref(),
3104 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3105 LineEnding::Unix,
3106 )
3107 .await
3108 .unwrap();
3109
3110 // Because the buffer is modified, it doesn't reload from disk, but is
3111 // marked as having a conflict.
3112 cx.foreground().run_until_parked();
3113 buffer.read_with(cx, |buffer, _| {
3114 assert!(buffer.has_conflict());
3115 });
3116}
3117
3118#[gpui::test]
3119async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3120 init_test(cx);
3121
3122 let fs = FakeFs::new(cx.background());
3123 fs.insert_tree(
3124 "/dir",
3125 json!({
3126 "file1": "a\nb\nc\n",
3127 "file2": "one\r\ntwo\r\nthree\r\n",
3128 }),
3129 )
3130 .await;
3131
3132 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3133 let buffer1 = project
3134 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3135 .await
3136 .unwrap();
3137 let buffer2 = project
3138 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3139 .await
3140 .unwrap();
3141
3142 buffer1.read_with(cx, |buffer, _| {
3143 assert_eq!(buffer.text(), "a\nb\nc\n");
3144 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3145 });
3146 buffer2.read_with(cx, |buffer, _| {
3147 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3148 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3149 });
3150
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state should update accordingly.
3153 fs.save(
3154 "/dir/file1".as_ref(),
3155 &"aaa\nb\nc\n".into(),
3156 LineEnding::Windows,
3157 )
3158 .await
3159 .unwrap();
3160 cx.foreground().run_until_parked();
3161 buffer1.read_with(cx, |buffer, _| {
3162 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3163 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3164 });
3165
    // Save a buffer with Windows line endings. The file should be written back with CRLF line endings.
3167 buffer2.update(cx, |buffer, cx| {
3168 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3169 });
3170 project
3171 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3172 .await
3173 .unwrap();
3174 assert_eq!(
3175 fs.load("/dir/file2".as_ref()).await.unwrap(),
3176 "one\r\ntwo\r\nthree\r\nfour\r\n",
3177 );
3178}
3179
3180#[gpui::test]
3181async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3182 init_test(cx);
3183
3184 let fs = FakeFs::new(cx.background());
3185 fs.insert_tree(
3186 "/the-dir",
3187 json!({
3188 "a.rs": "
3189 fn foo(mut v: Vec<usize>) {
3190 for x in &v {
3191 v.push(1);
3192 }
3193 }
3194 "
3195 .unindent(),
3196 }),
3197 )
3198 .await;
3199
3200 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3201 let buffer = project
3202 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3203 .await
3204 .unwrap();
3205
3206 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3207 let message = lsp::PublishDiagnosticsParams {
3208 uri: buffer_uri.clone(),
3209 diagnostics: vec![
3210 lsp::Diagnostic {
3211 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3212 severity: Some(DiagnosticSeverity::WARNING),
3213 message: "error 1".to_string(),
3214 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3215 location: lsp::Location {
3216 uri: buffer_uri.clone(),
3217 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3218 },
3219 message: "error 1 hint 1".to_string(),
3220 }]),
3221 ..Default::default()
3222 },
3223 lsp::Diagnostic {
3224 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3225 severity: Some(DiagnosticSeverity::HINT),
3226 message: "error 1 hint 1".to_string(),
3227 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3228 location: lsp::Location {
3229 uri: buffer_uri.clone(),
3230 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3231 },
3232 message: "original diagnostic".to_string(),
3233 }]),
3234 ..Default::default()
3235 },
3236 lsp::Diagnostic {
3237 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3238 severity: Some(DiagnosticSeverity::ERROR),
3239 message: "error 2".to_string(),
3240 related_information: Some(vec![
3241 lsp::DiagnosticRelatedInformation {
3242 location: lsp::Location {
3243 uri: buffer_uri.clone(),
3244 range: lsp::Range::new(
3245 lsp::Position::new(1, 13),
3246 lsp::Position::new(1, 15),
3247 ),
3248 },
3249 message: "error 2 hint 1".to_string(),
3250 },
3251 lsp::DiagnosticRelatedInformation {
3252 location: lsp::Location {
3253 uri: buffer_uri.clone(),
3254 range: lsp::Range::new(
3255 lsp::Position::new(1, 13),
3256 lsp::Position::new(1, 15),
3257 ),
3258 },
3259 message: "error 2 hint 2".to_string(),
3260 },
3261 ]),
3262 ..Default::default()
3263 },
3264 lsp::Diagnostic {
3265 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3266 severity: Some(DiagnosticSeverity::HINT),
3267 message: "error 2 hint 1".to_string(),
3268 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3269 location: lsp::Location {
3270 uri: buffer_uri.clone(),
3271 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3272 },
3273 message: "original diagnostic".to_string(),
3274 }]),
3275 ..Default::default()
3276 },
3277 lsp::Diagnostic {
3278 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3279 severity: Some(DiagnosticSeverity::HINT),
3280 message: "error 2 hint 2".to_string(),
3281 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3282 location: lsp::Location {
3283 uri: buffer_uri,
3284 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3285 },
3286 message: "original diagnostic".to_string(),
3287 }]),
3288 ..Default::default()
3289 },
3290 ],
3291 version: None,
3292 };
3293
3294 project
3295 .update(cx, |p, cx| {
3296 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3297 })
3298 .unwrap();
3299 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3300
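    // The diagnostics and their related information should be grouped: "error 2"
    // and its hints form group 0, and "error 1" and its hint form group 1.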
3301 assert_eq!(
3302 buffer
3303 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3304 .collect::<Vec<_>>(),
3305 &[
3306 DiagnosticEntry {
3307 range: Point::new(1, 8)..Point::new(1, 9),
3308 diagnostic: Diagnostic {
3309 severity: DiagnosticSeverity::WARNING,
3310 message: "error 1".to_string(),
3311 group_id: 1,
3312 is_primary: true,
3313 ..Default::default()
3314 }
3315 },
3316 DiagnosticEntry {
3317 range: Point::new(1, 8)..Point::new(1, 9),
3318 diagnostic: Diagnostic {
3319 severity: DiagnosticSeverity::HINT,
3320 message: "error 1 hint 1".to_string(),
3321 group_id: 1,
3322 is_primary: false,
3323 ..Default::default()
3324 }
3325 },
3326 DiagnosticEntry {
3327 range: Point::new(1, 13)..Point::new(1, 15),
3328 diagnostic: Diagnostic {
3329 severity: DiagnosticSeverity::HINT,
3330 message: "error 2 hint 1".to_string(),
3331 group_id: 0,
3332 is_primary: false,
3333 ..Default::default()
3334 }
3335 },
3336 DiagnosticEntry {
3337 range: Point::new(1, 13)..Point::new(1, 15),
3338 diagnostic: Diagnostic {
3339 severity: DiagnosticSeverity::HINT,
3340 message: "error 2 hint 2".to_string(),
3341 group_id: 0,
3342 is_primary: false,
3343 ..Default::default()
3344 }
3345 },
3346 DiagnosticEntry {
3347 range: Point::new(2, 8)..Point::new(2, 17),
3348 diagnostic: Diagnostic {
3349 severity: DiagnosticSeverity::ERROR,
3350 message: "error 2".to_string(),
3351 group_id: 0,
3352 is_primary: true,
3353 ..Default::default()
3354 }
3355 }
3356 ]
3357 );
3358
3359 assert_eq!(
3360 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3361 &[
3362 DiagnosticEntry {
3363 range: Point::new(1, 13)..Point::new(1, 15),
3364 diagnostic: Diagnostic {
3365 severity: DiagnosticSeverity::HINT,
3366 message: "error 2 hint 1".to_string(),
3367 group_id: 0,
3368 is_primary: false,
3369 ..Default::default()
3370 }
3371 },
3372 DiagnosticEntry {
3373 range: Point::new(1, 13)..Point::new(1, 15),
3374 diagnostic: Diagnostic {
3375 severity: DiagnosticSeverity::HINT,
3376 message: "error 2 hint 2".to_string(),
3377 group_id: 0,
3378 is_primary: false,
3379 ..Default::default()
3380 }
3381 },
3382 DiagnosticEntry {
3383 range: Point::new(2, 8)..Point::new(2, 17),
3384 diagnostic: Diagnostic {
3385 severity: DiagnosticSeverity::ERROR,
3386 message: "error 2".to_string(),
3387 group_id: 0,
3388 is_primary: true,
3389 ..Default::default()
3390 }
3391 }
3392 ]
3393 );
3394
3395 assert_eq!(
3396 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3397 &[
3398 DiagnosticEntry {
3399 range: Point::new(1, 8)..Point::new(1, 9),
3400 diagnostic: Diagnostic {
3401 severity: DiagnosticSeverity::WARNING,
3402 message: "error 1".to_string(),
3403 group_id: 1,
3404 is_primary: true,
3405 ..Default::default()
3406 }
3407 },
3408 DiagnosticEntry {
3409 range: Point::new(1, 8)..Point::new(1, 9),
3410 diagnostic: Diagnostic {
3411 severity: DiagnosticSeverity::HINT,
3412 message: "error 1 hint 1".to_string(),
3413 group_id: 1,
3414 is_primary: false,
3415 ..Default::default()
3416 }
3417 },
3418 ]
3419 );
3420}
3421
3422#[gpui::test]
3423async fn test_rename(cx: &mut gpui::TestAppContext) {
3424 init_test(cx);
3425
3426 let mut language = Language::new(
3427 LanguageConfig {
3428 name: "Rust".into(),
3429 path_suffixes: vec!["rs".to_string()],
3430 ..Default::default()
3431 },
3432 Some(tree_sitter_rust::language()),
3433 );
3434 let mut fake_servers = language
3435 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3436 capabilities: lsp::ServerCapabilities {
3437 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3438 prepare_provider: Some(true),
3439 work_done_progress_options: Default::default(),
3440 })),
3441 ..Default::default()
3442 },
3443 ..Default::default()
3444 }))
3445 .await;
3446
3447 let fs = FakeFs::new(cx.background());
3448 fs.insert_tree(
3449 "/dir",
3450 json!({
3451 "one.rs": "const ONE: usize = 1;",
3452 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3453 }),
3454 )
3455 .await;
3456
3457 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3458 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3459 let buffer = project
3460 .update(cx, |project, cx| {
3461 project.open_local_buffer("/dir/one.rs", cx)
3462 })
3463 .await
3464 .unwrap();
3465
3466 let fake_server = fake_servers.next().await.unwrap();
3467
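    // Prepare a rename at the position of `ONE`. The server responds with the
    // range of the symbol to be renamed.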
3468 let response = project.update(cx, |project, cx| {
3469 project.prepare_rename(buffer.clone(), 7, cx)
3470 });
3471 fake_server
3472 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3473 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3474 assert_eq!(params.position, lsp::Position::new(0, 7));
3475 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3476 lsp::Position::new(0, 6),
3477 lsp::Position::new(0, 9),
3478 ))))
3479 })
3480 .next()
3481 .await
3482 .unwrap();
3483 let range = response.await.unwrap().unwrap();
3484 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3485 assert_eq!(range, 6..9);
3486
3487 let response = project.update(cx, |project, cx| {
3488 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3489 });
3490 fake_server
3491 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3492 assert_eq!(
3493 params.text_document_position.text_document.uri.as_str(),
3494 "file:///dir/one.rs"
3495 );
3496 assert_eq!(
3497 params.text_document_position.position,
3498 lsp::Position::new(0, 7)
3499 );
3500 assert_eq!(params.new_name, "THREE");
3501 Ok(Some(lsp::WorkspaceEdit {
3502 changes: Some(
3503 [
3504 (
3505 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3506 vec![lsp::TextEdit::new(
3507 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3508 "THREE".to_string(),
3509 )],
3510 ),
3511 (
3512 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3513 vec![
3514 lsp::TextEdit::new(
3515 lsp::Range::new(
3516 lsp::Position::new(0, 24),
3517 lsp::Position::new(0, 27),
3518 ),
3519 "THREE".to_string(),
3520 ),
3521 lsp::TextEdit::new(
3522 lsp::Range::new(
3523 lsp::Position::new(0, 35),
3524 lsp::Position::new(0, 38),
3525 ),
3526 "THREE".to_string(),
3527 ),
3528 ],
3529 ),
3530 ]
3531 .into_iter()
3532 .collect(),
3533 ),
3534 ..Default::default()
3535 }))
3536 })
3537 .next()
3538 .await
3539 .unwrap();
3540 let mut transaction = response.await.unwrap().0;
3541 assert_eq!(transaction.len(), 2);
3542 assert_eq!(
3543 transaction
3544 .remove_entry(&buffer)
3545 .unwrap()
3546 .0
3547 .read_with(cx, |buffer, _| buffer.text()),
3548 "const THREE: usize = 1;"
3549 );
3550 assert_eq!(
3551 transaction
3552 .into_keys()
3553 .next()
3554 .unwrap()
3555 .read_with(cx, |buffer, _| buffer.text()),
3556 "const TWO: usize = one::THREE + one::THREE;"
3557 );
3558}
3559
3560#[gpui::test]
3561async fn test_search(cx: &mut gpui::TestAppContext) {
3562 init_test(cx);
3563
3564 let fs = FakeFs::new(cx.background());
3565 fs.insert_tree(
3566 "/dir",
3567 json!({
3568 "one.rs": "const ONE: usize = 1;",
3569 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3570 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3571 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3572 }),
3573 )
3574 .await;
3575 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3576 assert_eq!(
3577 search(
3578 &project,
3579 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3580 cx
3581 )
3582 .await
3583 .unwrap(),
3584 HashMap::from_iter([
3585 ("two.rs".to_string(), vec![6..9]),
3586 ("three.rs".to_string(), vec![37..40])
3587 ])
3588 );
3589
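    // Edit an open buffer without saving. Subsequent searches should reflect the
    // buffer's in-memory contents.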
3590 let buffer_4 = project
3591 .update(cx, |project, cx| {
3592 project.open_local_buffer("/dir/four.rs", cx)
3593 })
3594 .await
3595 .unwrap();
3596 buffer_4.update(cx, |buffer, cx| {
3597 let text = "two::TWO";
3598 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3599 });
3600
3601 assert_eq!(
3602 search(
3603 &project,
3604 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3605 cx
3606 )
3607 .await
3608 .unwrap(),
3609 HashMap::from_iter([
3610 ("two.rs".to_string(), vec![6..9]),
3611 ("three.rs".to_string(), vec![37..40]),
3612 ("four.rs".to_string(), vec![25..28, 36..39])
3613 ])
3614 );
3615}
3616
3617#[gpui::test]
3618async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3619 init_test(cx);
3620
3621 let search_query = "file";
3622
3623 let fs = FakeFs::new(cx.background());
3624 fs.insert_tree(
3625 "/dir",
3626 json!({
3627 "one.rs": r#"// Rust file one"#,
3628 "one.ts": r#"// TypeScript file one"#,
3629 "two.rs": r#"// Rust file two"#,
3630 "two.ts": r#"// TypeScript file two"#,
3631 }),
3632 )
3633 .await;
3634 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3635
3636 assert!(
3637 search(
3638 &project,
3639 SearchQuery::text(
3640 search_query,
3641 false,
3642 true,
3643 vec![PathMatcher::new("*.odd").unwrap()],
3644 Vec::new()
3645 ),
3646 cx
3647 )
3648 .await
3649 .unwrap()
3650 .is_empty(),
3651 "If no inclusions match, no files should be returned"
3652 );
3653
3654 assert_eq!(
3655 search(
3656 &project,
3657 SearchQuery::text(
3658 search_query,
3659 false,
3660 true,
3661 vec![PathMatcher::new("*.rs").unwrap()],
3662 Vec::new()
3663 ),
3664 cx
3665 )
3666 .await
3667 .unwrap(),
3668 HashMap::from_iter([
3669 ("one.rs".to_string(), vec![8..12]),
3670 ("two.rs".to_string(), vec![8..12]),
3671 ]),
3672 "Rust only search should give only Rust files"
3673 );
3674
3675 assert_eq!(
3676 search(
3677 &project,
3678 SearchQuery::text(
3679 search_query,
3680 false,
3681 true,
3682 vec![
3683 PathMatcher::new("*.ts").unwrap(),
3684 PathMatcher::new("*.odd").unwrap(),
3685 ],
3686 Vec::new()
3687 ),
3688 cx
3689 )
3690 .await
3691 .unwrap(),
3692 HashMap::from_iter([
3693 ("one.ts".to_string(), vec![14..18]),
3694 ("two.ts".to_string(), vec![14..18]),
3695 ]),
3696 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3697 );
3698
3699 assert_eq!(
3700 search(
3701 &project,
3702 SearchQuery::text(
3703 search_query,
3704 false,
3705 true,
3706 vec![
3707 PathMatcher::new("*.rs").unwrap(),
3708 PathMatcher::new("*.ts").unwrap(),
3709 PathMatcher::new("*.odd").unwrap(),
3710 ],
3711 Vec::new()
3712 ),
3713 cx
3714 )
3715 .await
3716 .unwrap(),
3717 HashMap::from_iter([
3718 ("one.rs".to_string(), vec![8..12]),
3719 ("one.ts".to_string(), vec![14..18]),
3720 ("two.rs".to_string(), vec![8..12]),
3721 ("two.ts".to_string(), vec![14..18]),
3722 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3724 );
3725}
3726
3727#[gpui::test]
3728async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3729 init_test(cx);
3730
3731 let search_query = "file";
3732
3733 let fs = FakeFs::new(cx.background());
3734 fs.insert_tree(
3735 "/dir",
3736 json!({
3737 "one.rs": r#"// Rust file one"#,
3738 "one.ts": r#"// TypeScript file one"#,
3739 "two.rs": r#"// Rust file two"#,
3740 "two.ts": r#"// TypeScript file two"#,
3741 }),
3742 )
3743 .await;
3744 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3745
3746 assert_eq!(
3747 search(
3748 &project,
3749 SearchQuery::text(
3750 search_query,
3751 false,
3752 true,
3753 Vec::new(),
3754 vec![PathMatcher::new("*.odd").unwrap()],
3755 ),
3756 cx
3757 )
3758 .await
3759 .unwrap(),
3760 HashMap::from_iter([
3761 ("one.rs".to_string(), vec![8..12]),
3762 ("one.ts".to_string(), vec![14..18]),
3763 ("two.rs".to_string(), vec![8..12]),
3764 ("two.ts".to_string(), vec![14..18]),
3765 ]),
3766 "If no exclusions match, all files should be returned"
3767 );
3768
3769 assert_eq!(
3770 search(
3771 &project,
3772 SearchQuery::text(
3773 search_query,
3774 false,
3775 true,
3776 Vec::new(),
3777 vec![PathMatcher::new("*.rs").unwrap()],
3778 ),
3779 cx
3780 )
3781 .await
3782 .unwrap(),
3783 HashMap::from_iter([
3784 ("one.ts".to_string(), vec![14..18]),
3785 ("two.ts".to_string(), vec![14..18]),
3786 ]),
3787 "Rust exclusion search should give only TypeScript files"
3788 );
3789
3790 assert_eq!(
3791 search(
3792 &project,
3793 SearchQuery::text(
3794 search_query,
3795 false,
3796 true,
3797 Vec::new(),
3798 vec![
3799 PathMatcher::new("*.ts").unwrap(),
3800 PathMatcher::new("*.odd").unwrap(),
3801 ],
3802 ),
3803 cx
3804 )
3805 .await
3806 .unwrap(),
3807 HashMap::from_iter([
3808 ("one.rs".to_string(), vec![8..12]),
3809 ("two.rs".to_string(), vec![8..12]),
3810 ]),
3811 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3812 );
3813
3814 assert!(
3815 search(
3816 &project,
3817 SearchQuery::text(
3818 search_query,
3819 false,
3820 true,
3821 Vec::new(),
3822 vec![
3823 PathMatcher::new("*.rs").unwrap(),
3824 PathMatcher::new("*.ts").unwrap(),
3825 PathMatcher::new("*.odd").unwrap(),
3826 ],
3827 ),
3828 cx
3829 )
3830 .await
3831 .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
3833 );
3834}
3835
3836#[gpui::test]
3837async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3838 init_test(cx);
3839
3840 let search_query = "file";
3841
3842 let fs = FakeFs::new(cx.background());
3843 fs.insert_tree(
3844 "/dir",
3845 json!({
3846 "one.rs": r#"// Rust file one"#,
3847 "one.ts": r#"// TypeScript file one"#,
3848 "two.rs": r#"// Rust file two"#,
3849 "two.ts": r#"// TypeScript file two"#,
3850 }),
3851 )
3852 .await;
3853 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3854
3855 assert!(
3856 search(
3857 &project,
3858 SearchQuery::text(
3859 search_query,
3860 false,
3861 true,
3862 vec![PathMatcher::new("*.odd").unwrap()],
3863 vec![PathMatcher::new("*.odd").unwrap()],
3864 ),
3865 cx
3866 )
3867 .await
3868 .unwrap()
3869 .is_empty(),
        "If neither inclusions nor exclusions match any files, nothing should be returned"
3871 );
3872
3873 assert!(
3874 search(
3875 &project,
3876 SearchQuery::text(
3877 search_query,
3878 false,
3879 true,
3880 vec![PathMatcher::new("*.ts").unwrap()],
3881 vec![PathMatcher::new("*.ts").unwrap()],
3882 ),
3883 cx
3884 )
3885 .await
3886 .unwrap()
3887 .is_empty(),
        "If both TypeScript inclusions and exclusions match, exclusions should win and no files should be returned."
3889 );
3890
3891 assert!(
3892 search(
3893 &project,
3894 SearchQuery::text(
3895 search_query,
3896 false,
3897 true,
3898 vec![
3899 PathMatcher::new("*.ts").unwrap(),
3900 PathMatcher::new("*.odd").unwrap()
3901 ],
3902 vec![
3903 PathMatcher::new("*.ts").unwrap(),
3904 PathMatcher::new("*.odd").unwrap()
3905 ],
3906 ),
3907 cx
3908 )
3909 .await
3910 .unwrap()
3911 .is_empty(),
        "Adding non-matching inclusions and exclusions should not change the result."
3913 );
3914
3915 assert_eq!(
3916 search(
3917 &project,
3918 SearchQuery::text(
3919 search_query,
3920 false,
3921 true,
3922 vec![
3923 PathMatcher::new("*.ts").unwrap(),
3924 PathMatcher::new("*.odd").unwrap()
3925 ],
3926 vec![
3927 PathMatcher::new("*.rs").unwrap(),
3928 PathMatcher::new("*.odd").unwrap()
3929 ],
3930 ),
3931 cx
3932 )
3933 .await
3934 .unwrap(),
3935 HashMap::from_iter([
3936 ("one.ts".to_string(), vec![14..18]),
3937 ("two.ts".to_string(), vec![14..18]),
3938 ]),
3939 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3940 );
3941}
3942
3943#[test]
3944fn test_glob_literal_prefix() {
3945 assert_eq!(glob_literal_prefix("**/*.js"), "");
3946 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
3947 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
3948 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
3949}
3950
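// Runs a project-wide search and returns the results as a map from file path to
// the matching offset ranges.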
3951async fn search(
3952 project: &ModelHandle<Project>,
3953 query: SearchQuery,
3954 cx: &mut gpui::TestAppContext,
3955) -> Result<HashMap<String, Vec<Range<usize>>>> {
3956 let results = project
3957 .update(cx, |project, cx| project.search(query, cx))
3958 .await?;
3959
3960 Ok(results
3961 .into_iter()
3962 .map(|(buffer, ranges)| {
3963 buffer.read_with(cx, |buffer, _| {
3964 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3965 let ranges = ranges
3966 .into_iter()
3967 .map(|range| range.to_offset(buffer))
3968 .collect::<Vec<_>>();
3969 (path, ranges)
3970 })
3971 })
3972 .collect())
3973}
3974
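// Common test setup: forbid parking on the foreground executor and install the
// test settings store along with language and project settings.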
3975fn init_test(cx: &mut gpui::TestAppContext) {
3976 cx.foreground().forbid_parking();
3977
3978 cx.update(|cx| {
3979 cx.set_global(SettingsStore::test(cx));
3980 language::init(cx);
3981 Project::init_settings(cx);
3982 });
3983}