use crate::{search::PathMatcher, worktree::WorktreeModelHandle, Event, *};
use fs::{FakeFs, RealFs};
use futures::{future, StreamExt};
use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{
    language_settings::{AllLanguageSettings, LanguageSettingsContent},
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    LineEnding, OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.foreground().allow_parking();

    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
    project.read_with(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}

#[gpui::test]
async fn test_managing_project_specific_settings(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());

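    // Let the worktree scan finish and the local settings files be loaded.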
    deterministic.run_until_parked();
    cx.read(|cx| {
        let tree = worktree.read(cx);

        let settings_a = language_settings(
            None,
            Some(
                &(File::for_entry(
                    tree.entry_for_path("a/a.rs").unwrap().clone(),
                    worktree.clone(),
                ) as _),
            ),
            cx,
        );
        let settings_b = language_settings(
            None,
            Some(
                &(File::for_entry(
                    tree.entry_for_path("b/b.rs").unwrap().clone(),
                    worktree.clone(),
                ) as _),
            ),
            cx,
        );

        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });
}

#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

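    // Attach some local diagnostics to the buffer so we can verify below that
    // they are cleared when the file's language changes.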
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap()),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // The diagnostics are cleared, since the buffer's language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

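    // Both the old Rust and JSON language servers are asked to shut down.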
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

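    // New fake server instances replace the ones that were shut down.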
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure the Rust document is reopened in the new Rust language server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure the JSON documents are reopened in the new JSON language server.
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}

#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(language));
    });
    cx.foreground().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

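    // Record how many directories have been scanned so far, so we can verify below
    // that watching an ignored path triggers additional scans.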
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.foreground().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, three of which match the watched patterns,
    // and two of which do not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.foreground().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}

#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}

#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}

#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause the worktree to start the fake language server.
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

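    // Subscribe to project events so we can observe the diagnostics lifecycle.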
    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

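    // Open the file that received diagnostics and verify that they are reflected in the buffer.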
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}

#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(0); 0]
        );
    });
}

#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.read_with(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.read_with(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}

#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.foreground().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}

#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable the Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}

#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

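    // The server starts up and is notified that the buffer was opened.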
    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}

#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

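    // Capture the document version that the server saw when the buffer was opened.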
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

2041#[gpui::test]
2042async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
2043 init_test(cx);
2044
2045 let text = "
2046 use a::b;
2047 use a::c;
2048
2049 fn f() {
2050 b();
2051 c();
2052 }
2053 "
2054 .unindent();
2055
2056 let fs = FakeFs::new(cx.background());
2057 fs.insert_tree(
2058 "/dir",
2059 json!({
2060 "a.rs": text.clone(),
2061 }),
2062 )
2063 .await;
2064
2065 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2066 let buffer = project
2067 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2068 .await
2069 .unwrap();
2070
2071     // Simulate the language server sending us edits out of order, with some
2072     // ranges inverted and others pointing past the end of the file.
2073 let edits = project
2074 .update(cx, |project, cx| {
2075 project.edits_from_lsp(
2076 &buffer,
2077 [
2078 lsp::TextEdit {
2079 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2080 new_text: "\n\n".into(),
2081 },
2082 lsp::TextEdit {
2083 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2084 new_text: "a::{b, c}".into(),
2085 },
2086 lsp::TextEdit {
2087 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2088 new_text: "".into(),
2089 },
2090 lsp::TextEdit {
2091 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2092 new_text: "
2093 fn f() {
2094 b();
2095 c();
2096 }"
2097 .unindent(),
2098 },
2099 ],
2100 LanguageServerId(0),
2101 None,
2102 cx,
2103 )
2104 })
2105 .await
2106 .unwrap();
2107
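    // Despite the unordered, inverted, and out-of-range inputs, the edits are
    // normalized to the same two minimal edits as in the previous test.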
2108 buffer.update(cx, |buffer, cx| {
2109 let edits = edits
2110 .into_iter()
2111 .map(|(range, text)| {
2112 (
2113 range.start.to_point(buffer)..range.end.to_point(buffer),
2114 text,
2115 )
2116 })
2117 .collect::<Vec<_>>();
2118
2119 assert_eq!(
2120 edits,
2121 [
2122 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2123 (Point::new(1, 0)..Point::new(2, 0), "".into())
2124 ]
2125 );
2126
2127 for (range, new_text) in edits {
2128 buffer.edit([(range, new_text)], None, cx);
2129 }
2130 assert_eq!(
2131 buffer.text(),
2132 "
2133 use a::{b, c};
2134
2135 fn f() {
2136 b();
2137 c();
2138 }
2139 "
2140 .unindent()
2141 );
2142 });
2143}
2144
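// Collects the chunks of `buffer` within `range` into (text, severity) pairs,
// merging adjacent chunks that share the same diagnostic severity.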
2145fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2146 buffer: &Buffer,
2147 range: Range<T>,
2148) -> Vec<(String, Option<DiagnosticSeverity>)> {
2149 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2150 for chunk in buffer.snapshot().chunks(range, true) {
2151 if chunks.last().map_or(false, |prev_chunk| {
2152 prev_chunk.1 == chunk.diagnostic_severity
2153 }) {
2154 chunks.last_mut().unwrap().0.push_str(chunk.text);
2155 } else {
2156 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2157 }
2158 }
2159 chunks
2160}
2161
2162#[gpui::test(iterations = 10)]
2163async fn test_definition(cx: &mut gpui::TestAppContext) {
2164 init_test(cx);
2165
2166 let mut language = Language::new(
2167 LanguageConfig {
2168 name: "Rust".into(),
2169 path_suffixes: vec!["rs".to_string()],
2170 ..Default::default()
2171 },
2172 Some(tree_sitter_rust::language()),
2173 );
2174 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2175
2176 let fs = FakeFs::new(cx.background());
2177 fs.insert_tree(
2178 "/dir",
2179 json!({
2180 "a.rs": "const fn a() { A }",
2181 "b.rs": "const y: i32 = crate::a()",
2182 }),
2183 )
2184 .await;
2185
2186 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2187 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2188
2189 let buffer = project
2190 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2191 .await
2192 .unwrap();
2193
2194 let fake_server = fake_servers.next().await.unwrap();
2195 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2196 let params = params.text_document_position_params;
2197 assert_eq!(
2198 params.text_document.uri.to_file_path().unwrap(),
2199 Path::new("/dir/b.rs"),
2200 );
2201 assert_eq!(params.position, lsp::Position::new(0, 22));
2202
2203 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2204 lsp::Location::new(
2205 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2206 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2207 ),
2208 )))
2209 });
2210
2211 let mut definitions = project
2212 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2213 .await
2214 .unwrap();
2215
2216     // Assert that no additional language server was started.
2217 cx.foreground().run_until_parked();
2218 assert!(fake_servers.try_next().is_err());
2219
2220 assert_eq!(definitions.len(), 1);
2221 let definition = definitions.pop().unwrap();
2222 cx.update(|cx| {
2223 let target_buffer = definition.target.buffer.read(cx);
2224 assert_eq!(
2225 target_buffer
2226 .file()
2227 .unwrap()
2228 .as_local()
2229 .unwrap()
2230 .abs_path(cx),
2231 Path::new("/dir/a.rs"),
2232 );
2233 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
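        // The definition target lives outside the project's root, so a hidden
        // (non-visible) worktree was added for it.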
2234 assert_eq!(
2235 list_worktrees(&project, cx),
2236 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2237 );
2238
2239 drop(definition);
2240 });
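    // Dropping the definition releases the hidden worktree that was created for it.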
2241 cx.read(|cx| {
2242 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2243 });
2244
2245 fn list_worktrees<'a>(
2246 project: &'a ModelHandle<Project>,
2247 cx: &'a AppContext,
2248 ) -> Vec<(&'a Path, bool)> {
2249 project
2250 .read(cx)
2251 .worktrees(cx)
2252 .map(|worktree| {
2253 let worktree = worktree.read(cx);
2254 (
2255 worktree.as_local().unwrap().abs_path().as_ref(),
2256 worktree.is_visible(),
2257 )
2258 })
2259 .collect::<Vec<_>>()
2260 }
2261}
2262
2263#[gpui::test]
2264async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2265 init_test(cx);
2266
2267 let mut language = Language::new(
2268 LanguageConfig {
2269 name: "TypeScript".into(),
2270 path_suffixes: vec!["ts".to_string()],
2271 ..Default::default()
2272 },
2273 Some(tree_sitter_typescript::language_typescript()),
2274 );
2275 let mut fake_language_servers = language
2276 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2277 capabilities: lsp::ServerCapabilities {
2278 completion_provider: Some(lsp::CompletionOptions {
2279 trigger_characters: Some(vec![":".to_string()]),
2280 ..Default::default()
2281 }),
2282 ..Default::default()
2283 },
2284 ..Default::default()
2285 }))
2286 .await;
2287
2288 let fs = FakeFs::new(cx.background());
2289 fs.insert_tree(
2290 "/dir",
2291 json!({
2292 "a.ts": "",
2293 }),
2294 )
2295 .await;
2296
2297 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2298 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2299 let buffer = project
2300 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2301 .await
2302 .unwrap();
2303
2304 let fake_server = fake_language_servers.next().await.unwrap();
2305
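    // The server's completion items carry no text edit, so the replaced range is
    // inferred from the word adjacent to the cursor ("fqn").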
2306 let text = "let a = b.fqn";
2307 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2308 let completions = project.update(cx, |project, cx| {
2309 project.completions(&buffer, text.len(), cx)
2310 });
2311
2312 fake_server
2313 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2314 Ok(Some(lsp::CompletionResponse::Array(vec![
2315 lsp::CompletionItem {
2316 label: "fullyQualifiedName?".into(),
2317 insert_text: Some("fullyQualifiedName".into()),
2318 ..Default::default()
2319 },
2320 ])))
2321 })
2322 .next()
2323 .await;
2324 let completions = completions.await.unwrap();
2325 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2326 assert_eq!(completions.len(), 1);
2327 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2328 assert_eq!(
2329 completions[0].old_range.to_offset(&snapshot),
2330 text.len() - 3..text.len()
2331 );
2332
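    // Inside a string literal, only the final path segment ("cmp") before the
    // cursor is treated as the word being completed.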
2333 let text = "let a = \"atoms/cmp\"";
2334 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2335 let completions = project.update(cx, |project, cx| {
2336 project.completions(&buffer, text.len() - 1, cx)
2337 });
2338
2339 fake_server
2340 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2341 Ok(Some(lsp::CompletionResponse::Array(vec![
2342 lsp::CompletionItem {
2343 label: "component".into(),
2344 ..Default::default()
2345 },
2346 ])))
2347 })
2348 .next()
2349 .await;
2350 let completions = completions.await.unwrap();
2351 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2352 assert_eq!(completions.len(), 1);
2353 assert_eq!(completions[0].new_text, "component");
2354 assert_eq!(
2355 completions[0].old_range.to_offset(&snapshot),
2356 text.len() - 4..text.len() - 1
2357 );
2358}
2359
2360#[gpui::test]
2361async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2362 init_test(cx);
2363
2364 let mut language = Language::new(
2365 LanguageConfig {
2366 name: "TypeScript".into(),
2367 path_suffixes: vec!["ts".to_string()],
2368 ..Default::default()
2369 },
2370 Some(tree_sitter_typescript::language_typescript()),
2371 );
2372 let mut fake_language_servers = language
2373 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
2374 capabilities: lsp::ServerCapabilities {
2375 completion_provider: Some(lsp::CompletionOptions {
2376 trigger_characters: Some(vec![":".to_string()]),
2377 ..Default::default()
2378 }),
2379 ..Default::default()
2380 },
2381 ..Default::default()
2382 }))
2383 .await;
2384
2385 let fs = FakeFs::new(cx.background());
2386 fs.insert_tree(
2387 "/dir",
2388 json!({
2389 "a.ts": "",
2390 }),
2391 )
2392 .await;
2393
2394 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2395 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2396 let buffer = project
2397 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2398 .await
2399 .unwrap();
2400
2401 let fake_server = fake_language_servers.next().await.unwrap();
2402
2403 let text = "let a = b.fqn";
2404 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2405 let completions = project.update(cx, |project, cx| {
2406 project.completions(&buffer, text.len(), cx)
2407 });
2408
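    // The server's insert text contains '\r' and '\r\n'; the resulting completion
    // text should be normalized to use '\n' only.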
2409 fake_server
2410 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2411 Ok(Some(lsp::CompletionResponse::Array(vec![
2412 lsp::CompletionItem {
2413 label: "fullyQualifiedName?".into(),
2414 insert_text: Some("fully\rQualified\r\nName".into()),
2415 ..Default::default()
2416 },
2417 ])))
2418 })
2419 .next()
2420 .await;
2421 let completions = completions.await.unwrap();
2422 assert_eq!(completions.len(), 1);
2423 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2424}
2425
2426#[gpui::test(iterations = 10)]
2427async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2428 init_test(cx);
2429
2430 let mut language = Language::new(
2431 LanguageConfig {
2432 name: "TypeScript".into(),
2433 path_suffixes: vec!["ts".to_string()],
2434 ..Default::default()
2435 },
2436 None,
2437 );
2438 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2439
2440 let fs = FakeFs::new(cx.background());
2441 fs.insert_tree(
2442 "/dir",
2443 json!({
2444 "a.ts": "a",
2445 }),
2446 )
2447 .await;
2448
2449 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2450 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2451 let buffer = project
2452 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2453 .await
2454 .unwrap();
2455
2456 let fake_server = fake_language_servers.next().await.unwrap();
2457
2458     // The language server returns code actions that contain commands, but no edits.
2459 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2460 fake_server
2461 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2462 Ok(Some(vec![
2463 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2464 title: "The code action".into(),
2465 command: Some(lsp::Command {
2466 title: "The command".into(),
2467 command: "_the/command".into(),
2468 arguments: Some(vec![json!("the-argument")]),
2469 }),
2470 ..Default::default()
2471 }),
2472 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2473 title: "two".into(),
2474 ..Default::default()
2475 }),
2476 ]))
2477 })
2478 .next()
2479 .await;
2480
2481 let action = actions.await.unwrap()[0].clone();
2482 let apply = project.update(cx, |project, cx| {
2483 project.apply_code_action(buffer.clone(), action, true, cx)
2484 });
2485
2486     // Resolving the code action does not populate its edits. In the absence of
2487     // edits, we must execute the given command.
2488 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2489 |action, _| async move { Ok(action) },
2490 );
2491
2492     // While executing the command, the language server sends the editor
2493     // a `workspace/applyEdit` request.
2494 fake_server
2495 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2496 let fake = fake_server.clone();
2497 move |params, _| {
2498 assert_eq!(params.command, "_the/command");
2499 let fake = fake.clone();
2500 async move {
2501 fake.server
2502 .request::<lsp::request::ApplyWorkspaceEdit>(
2503 lsp::ApplyWorkspaceEditParams {
2504 label: None,
2505 edit: lsp::WorkspaceEdit {
2506 changes: Some(
2507 [(
2508 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2509 vec![lsp::TextEdit {
2510 range: lsp::Range::new(
2511 lsp::Position::new(0, 0),
2512 lsp::Position::new(0, 0),
2513 ),
2514 new_text: "X".into(),
2515 }],
2516 )]
2517 .into_iter()
2518 .collect(),
2519 ),
2520 ..Default::default()
2521 },
2522 },
2523 )
2524 .await
2525 .unwrap();
2526 Ok(Some(json!(null)))
2527 }
2528 }
2529 })
2530 .next()
2531 .await;
2532
2533     // Applying the code action returns a project transaction containing the edits
2534     // sent by the language server in its `workspace/applyEdit` request.
2535 let transaction = apply.await.unwrap();
2536 assert!(transaction.0.contains_key(&buffer));
2537 buffer.update(cx, |buffer, cx| {
2538 assert_eq!(buffer.text(), "Xa");
2539 buffer.undo(cx);
2540 assert_eq!(buffer.text(), "a");
2541 });
2542}
2543
2544#[gpui::test(iterations = 10)]
2545async fn test_save_file(cx: &mut gpui::TestAppContext) {
2546 init_test(cx);
2547
2548 let fs = FakeFs::new(cx.background());
2549 fs.insert_tree(
2550 "/dir",
2551 json!({
2552 "file1": "the old contents",
2553 }),
2554 )
2555 .await;
2556
2557 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2558 let buffer = project
2559 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2560 .await
2561 .unwrap();
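    // Insert 10 * 1024 copies of a 16-byte line (~160 KB) before saving.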
2562 buffer.update(cx, |buffer, cx| {
2563 assert_eq!(buffer.text(), "the old contents");
2564 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2565 });
2566
2567 project
2568 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2569 .await
2570 .unwrap();
2571
2572 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2573 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2574}
2575
2576#[gpui::test]
2577async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2578 init_test(cx);
2579
2580 let fs = FakeFs::new(cx.background());
2581 fs.insert_tree(
2582 "/dir",
2583 json!({
2584 "file1": "the old contents",
2585 }),
2586 )
2587 .await;
2588
2589 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2590 let buffer = project
2591 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2592 .await
2593 .unwrap();
2594 buffer.update(cx, |buffer, cx| {
2595 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2596 });
2597
2598 project
2599 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2600 .await
2601 .unwrap();
2602
2603 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2604 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2605}
2606
2607#[gpui::test]
2608async fn test_save_as(cx: &mut gpui::TestAppContext) {
2609 init_test(cx);
2610
2611 let fs = FakeFs::new(cx.background());
2612 fs.insert_tree("/dir", json!({})).await;
2613
2614 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2615
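    // Register the Rust language so that, once the buffer is saved with an `.rs`
    // extension, its language is reassigned from Plain Text to Rust.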
2616 let languages = project.read_with(cx, |project, _| project.languages().clone());
2617 languages.register(
2618 "/some/path",
2619 LanguageConfig {
2620 name: "Rust".into(),
2621 path_suffixes: vec!["rs".into()],
2622 ..Default::default()
2623 },
2624 tree_sitter_rust::language(),
2625 vec![],
2626 |_| Default::default(),
2627 );
2628
2629 let buffer = project.update(cx, |project, cx| {
2630 project.create_buffer("", None, cx).unwrap()
2631 });
2632 buffer.update(cx, |buffer, cx| {
2633 buffer.edit([(0..0, "abc")], None, cx);
2634 assert!(buffer.is_dirty());
2635 assert!(!buffer.has_conflict());
2636 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2637 });
2638 project
2639 .update(cx, |project, cx| {
2640 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2641 })
2642 .await
2643 .unwrap();
2644 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2645
2646 cx.foreground().run_until_parked();
2647 buffer.read_with(cx, |buffer, cx| {
2648 assert_eq!(
2649 buffer.file().unwrap().full_path(cx),
2650 Path::new("dir/file1.rs")
2651 );
2652 assert!(!buffer.is_dirty());
2653 assert!(!buffer.has_conflict());
2654 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2655 });
2656
2657 let opened_buffer = project
2658 .update(cx, |project, cx| {
2659 project.open_local_buffer("/dir/file1.rs", cx)
2660 })
2661 .await
2662 .unwrap();
2663 assert_eq!(opened_buffer, buffer);
2664}
2665
2666#[gpui::test(retries = 5)]
2667async fn test_rescan_and_remote_updates(
2668 deterministic: Arc<Deterministic>,
2669 cx: &mut gpui::TestAppContext,
2670) {
2671 init_test(cx);
2672 cx.foreground().allow_parking();
2673
2674 let dir = temp_tree(json!({
2675 "a": {
2676 "file1": "",
2677 "file2": "",
2678 "file3": "",
2679 },
2680 "b": {
2681 "c": {
2682 "file4": "",
2683 "file5": "",
2684 }
2685 }
2686 }));
2687
2688 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2689 let rpc = project.read_with(cx, |p, _| p.client.clone());
2690
2691 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2692 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2693 async move { buffer.await.unwrap() }
2694 };
2695 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2696 project.read_with(cx, |project, cx| {
2697 let tree = project.worktrees(cx).next().unwrap();
2698 tree.read(cx)
2699 .entry_for_path(path)
2700 .unwrap_or_else(|| panic!("no entry for path {}", path))
2701 .id
2702 })
2703 };
2704
2705 let buffer2 = buffer_for_path("a/file2", cx).await;
2706 let buffer3 = buffer_for_path("a/file3", cx).await;
2707 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2708 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2709
2710 let file2_id = id_for_path("a/file2", cx);
2711 let file3_id = id_for_path("a/file3", cx);
2712 let file4_id = id_for_path("b/c/file4", cx);
2713
2714 // Create a remote copy of this worktree.
2715 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2716
2717 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2718
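    // Record every update streamed from the local worktree so it can be replayed
    // on the remote replica below.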
2719 let updates = Arc::new(Mutex::new(Vec::new()));
2720 tree.update(cx, |tree, cx| {
2721 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2722 let updates = updates.clone();
2723 move |update| {
2724 updates.lock().push(update);
2725 async { true }
2726 }
2727 });
2728 });
2729
2730 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2731 deterministic.run_until_parked();
2732
2733 cx.read(|cx| {
2734 assert!(!buffer2.read(cx).is_dirty());
2735 assert!(!buffer3.read(cx).is_dirty());
2736 assert!(!buffer4.read(cx).is_dirty());
2737 assert!(!buffer5.read(cx).is_dirty());
2738 });
2739
2740 // Rename and delete files and directories.
2741 tree.flush_fs_events(cx).await;
2742 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2743 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2744 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2745 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2746 tree.flush_fs_events(cx).await;
2747
2748 let expected_paths = vec![
2749 "a",
2750 "a/file1",
2751 "a/file2.new",
2752 "b",
2753 "d",
2754 "d/file3",
2755 "d/file4",
2756 ];
2757
2758 cx.read(|app| {
2759 assert_eq!(
2760 tree.read(app)
2761 .paths()
2762 .map(|p| p.to_str().unwrap())
2763 .collect::<Vec<_>>(),
2764 expected_paths
2765 );
2766
2767 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2768 assert_eq!(id_for_path("d/file3", cx), file3_id);
2769 assert_eq!(id_for_path("d/file4", cx), file4_id);
2770
2771 assert_eq!(
2772 buffer2.read(app).file().unwrap().path().as_ref(),
2773 Path::new("a/file2.new")
2774 );
2775 assert_eq!(
2776 buffer3.read(app).file().unwrap().path().as_ref(),
2777 Path::new("d/file3")
2778 );
2779 assert_eq!(
2780 buffer4.read(app).file().unwrap().path().as_ref(),
2781 Path::new("d/file4")
2782 );
2783 assert_eq!(
2784 buffer5.read(app).file().unwrap().path().as_ref(),
2785 Path::new("b/c/file5")
2786 );
2787
2788 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2789 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2790 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2791 assert!(buffer5.read(app).file().unwrap().is_deleted());
2792 });
2793
2794 // Update the remote worktree. Check that it becomes consistent with the
2795 // local worktree.
2796 deterministic.run_until_parked();
2797 remote.update(cx, |remote, _| {
2798 for update in updates.lock().drain(..) {
2799 remote.as_remote_mut().unwrap().update_from_remote(update);
2800 }
2801 });
2802 deterministic.run_until_parked();
2803 remote.read_with(cx, |remote, _| {
2804 assert_eq!(
2805 remote
2806 .paths()
2807 .map(|p| p.to_str().unwrap())
2808 .collect::<Vec<_>>(),
2809 expected_paths
2810 );
2811 });
2812}
2813
2814#[gpui::test(iterations = 10)]
2815async fn test_buffer_identity_across_renames(
2816 deterministic: Arc<Deterministic>,
2817 cx: &mut gpui::TestAppContext,
2818) {
2819 init_test(cx);
2820
2821 let fs = FakeFs::new(cx.background());
2822 fs.insert_tree(
2823 "/dir",
2824 json!({
2825 "a": {
2826 "file1": "",
2827 }
2828 }),
2829 )
2830 .await;
2831
2832 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2833 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2834 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2835
2836 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2837 project.read_with(cx, |project, cx| {
2838 let tree = project.worktrees(cx).next().unwrap();
2839 tree.read(cx)
2840 .entry_for_path(path)
2841 .unwrap_or_else(|| panic!("no entry for path {}", path))
2842 .id
2843 })
2844 };
2845
2846 let dir_id = id_for_path("a", cx);
2847 let file_id = id_for_path("a/file1", cx);
2848 let buffer = project
2849 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2850 .await
2851 .unwrap();
2852 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2853
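    // Rename the directory containing the open buffer. Entry ids and the buffer's
    // identity should survive the rename.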
2854 project
2855 .update(cx, |project, cx| {
2856 project.rename_entry(dir_id, Path::new("b"), cx)
2857 })
2858 .unwrap()
2859 .await
2860 .unwrap();
2861 deterministic.run_until_parked();
2862 assert_eq!(id_for_path("b", cx), dir_id);
2863 assert_eq!(id_for_path("b/file1", cx), file_id);
2864 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2865}
2866
2867#[gpui::test]
2868async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2869 init_test(cx);
2870
2871 let fs = FakeFs::new(cx.background());
2872 fs.insert_tree(
2873 "/dir",
2874 json!({
2875 "a.txt": "a-contents",
2876 "b.txt": "b-contents",
2877 }),
2878 )
2879 .await;
2880
2881 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2882
2883 // Spawn multiple tasks to open paths, repeating some paths.
2884 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2885 (
2886 p.open_local_buffer("/dir/a.txt", cx),
2887 p.open_local_buffer("/dir/b.txt", cx),
2888 p.open_local_buffer("/dir/a.txt", cx),
2889 )
2890 });
2891
2892 let buffer_a_1 = buffer_a_1.await.unwrap();
2893 let buffer_a_2 = buffer_a_2.await.unwrap();
2894 let buffer_b = buffer_b.await.unwrap();
2895 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2896 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2897
2898 // There is only one buffer per path.
2899 let buffer_a_id = buffer_a_1.id();
2900 assert_eq!(buffer_a_2.id(), buffer_a_id);
2901
2902 // Open the same path again while it is still open.
2903 drop(buffer_a_1);
2904 let buffer_a_3 = project
2905 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2906 .await
2907 .unwrap();
2908
2909 // There's still only one buffer per path.
2910 assert_eq!(buffer_a_3.id(), buffer_a_id);
2911}
2912
2913#[gpui::test]
2914async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2915 init_test(cx);
2916
2917 let fs = FakeFs::new(cx.background());
2918 fs.insert_tree(
2919 "/dir",
2920 json!({
2921 "file1": "abc",
2922 "file2": "def",
2923 "file3": "ghi",
2924 }),
2925 )
2926 .await;
2927
2928 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2929
2930 let buffer1 = project
2931 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2932 .await
2933 .unwrap();
2934 let events = Rc::new(RefCell::new(Vec::new()));
2935
2936 // initially, the buffer isn't dirty.
2937 buffer1.update(cx, |buffer, cx| {
2938 cx.subscribe(&buffer1, {
2939 let events = events.clone();
2940 move |_, _, event, _| match event {
2941 BufferEvent::Operation(_) => {}
2942 _ => events.borrow_mut().push(event.clone()),
2943 }
2944 })
2945 .detach();
2946
2947 assert!(!buffer.is_dirty());
2948 assert!(events.borrow().is_empty());
2949
2950 buffer.edit([(1..2, "")], None, cx);
2951 });
2952
2953 // after the first edit, the buffer is dirty, and emits a dirtied event.
2954 buffer1.update(cx, |buffer, cx| {
2955 assert!(buffer.text() == "ac");
2956 assert!(buffer.is_dirty());
2957 assert_eq!(
2958 *events.borrow(),
2959 &[language::Event::Edited, language::Event::DirtyChanged]
2960 );
2961 events.borrow_mut().clear();
2962 buffer.did_save(
2963 buffer.version(),
2964 buffer.as_rope().fingerprint(),
2965 buffer.file().unwrap().mtime(),
2966 cx,
2967 );
2968 });
2969
2970 // after saving, the buffer is not dirty, and emits a saved event.
2971 buffer1.update(cx, |buffer, cx| {
2972 assert!(!buffer.is_dirty());
2973 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2974 events.borrow_mut().clear();
2975
2976 buffer.edit([(1..1, "B")], None, cx);
2977 buffer.edit([(2..2, "D")], None, cx);
2978 });
2979
2980 // after editing again, the buffer is dirty, and emits another dirty event.
2981 buffer1.update(cx, |buffer, cx| {
2982 assert!(buffer.text() == "aBDc");
2983 assert!(buffer.is_dirty());
2984 assert_eq!(
2985 *events.borrow(),
2986 &[
2987 language::Event::Edited,
2988 language::Event::DirtyChanged,
2989 language::Event::Edited,
2990 ],
2991 );
2992 events.borrow_mut().clear();
2993
2994 // After restoring the buffer to its previously-saved state,
2995 // the buffer is not considered dirty anymore.
2996 buffer.edit([(1..3, "")], None, cx);
2997 assert!(buffer.text() == "ac");
2998 assert!(!buffer.is_dirty());
2999 });
3000
3001 assert_eq!(
3002 *events.borrow(),
3003 &[language::Event::Edited, language::Event::DirtyChanged]
3004 );
3005
3006 // When a file is deleted, the buffer is considered dirty.
3007 let events = Rc::new(RefCell::new(Vec::new()));
3008 let buffer2 = project
3009 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3010 .await
3011 .unwrap();
3012 buffer2.update(cx, |_, cx| {
3013 cx.subscribe(&buffer2, {
3014 let events = events.clone();
3015 move |_, _, event, _| events.borrow_mut().push(event.clone())
3016 })
3017 .detach();
3018 });
3019
3020 fs.remove_file("/dir/file2".as_ref(), Default::default())
3021 .await
3022 .unwrap();
3023 cx.foreground().run_until_parked();
3024 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
3025 assert_eq!(
3026 *events.borrow(),
3027 &[
3028 language::Event::DirtyChanged,
3029 language::Event::FileHandleChanged
3030 ]
3031 );
3032
3033     // When a file that is already dirty is deleted, we don't emit an additional DirtyChanged event.
3034 let events = Rc::new(RefCell::new(Vec::new()));
3035 let buffer3 = project
3036 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3037 .await
3038 .unwrap();
3039 buffer3.update(cx, |_, cx| {
3040 cx.subscribe(&buffer3, {
3041 let events = events.clone();
3042 move |_, _, event, _| events.borrow_mut().push(event.clone())
3043 })
3044 .detach();
3045 });
3046
3047 buffer3.update(cx, |buffer, cx| {
3048 buffer.edit([(0..0, "x")], None, cx);
3049 });
3050 events.borrow_mut().clear();
3051 fs.remove_file("/dir/file3".as_ref(), Default::default())
3052 .await
3053 .unwrap();
3054 cx.foreground().run_until_parked();
3055 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3056 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3057}
3058
3059#[gpui::test]
3060async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3061 init_test(cx);
3062
3063 let initial_contents = "aaa\nbbbbb\nc\n";
3064 let fs = FakeFs::new(cx.background());
3065 fs.insert_tree(
3066 "/dir",
3067 json!({
3068 "the-file": initial_contents,
3069 }),
3070 )
3071 .await;
3072 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3073 let buffer = project
3074 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3075 .await
3076 .unwrap();
3077
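    // Create an anchor on each of the first three lines so we can verify how they
    // are relocated when the file is reloaded from disk.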
3078 let anchors = (0..3)
3079 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3080 .collect::<Vec<_>>();
3081
3082 // Change the file on disk, adding two new lines of text, and removing
3083 // one line.
3084 buffer.read_with(cx, |buffer, _| {
3085 assert!(!buffer.is_dirty());
3086 assert!(!buffer.has_conflict());
3087 });
3088 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3089 fs.save(
3090 "/dir/the-file".as_ref(),
3091 &new_contents.into(),
3092 LineEnding::Unix,
3093 )
3094 .await
3095 .unwrap();
3096
3097 // Because the buffer was not modified, it is reloaded from disk. Its
3098 // contents are edited according to the diff between the old and new
3099 // file contents.
3100 cx.foreground().run_until_parked();
3101 buffer.update(cx, |buffer, _| {
3102 assert_eq!(buffer.text(), new_contents);
3103 assert!(!buffer.is_dirty());
3104 assert!(!buffer.has_conflict());
3105
3106 let anchor_positions = anchors
3107 .iter()
3108 .map(|anchor| anchor.to_point(&*buffer))
3109 .collect::<Vec<_>>();
3110 assert_eq!(
3111 anchor_positions,
3112 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3113 );
3114 });
3115
3116 // Modify the buffer
3117 buffer.update(cx, |buffer, cx| {
3118 buffer.edit([(0..0, " ")], None, cx);
3119 assert!(buffer.is_dirty());
3120 assert!(!buffer.has_conflict());
3121 });
3122
3123 // Change the file on disk again, adding blank lines to the beginning.
3124 fs.save(
3125 "/dir/the-file".as_ref(),
3126 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3127 LineEnding::Unix,
3128 )
3129 .await
3130 .unwrap();
3131
3132 // Because the buffer is modified, it doesn't reload from disk, but is
3133 // marked as having a conflict.
3134 cx.foreground().run_until_parked();
3135 buffer.read_with(cx, |buffer, _| {
3136 assert!(buffer.has_conflict());
3137 });
3138}
3139
3140#[gpui::test]
3141async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3142 init_test(cx);
3143
3144 let fs = FakeFs::new(cx.background());
3145 fs.insert_tree(
3146 "/dir",
3147 json!({
3148 "file1": "a\nb\nc\n",
3149 "file2": "one\r\ntwo\r\nthree\r\n",
3150 }),
3151 )
3152 .await;
3153
3154 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3155 let buffer1 = project
3156 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3157 .await
3158 .unwrap();
3159 let buffer2 = project
3160 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3161 .await
3162 .unwrap();
3163
3164 buffer1.read_with(cx, |buffer, _| {
3165 assert_eq!(buffer.text(), "a\nb\nc\n");
3166 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3167 });
3168 buffer2.read_with(cx, |buffer, _| {
3169 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3170 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3171 });
3172
3173 // Change a file's line endings on disk from unix to windows. The buffer's
3174 // state updates correctly.
3175 fs.save(
3176 "/dir/file1".as_ref(),
3177 &"aaa\nb\nc\n".into(),
3178 LineEnding::Windows,
3179 )
3180 .await
3181 .unwrap();
3182 cx.foreground().run_until_parked();
3183 buffer1.read_with(cx, |buffer, _| {
3184 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3185 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3186 });
3187
3188 // Save a file with windows line endings. The file is written correctly.
3189 buffer2.update(cx, |buffer, cx| {
3190 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3191 });
3192 project
3193 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3194 .await
3195 .unwrap();
3196 assert_eq!(
3197 fs.load("/dir/file2".as_ref()).await.unwrap(),
3198 "one\r\ntwo\r\nthree\r\nfour\r\n",
3199 );
3200}
3201
3202#[gpui::test]
3203async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3204 init_test(cx);
3205
3206 let fs = FakeFs::new(cx.background());
3207 fs.insert_tree(
3208 "/the-dir",
3209 json!({
3210 "a.rs": "
3211 fn foo(mut v: Vec<usize>) {
3212 for x in &v {
3213 v.push(1);
3214 }
3215 }
3216 "
3217 .unindent(),
3218 }),
3219 )
3220 .await;
3221
3222 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3223 let buffer = project
3224 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3225 .await
3226 .unwrap();
3227
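    // Publish diagnostics containing two groups: a warning with one related hint,
    // and an error with two related hints. The hints also arrive as standalone
    // HINT diagnostics that point back at their primary via related_information.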
3228 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3229 let message = lsp::PublishDiagnosticsParams {
3230 uri: buffer_uri.clone(),
3231 diagnostics: vec![
3232 lsp::Diagnostic {
3233 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3234 severity: Some(DiagnosticSeverity::WARNING),
3235 message: "error 1".to_string(),
3236 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3237 location: lsp::Location {
3238 uri: buffer_uri.clone(),
3239 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3240 },
3241 message: "error 1 hint 1".to_string(),
3242 }]),
3243 ..Default::default()
3244 },
3245 lsp::Diagnostic {
3246 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3247 severity: Some(DiagnosticSeverity::HINT),
3248 message: "error 1 hint 1".to_string(),
3249 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3250 location: lsp::Location {
3251 uri: buffer_uri.clone(),
3252 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3253 },
3254 message: "original diagnostic".to_string(),
3255 }]),
3256 ..Default::default()
3257 },
3258 lsp::Diagnostic {
3259 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3260 severity: Some(DiagnosticSeverity::ERROR),
3261 message: "error 2".to_string(),
3262 related_information: Some(vec![
3263 lsp::DiagnosticRelatedInformation {
3264 location: lsp::Location {
3265 uri: buffer_uri.clone(),
3266 range: lsp::Range::new(
3267 lsp::Position::new(1, 13),
3268 lsp::Position::new(1, 15),
3269 ),
3270 },
3271 message: "error 2 hint 1".to_string(),
3272 },
3273 lsp::DiagnosticRelatedInformation {
3274 location: lsp::Location {
3275 uri: buffer_uri.clone(),
3276 range: lsp::Range::new(
3277 lsp::Position::new(1, 13),
3278 lsp::Position::new(1, 15),
3279 ),
3280 },
3281 message: "error 2 hint 2".to_string(),
3282 },
3283 ]),
3284 ..Default::default()
3285 },
3286 lsp::Diagnostic {
3287 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3288 severity: Some(DiagnosticSeverity::HINT),
3289 message: "error 2 hint 1".to_string(),
3290 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3291 location: lsp::Location {
3292 uri: buffer_uri.clone(),
3293 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3294 },
3295 message: "original diagnostic".to_string(),
3296 }]),
3297 ..Default::default()
3298 },
3299 lsp::Diagnostic {
3300 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3301 severity: Some(DiagnosticSeverity::HINT),
3302 message: "error 2 hint 2".to_string(),
3303 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3304 location: lsp::Location {
3305 uri: buffer_uri,
3306 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3307 },
3308 message: "original diagnostic".to_string(),
3309 }]),
3310 ..Default::default()
3311 },
3312 ],
3313 version: None,
3314 };
3315
3316 project
3317 .update(cx, |p, cx| {
3318 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3319 })
3320 .unwrap();
3321 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3322
3323 assert_eq!(
3324 buffer
3325 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3326 .collect::<Vec<_>>(),
3327 &[
3328 DiagnosticEntry {
3329 range: Point::new(1, 8)..Point::new(1, 9),
3330 diagnostic: Diagnostic {
3331 severity: DiagnosticSeverity::WARNING,
3332 message: "error 1".to_string(),
3333 group_id: 1,
3334 is_primary: true,
3335 ..Default::default()
3336 }
3337 },
3338 DiagnosticEntry {
3339 range: Point::new(1, 8)..Point::new(1, 9),
3340 diagnostic: Diagnostic {
3341 severity: DiagnosticSeverity::HINT,
3342 message: "error 1 hint 1".to_string(),
3343 group_id: 1,
3344 is_primary: false,
3345 ..Default::default()
3346 }
3347 },
3348 DiagnosticEntry {
3349 range: Point::new(1, 13)..Point::new(1, 15),
3350 diagnostic: Diagnostic {
3351 severity: DiagnosticSeverity::HINT,
3352 message: "error 2 hint 1".to_string(),
3353 group_id: 0,
3354 is_primary: false,
3355 ..Default::default()
3356 }
3357 },
3358 DiagnosticEntry {
3359 range: Point::new(1, 13)..Point::new(1, 15),
3360 diagnostic: Diagnostic {
3361 severity: DiagnosticSeverity::HINT,
3362 message: "error 2 hint 2".to_string(),
3363 group_id: 0,
3364 is_primary: false,
3365 ..Default::default()
3366 }
3367 },
3368 DiagnosticEntry {
3369 range: Point::new(2, 8)..Point::new(2, 17),
3370 diagnostic: Diagnostic {
3371 severity: DiagnosticSeverity::ERROR,
3372 message: "error 2".to_string(),
3373 group_id: 0,
3374 is_primary: true,
3375 ..Default::default()
3376 }
3377 }
3378 ]
3379 );
3380
3381 assert_eq!(
3382 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3383 &[
3384 DiagnosticEntry {
3385 range: Point::new(1, 13)..Point::new(1, 15),
3386 diagnostic: Diagnostic {
3387 severity: DiagnosticSeverity::HINT,
3388 message: "error 2 hint 1".to_string(),
3389 group_id: 0,
3390 is_primary: false,
3391 ..Default::default()
3392 }
3393 },
3394 DiagnosticEntry {
3395 range: Point::new(1, 13)..Point::new(1, 15),
3396 diagnostic: Diagnostic {
3397 severity: DiagnosticSeverity::HINT,
3398 message: "error 2 hint 2".to_string(),
3399 group_id: 0,
3400 is_primary: false,
3401 ..Default::default()
3402 }
3403 },
3404 DiagnosticEntry {
3405 range: Point::new(2, 8)..Point::new(2, 17),
3406 diagnostic: Diagnostic {
3407 severity: DiagnosticSeverity::ERROR,
3408 message: "error 2".to_string(),
3409 group_id: 0,
3410 is_primary: true,
3411 ..Default::default()
3412 }
3413 }
3414 ]
3415 );
3416
3417 assert_eq!(
3418 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3419 &[
3420 DiagnosticEntry {
3421 range: Point::new(1, 8)..Point::new(1, 9),
3422 diagnostic: Diagnostic {
3423 severity: DiagnosticSeverity::WARNING,
3424 message: "error 1".to_string(),
3425 group_id: 1,
3426 is_primary: true,
3427 ..Default::default()
3428 }
3429 },
3430 DiagnosticEntry {
3431 range: Point::new(1, 8)..Point::new(1, 9),
3432 diagnostic: Diagnostic {
3433 severity: DiagnosticSeverity::HINT,
3434 message: "error 1 hint 1".to_string(),
3435 group_id: 1,
3436 is_primary: false,
3437 ..Default::default()
3438 }
3439 },
3440 ]
3441 );
3442}
3443
3444#[gpui::test]
3445async fn test_rename(cx: &mut gpui::TestAppContext) {
3446 init_test(cx);
3447
3448 let mut language = Language::new(
3449 LanguageConfig {
3450 name: "Rust".into(),
3451 path_suffixes: vec!["rs".to_string()],
3452 ..Default::default()
3453 },
3454 Some(tree_sitter_rust::language()),
3455 );
3456 let mut fake_servers = language
3457 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3458 capabilities: lsp::ServerCapabilities {
3459 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3460 prepare_provider: Some(true),
3461 work_done_progress_options: Default::default(),
3462 })),
3463 ..Default::default()
3464 },
3465 ..Default::default()
3466 }))
3467 .await;
3468
3469 let fs = FakeFs::new(cx.background());
3470 fs.insert_tree(
3471 "/dir",
3472 json!({
3473 "one.rs": "const ONE: usize = 1;",
3474 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3475 }),
3476 )
3477 .await;
3478
3479 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3480 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3481 let buffer = project
3482 .update(cx, |project, cx| {
3483 project.open_local_buffer("/dir/one.rs", cx)
3484 })
3485 .await
3486 .unwrap();
3487
3488 let fake_server = fake_servers.next().await.unwrap();
3489
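    // prepare_rename should surface the server-provided range of the identifier
    // `ONE` (offsets 6..9).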
3490 let response = project.update(cx, |project, cx| {
3491 project.prepare_rename(buffer.clone(), 7, cx)
3492 });
3493 fake_server
3494 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3495 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3496 assert_eq!(params.position, lsp::Position::new(0, 7));
3497 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3498 lsp::Position::new(0, 6),
3499 lsp::Position::new(0, 9),
3500 ))))
3501 })
3502 .next()
3503 .await
3504 .unwrap();
3505 let range = response.await.unwrap().unwrap();
3506 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3507 assert_eq!(range, 6..9);
3508
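    // perform_rename applies the server's workspace edit across both files.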
3509 let response = project.update(cx, |project, cx| {
3510 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3511 });
3512 fake_server
3513 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3514 assert_eq!(
3515 params.text_document_position.text_document.uri.as_str(),
3516 "file:///dir/one.rs"
3517 );
3518 assert_eq!(
3519 params.text_document_position.position,
3520 lsp::Position::new(0, 7)
3521 );
3522 assert_eq!(params.new_name, "THREE");
3523 Ok(Some(lsp::WorkspaceEdit {
3524 changes: Some(
3525 [
3526 (
3527 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3528 vec![lsp::TextEdit::new(
3529 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3530 "THREE".to_string(),
3531 )],
3532 ),
3533 (
3534 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3535 vec![
3536 lsp::TextEdit::new(
3537 lsp::Range::new(
3538 lsp::Position::new(0, 24),
3539 lsp::Position::new(0, 27),
3540 ),
3541 "THREE".to_string(),
3542 ),
3543 lsp::TextEdit::new(
3544 lsp::Range::new(
3545 lsp::Position::new(0, 35),
3546 lsp::Position::new(0, 38),
3547 ),
3548 "THREE".to_string(),
3549 ),
3550 ],
3551 ),
3552 ]
3553 .into_iter()
3554 .collect(),
3555 ),
3556 ..Default::default()
3557 }))
3558 })
3559 .next()
3560 .await
3561 .unwrap();
3562 let mut transaction = response.await.unwrap().0;
3563 assert_eq!(transaction.len(), 2);
3564 assert_eq!(
3565 transaction
3566 .remove_entry(&buffer)
3567 .unwrap()
3568 .0
3569 .read_with(cx, |buffer, _| buffer.text()),
3570 "const THREE: usize = 1;"
3571 );
3572 assert_eq!(
3573 transaction
3574 .into_keys()
3575 .next()
3576 .unwrap()
3577 .read_with(cx, |buffer, _| buffer.text()),
3578 "const TWO: usize = one::THREE + one::THREE;"
3579 );
3580}
3581
3582#[gpui::test]
3583async fn test_search(cx: &mut gpui::TestAppContext) {
3584 init_test(cx);
3585
3586 let fs = FakeFs::new(cx.background());
3587 fs.insert_tree(
3588 "/dir",
3589 json!({
3590 "one.rs": "const ONE: usize = 1;",
3591 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3592 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3593 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3594 }),
3595 )
3596 .await;
3597 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3598 assert_eq!(
3599 search(
3600 &project,
3601 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
3602 cx
3603 )
3604 .await
3605 .unwrap(),
3606 HashMap::from_iter([
3607 ("two.rs".to_string(), vec![6..9]),
3608 ("three.rs".to_string(), vec![37..40])
3609 ])
3610 );
3611
3612 let buffer_4 = project
3613 .update(cx, |project, cx| {
3614 project.open_local_buffer("/dir/four.rs", cx)
3615 })
3616 .await
3617 .unwrap();
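    // Edit an open buffer without saving; the next search should reflect the
    // unsaved in-memory contents of four.rs.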
3618 buffer_4.update(cx, |buffer, cx| {
3619 let text = "two::TWO";
3620 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3621 });
3622
3623 assert_eq!(
3624 search(
3625 &project,
3626 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()).unwrap(),
3627 cx
3628 )
3629 .await
3630 .unwrap(),
3631 HashMap::from_iter([
3632 ("two.rs".to_string(), vec![6..9]),
3633 ("three.rs".to_string(), vec![37..40]),
3634 ("four.rs".to_string(), vec![25..28, 36..39])
3635 ])
3636 );
3637}
3638
3639#[gpui::test]
3640async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3641 init_test(cx);
3642
3643 let search_query = "file";
3644
3645 let fs = FakeFs::new(cx.background());
3646 fs.insert_tree(
3647 "/dir",
3648 json!({
3649 "one.rs": r#"// Rust file one"#,
3650 "one.ts": r#"// TypeScript file one"#,
3651 "two.rs": r#"// Rust file two"#,
3652 "two.ts": r#"// TypeScript file two"#,
3653 }),
3654 )
3655 .await;
3656 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3657
3658 assert!(
3659 search(
3660 &project,
3661 SearchQuery::text(
3662 search_query,
3663 false,
3664 true,
3665 vec![PathMatcher::new("*.odd").unwrap()],
3666 Vec::new()
3667 )
3668 .unwrap(),
3669 cx
3670 )
3671 .await
3672 .unwrap()
3673 .is_empty(),
3674 "If no inclusions match, no files should be returned"
3675 );
3676
3677 assert_eq!(
3678 search(
3679 &project,
3680 SearchQuery::text(
3681 search_query,
3682 false,
3683 true,
3684 vec![PathMatcher::new("*.rs").unwrap()],
3685 Vec::new()
3686 )
3687 .unwrap(),
3688 cx
3689 )
3690 .await
3691 .unwrap(),
3692 HashMap::from_iter([
3693 ("one.rs".to_string(), vec![8..12]),
3694 ("two.rs".to_string(), vec![8..12]),
3695 ]),
3696 "Rust only search should give only Rust files"
3697 );
3698
3699 assert_eq!(
3700 search(
3701 &project,
3702 SearchQuery::text(
3703 search_query,
3704 false,
3705 true,
3706 vec![
3707 PathMatcher::new("*.ts").unwrap(),
3708 PathMatcher::new("*.odd").unwrap(),
3709 ],
3710 Vec::new()
3711 ).unwrap(),
3712 cx
3713 )
3714 .await
3715 .unwrap(),
3716 HashMap::from_iter([
3717 ("one.ts".to_string(), vec![14..18]),
3718 ("two.ts".to_string(), vec![14..18]),
3719 ]),
3720 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3721 );
3722
3723 assert_eq!(
3724 search(
3725 &project,
3726 SearchQuery::text(
3727 search_query,
3728 false,
3729 true,
3730 vec![
3731 PathMatcher::new("*.rs").unwrap(),
3732 PathMatcher::new("*.ts").unwrap(),
3733 PathMatcher::new("*.odd").unwrap(),
3734 ],
3735 Vec::new()
3736 ).unwrap(),
3737 cx
3738 )
3739 .await
3740 .unwrap(),
3741 HashMap::from_iter([
3742 ("one.rs".to_string(), vec![8..12]),
3743 ("one.ts".to_string(), vec![14..18]),
3744 ("two.rs".to_string(), vec![8..12]),
3745 ("two.ts".to_string(), vec![14..18]),
3746 ]),
3747         "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3748 );
3749}
3750
3751#[gpui::test]
3752async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3753 init_test(cx);
3754
3755 let search_query = "file";
3756
3757 let fs = FakeFs::new(cx.background());
3758 fs.insert_tree(
3759 "/dir",
3760 json!({
3761 "one.rs": r#"// Rust file one"#,
3762 "one.ts": r#"// TypeScript file one"#,
3763 "two.rs": r#"// Rust file two"#,
3764 "two.ts": r#"// TypeScript file two"#,
3765 }),
3766 )
3767 .await;
3768 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3769
3770 assert_eq!(
3771 search(
3772 &project,
3773 SearchQuery::text(
3774 search_query,
3775 false,
3776 true,
3777 Vec::new(),
3778 vec![PathMatcher::new("*.odd").unwrap()],
3779 )
3780 .unwrap(),
3781 cx
3782 )
3783 .await
3784 .unwrap(),
3785 HashMap::from_iter([
3786 ("one.rs".to_string(), vec![8..12]),
3787 ("one.ts".to_string(), vec![14..18]),
3788 ("two.rs".to_string(), vec![8..12]),
3789 ("two.ts".to_string(), vec![14..18]),
3790 ]),
3791 "If no exclusions match, all files should be returned"
3792 );
3793
3794 assert_eq!(
3795 search(
3796 &project,
3797 SearchQuery::text(
3798 search_query,
3799 false,
3800 true,
3801 Vec::new(),
3802 vec![PathMatcher::new("*.rs").unwrap()],
3803 )
3804 .unwrap(),
3805 cx
3806 )
3807 .await
3808 .unwrap(),
3809 HashMap::from_iter([
3810 ("one.ts".to_string(), vec![14..18]),
3811 ("two.ts".to_string(), vec![14..18]),
3812 ]),
3813 "Rust exclusion search should give only TypeScript files"
3814 );
3815
3816 assert_eq!(
3817 search(
3818 &project,
3819 SearchQuery::text(
3820 search_query,
3821 false,
3822 true,
3823 Vec::new(),
3824 vec![
3825 PathMatcher::new("*.ts").unwrap(),
3826 PathMatcher::new("*.odd").unwrap(),
3827 ],
3828 ).unwrap(),
3829 cx
3830 )
3831 .await
3832 .unwrap(),
3833 HashMap::from_iter([
3834 ("one.rs".to_string(), vec![8..12]),
3835 ("two.rs".to_string(), vec![8..12]),
3836 ]),
3837 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3838 );
3839
3840 assert!(
3841 search(
3842 &project,
3843 SearchQuery::text(
3844 search_query,
3845 false,
3846 true,
3847 Vec::new(),
3848 vec![
3849 PathMatcher::new("*.rs").unwrap(),
3850 PathMatcher::new("*.ts").unwrap(),
3851 PathMatcher::new("*.odd").unwrap(),
3852 ],
3853 ).unwrap(),
3854 cx
3855 )
3856 .await
3857 .unwrap().is_empty(),
3858         "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
3859 );
3860}
3861
3862#[gpui::test]
3863async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3864 init_test(cx);
3865
3866 let search_query = "file";
3867
3868 let fs = FakeFs::new(cx.background());
3869 fs.insert_tree(
3870 "/dir",
3871 json!({
3872 "one.rs": r#"// Rust file one"#,
3873 "one.ts": r#"// TypeScript file one"#,
3874 "two.rs": r#"// Rust file two"#,
3875 "two.ts": r#"// TypeScript file two"#,
3876 }),
3877 )
3878 .await;
3879 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3880
3881 assert!(
3882 search(
3883 &project,
3884 SearchQuery::text(
3885 search_query,
3886 false,
3887 true,
3888 vec![PathMatcher::new("*.odd").unwrap()],
3889 vec![PathMatcher::new("*.odd").unwrap()],
3890 )
3891 .unwrap(),
3892 cx
3893 )
3894 .await
3895 .unwrap()
3896 .is_empty(),
3897         "If neither the inclusions nor the exclusions match any files, nothing should be returned"
3898 );
3899
3900 assert!(
3901 search(
3902 &project,
3903 SearchQuery::text(
3904 search_query,
3905 false,
3906 true,
3907 vec![PathMatcher::new("*.ts").unwrap()],
3908 vec![PathMatcher::new("*.ts").unwrap()],
3909 ).unwrap(),
3910 cx
3911 )
3912 .await
3913 .unwrap()
3914 .is_empty(),
3915         "If both the TypeScript inclusions and exclusions match, the exclusions should win and no files should be returned."
3916 );
3917
3918 assert!(
3919 search(
3920 &project,
3921 SearchQuery::text(
3922 search_query,
3923 false,
3924 true,
3925 vec![
3926 PathMatcher::new("*.ts").unwrap(),
3927 PathMatcher::new("*.odd").unwrap()
3928 ],
3929 vec![
3930 PathMatcher::new("*.ts").unwrap(),
3931 PathMatcher::new("*.odd").unwrap()
3932 ],
3933 )
3934 .unwrap(),
3935 cx
3936 )
3937 .await
3938 .unwrap()
3939 .is_empty(),
3940 "Non-matching inclusions and exclusions should not change that."
3941 );
3942
3943 assert_eq!(
3944 search(
3945 &project,
3946 SearchQuery::text(
3947 search_query,
3948 false,
3949 true,
3950 vec![
3951 PathMatcher::new("*.ts").unwrap(),
3952 PathMatcher::new("*.odd").unwrap()
3953 ],
3954 vec![
3955 PathMatcher::new("*.rs").unwrap(),
3956 PathMatcher::new("*.odd").unwrap()
3957 ],
3958 )
3959 .unwrap(),
3960 cx
3961 )
3962 .await
3963 .unwrap(),
3964 HashMap::from_iter([
3965 ("one.ts".to_string(), vec![14..18]),
3966 ("two.ts".to_string(), vec![14..18]),
3967 ]),
3968 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3969 );
3970}
3971
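// glob_literal_prefix should return everything up to the first path component
// that contains glob metacharacters.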
3972#[test]
3973fn test_glob_literal_prefix() {
3974 assert_eq!(glob_literal_prefix("**/*.js"), "");
3975 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
3976 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
3977 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
3978}
3979
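// Runs a project-wide search and flattens the streamed results into a map from
// file path to match offset ranges.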
3980async fn search(
3981 project: &ModelHandle<Project>,
3982 query: SearchQuery,
3983 cx: &mut gpui::TestAppContext,
3984) -> Result<HashMap<String, Vec<Range<usize>>>> {
3985 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
3986 let mut result = HashMap::default();
3987     while let Some((buffer, ranges)) = search_rx.next().await {
3988         result.entry(buffer).or_insert(ranges);
3989 }
3990 Ok(result
3991 .into_iter()
3992 .map(|(buffer, ranges)| {
3993 buffer.read_with(cx, |buffer, _| {
3994 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3995 let ranges = ranges
3996 .into_iter()
3997 .map(|range| range.to_offset(buffer))
3998 .collect::<Vec<_>>();
3999 (path, ranges)
4000 })
4001 })
4002 .collect())
4003}
4004
4005fn init_test(cx: &mut gpui::TestAppContext) {
4006 cx.foreground().forbid_parking();
4007
4008 cx.update(|cx| {
4009 cx.set_global(SettingsStore::test(cx));
4010 language::init(cx);
4011 Project::init_settings(cx);
4012 });
4013}