use crate::{worktree::WorktreeHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt};
use globset::Glob;
use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{
    language_settings::{AllLanguageSettings, LanguageSettingsContent},
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

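// Worktrees follow symlinks: a symlinked root and a directory symlinked inside the
// tree should both be traversed, resolving to the same underlying inodes.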
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.foreground().allow_parking();

    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
    project.read_with(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}

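// Per-directory `.zed/settings.json` files should override the worktree-level settings
// for files beneath them.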
#[gpui::test]
async fn test_managing_project_specific_settings(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#
            },
            "a": {
                "a.rs": "fn a() {\n    A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#
                },
                "b.rs": "fn b() {\n    B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());

    deterministic.run_until_parked();
    cx.read(|cx| {
        let tree = worktree.read(cx);

        let settings_a = language_settings(
            None,
            Some(
                &(File::for_entry(
                    tree.entry_for_path("a/a.rs").unwrap().clone(),
                    worktree.clone(),
                ) as _),
            ),
            cx,
        );
        let settings_b = language_settings(
            None,
            Some(
                &(File::for_entry(
                    tree.entry_for_path("b/b.rs").unwrap().clone(),
                    worktree.clone(),
                ) as _),
            ),
            cx,
        );

        assert_eq!(settings_a.tab_size.get(), 8);
        assert_eq!(settings_b.tab_size.get(), 2);
    });
}

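// Covers the full lifecycle of language servers for open buffers: starting servers lazily,
// routing open/change/save/close notifications, handling renames across languages, and
// restarting servers.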
#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}

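// Language servers can register `workspace/didChangeWatchedFiles`; the project should load
// watched ignored paths and forward only the matching FS events to the server.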
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(language));
    });
    cx.foreground().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.foreground().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, three of which match the watched patterns,
    // and two of which do not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.foreground().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}

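// Diagnostics published for single-file worktrees should be routed to the right buffer.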
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}

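// Diagnostics in a hidden (non-visible) worktree should reach the buffer but stay out of
// the project-wide diagnostic summaries.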
#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}

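// Disk-based diagnostic progress tokens should produce started/updated/finished events in order.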
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}

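// Restarting a server while its disk-based diagnostics are still running should not leave
// the project stuck reporting that diagnostics are in progress.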
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(0); 0]
        );
    });
}

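// Restarting a server should clear the diagnostics it had published.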
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.read_with(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.read_with(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}

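// A restarted server starts from buffer version 0, even if the old server reported an
// unknown buffer version.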
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.foreground().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}

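// Toggling `enable_language_server` in the settings should stop and start only the affected
// language's server.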
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}

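// Diagnostics should be translated through buffer edits that happen after the version they
// were reported against, including overlapping and out-of-order updates.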
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

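// Empty diagnostic ranges should be expanded to cover at least one character.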
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}

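// Diagnostics from different language servers for the same path should be counted separately.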
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}

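// `edits_from_lsp` should interpret edits against an older document version and remap them
// onto the current buffer contents.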
#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}

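// A large diff-style set of LSP edits that only touches adjacent lines should be reduced
// to the smallest equivalent buffer edits.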
1934#[gpui::test]
1935async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1936 init_test(cx);
1937
1938 let text = "
1939 use a::b;
1940 use a::c;
1941
1942 fn f() {
1943 b();
1944 c();
1945 }
1946 "
1947 .unindent();
1948
1949 let fs = FakeFs::new(cx.background());
1950 fs.insert_tree(
1951 "/dir",
1952 json!({
1953 "a.rs": text.clone(),
1954 }),
1955 )
1956 .await;
1957
1958 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1959 let buffer = project
1960 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1961 .await
1962 .unwrap();
1963
1964 // Simulate the language server sending us a small edit in the form of a very large diff.
1965 // Rust-analyzer does this when performing a merge-imports code action.
1966 let edits = project
1967 .update(cx, |project, cx| {
1968 project.edits_from_lsp(
1969 &buffer,
1970 [
1971 // Replace the first use statement without editing the semicolon.
1972 lsp::TextEdit {
1973 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
1974 new_text: "a::{b, c}".into(),
1975 },
1976 // Reinsert the remainder of the file between the semicolon and the final
1977 // newline of the file.
1978 lsp::TextEdit {
1979 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1980 new_text: "\n\n".into(),
1981 },
1982 lsp::TextEdit {
1983 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
1984 new_text: "
1985 fn f() {
1986 b();
1987 c();
1988 }"
1989 .unindent(),
1990 },
1991 // Delete everything after the first newline of the file.
1992 lsp::TextEdit {
1993 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
1994 new_text: "".into(),
1995 },
1996 ],
1997 LanguageServerId(0),
1998 None,
1999 cx,
2000 )
2001 })
2002 .await
2003 .unwrap();
2004
2005 buffer.update(cx, |buffer, cx| {
2006 let edits = edits
2007 .into_iter()
2008 .map(|(range, text)| {
2009 (
2010 range.start.to_point(buffer)..range.end.to_point(buffer),
2011 text,
2012 )
2013 })
2014 .collect::<Vec<_>>();
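        // Despite the very large diff sent by the server, the resulting edits should
        // be minimal: rewrite the first import and delete the now-redundant remainder.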
2015
2016 assert_eq!(
2017 edits,
2018 [
2019 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2020 (Point::new(1, 0)..Point::new(2, 0), "".into())
2021 ]
2022 );
2023
2024 for (range, new_text) in edits {
2025 buffer.edit([(range, new_text)], None, cx);
2026 }
2027 assert_eq!(
2028 buffer.text(),
2029 "
2030 use a::{b, c};
2031
2032 fn f() {
2033 b();
2034 c();
2035 }
2036 "
2037 .unindent()
2038 );
2039 });
2040}
2041
2042#[gpui::test]
2043async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
2044 init_test(cx);
2045
2046 let text = "
2047 use a::b;
2048 use a::c;
2049
2050 fn f() {
2051 b();
2052 c();
2053 }
2054 "
2055 .unindent();
2056
2057 let fs = FakeFs::new(cx.background());
2058 fs.insert_tree(
2059 "/dir",
2060 json!({
2061 "a.rs": text.clone(),
2062 }),
2063 )
2064 .await;
2065
2066 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2067 let buffer = project
2068 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2069 .await
2070 .unwrap();
2071
    // Simulate the language server sending us edits out of order, with ranges
    // that are sometimes inverted or that point to invalid locations.
2074 let edits = project
2075 .update(cx, |project, cx| {
2076 project.edits_from_lsp(
2077 &buffer,
2078 [
2079 lsp::TextEdit {
2080 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2081 new_text: "\n\n".into(),
2082 },
2083 lsp::TextEdit {
2084 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2085 new_text: "a::{b, c}".into(),
2086 },
2087 lsp::TextEdit {
2088 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2089 new_text: "".into(),
2090 },
2091 lsp::TextEdit {
2092 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2093 new_text: "
2094 fn f() {
2095 b();
2096 c();
2097 }"
2098 .unindent(),
2099 },
2100 ],
2101 LanguageServerId(0),
2102 None,
2103 cx,
2104 )
2105 })
2106 .await
2107 .unwrap();
2108
2109 buffer.update(cx, |buffer, cx| {
2110 let edits = edits
2111 .into_iter()
2112 .map(|(range, text)| {
2113 (
2114 range.start.to_point(buffer)..range.end.to_point(buffer),
2115 text,
2116 )
2117 })
2118 .collect::<Vec<_>>();
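        // The unordered, inverted, and out-of-bounds ranges should be normalized and
        // clipped to the buffer, yielding the same minimal edits as in the test above.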
2119
2120 assert_eq!(
2121 edits,
2122 [
2123 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2124 (Point::new(1, 0)..Point::new(2, 0), "".into())
2125 ]
2126 );
2127
2128 for (range, new_text) in edits {
2129 buffer.edit([(range, new_text)], None, cx);
2130 }
2131 assert_eq!(
2132 buffer.text(),
2133 "
2134 use a::{b, c};
2135
2136 fn f() {
2137 b();
2138 c();
2139 }
2140 "
2141 .unindent()
2142 );
2143 });
2144}
2145
2146fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2147 buffer: &Buffer,
2148 range: Range<T>,
2149) -> Vec<(String, Option<DiagnosticSeverity>)> {
2150 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2151 for chunk in buffer.snapshot().chunks(range, true) {
2152 if chunks.last().map_or(false, |prev_chunk| {
2153 prev_chunk.1 == chunk.diagnostic_severity
2154 }) {
2155 chunks.last_mut().unwrap().0.push_str(chunk.text);
2156 } else {
2157 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2158 }
2159 }
2160 chunks
2161}
2162
2163#[gpui::test(iterations = 10)]
2164async fn test_definition(cx: &mut gpui::TestAppContext) {
2165 init_test(cx);
2166
2167 let mut language = Language::new(
2168 LanguageConfig {
2169 name: "Rust".into(),
2170 path_suffixes: vec!["rs".to_string()],
2171 ..Default::default()
2172 },
2173 Some(tree_sitter_rust::language()),
2174 );
2175 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2176
2177 let fs = FakeFs::new(cx.background());
2178 fs.insert_tree(
2179 "/dir",
2180 json!({
2181 "a.rs": "const fn a() { A }",
2182 "b.rs": "const y: i32 = crate::a()",
2183 }),
2184 )
2185 .await;
2186
2187 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2188 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2189
2190 let buffer = project
2191 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2192 .await
2193 .unwrap();
2194
2195 let fake_server = fake_servers.next().await.unwrap();
2196 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2197 let params = params.text_document_position_params;
2198 assert_eq!(
2199 params.text_document.uri.to_file_path().unwrap(),
2200 Path::new("/dir/b.rs"),
2201 );
2202 assert_eq!(params.position, lsp::Position::new(0, 22));
2203
2204 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2205 lsp::Location::new(
2206 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2207 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2208 ),
2209 )))
2210 });
2211
2212 let mut definitions = project
2213 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2214 .await
2215 .unwrap();
2216
    // Assert that no new language server was started.
2218 cx.foreground().run_until_parked();
2219 assert!(fake_servers.try_next().is_err());
2220
2221 assert_eq!(definitions.len(), 1);
2222 let definition = definitions.pop().unwrap();
2223 cx.update(|cx| {
2224 let target_buffer = definition.target.buffer.read(cx);
2225 assert_eq!(
2226 target_buffer
2227 .file()
2228 .unwrap()
2229 .as_local()
2230 .unwrap()
2231 .abs_path(cx),
2232 Path::new("/dir/a.rs"),
2233 );
2234 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2235 assert_eq!(
2236 list_worktrees(&project, cx),
2237 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2238 );
2239
2240 drop(definition);
2241 });
2242 cx.read(|cx| {
2243 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2244 });
2245
2246 fn list_worktrees<'a>(
2247 project: &'a ModelHandle<Project>,
2248 cx: &'a AppContext,
2249 ) -> Vec<(&'a Path, bool)> {
2250 project
2251 .read(cx)
2252 .worktrees(cx)
2253 .map(|worktree| {
2254 let worktree = worktree.read(cx);
2255 (
2256 worktree.as_local().unwrap().abs_path().as_ref(),
2257 worktree.is_visible(),
2258 )
2259 })
2260 .collect::<Vec<_>>()
2261 }
2262}
2263
2264#[gpui::test]
2265async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2266 init_test(cx);
2267
2268 let mut language = Language::new(
2269 LanguageConfig {
2270 name: "TypeScript".into(),
2271 path_suffixes: vec!["ts".to_string()],
2272 ..Default::default()
2273 },
2274 Some(tree_sitter_typescript::language_typescript()),
2275 );
2276 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2277
2278 let fs = FakeFs::new(cx.background());
2279 fs.insert_tree(
2280 "/dir",
2281 json!({
2282 "a.ts": "",
2283 }),
2284 )
2285 .await;
2286
2287 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2288 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2289 let buffer = project
2290 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2291 .await
2292 .unwrap();
2293
2294 let fake_server = fake_language_servers.next().await.unwrap();
2295
2296 let text = "let a = b.fqn";
2297 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2298 let completions = project.update(cx, |project, cx| {
2299 project.completions(&buffer, text.len(), cx)
2300 });
2301
2302 fake_server
2303 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2304 Ok(Some(lsp::CompletionResponse::Array(vec![
2305 lsp::CompletionItem {
2306 label: "fullyQualifiedName?".into(),
2307 insert_text: Some("fullyQualifiedName".into()),
2308 ..Default::default()
2309 },
2310 ])))
2311 })
2312 .next()
2313 .await;
2314 let completions = completions.await.unwrap();
2315 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
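    // The completion item specifies no edit range, so the replaced range should be
    // inferred from the partial word "fqn" preceding the cursor.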
2316 assert_eq!(completions.len(), 1);
2317 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2318 assert_eq!(
2319 completions[0].old_range.to_offset(&snapshot),
2320 text.len() - 3..text.len()
2321 );
2322
2323 let text = "let a = \"atoms/cmp\"";
2324 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2325 let completions = project.update(cx, |project, cx| {
2326 project.completions(&buffer, text.len() - 1, cx)
2327 });
2328
2329 fake_server
2330 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2331 Ok(Some(lsp::CompletionResponse::Array(vec![
2332 lsp::CompletionItem {
2333 label: "component".into(),
2334 ..Default::default()
2335 },
2336 ])))
2337 })
2338 .next()
2339 .await;
2340 let completions = completions.await.unwrap();
2341 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
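    // With no edit range provided, the replaced range should cover "cmp", the
    // partial token before the cursor inside the string literal.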
2342 assert_eq!(completions.len(), 1);
2343 assert_eq!(completions[0].new_text, "component");
2344 assert_eq!(
2345 completions[0].old_range.to_offset(&snapshot),
2346 text.len() - 4..text.len() - 1
2347 );
2348}
2349
2350#[gpui::test]
2351async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2352 init_test(cx);
2353
2354 let mut language = Language::new(
2355 LanguageConfig {
2356 name: "TypeScript".into(),
2357 path_suffixes: vec!["ts".to_string()],
2358 ..Default::default()
2359 },
2360 Some(tree_sitter_typescript::language_typescript()),
2361 );
2362 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2363
2364 let fs = FakeFs::new(cx.background());
2365 fs.insert_tree(
2366 "/dir",
2367 json!({
2368 "a.ts": "",
2369 }),
2370 )
2371 .await;
2372
2373 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2374 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2375 let buffer = project
2376 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2377 .await
2378 .unwrap();
2379
2380 let fake_server = fake_language_servers.next().await.unwrap();
2381
2382 let text = "let a = b.fqn";
2383 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2384 let completions = project.update(cx, |project, cx| {
2385 project.completions(&buffer, text.len(), cx)
2386 });
2387
2388 fake_server
2389 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2390 Ok(Some(lsp::CompletionResponse::Array(vec![
2391 lsp::CompletionItem {
2392 label: "fullyQualifiedName?".into(),
2393 insert_text: Some("fully\rQualified\r\nName".into()),
2394 ..Default::default()
2395 },
2396 ])))
2397 })
2398 .next()
2399 .await;
2400 let completions = completions.await.unwrap();
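    // Carriage returns in the completion text should be normalized to plain newlines.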
2401 assert_eq!(completions.len(), 1);
2402 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2403}
2404
2405#[gpui::test(iterations = 10)]
2406async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2407 init_test(cx);
2408
2409 let mut language = Language::new(
2410 LanguageConfig {
2411 name: "TypeScript".into(),
2412 path_suffixes: vec!["ts".to_string()],
2413 ..Default::default()
2414 },
2415 None,
2416 );
2417 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2418
2419 let fs = FakeFs::new(cx.background());
2420 fs.insert_tree(
2421 "/dir",
2422 json!({
2423 "a.ts": "a",
2424 }),
2425 )
2426 .await;
2427
2428 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2429 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2430 let buffer = project
2431 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2432 .await
2433 .unwrap();
2434
2435 let fake_server = fake_language_servers.next().await.unwrap();
2436
    // The language server returns code actions that contain commands rather than edits.
2438 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2439 fake_server
2440 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2441 Ok(Some(vec![
2442 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2443 title: "The code action".into(),
2444 command: Some(lsp::Command {
2445 title: "The command".into(),
2446 command: "_the/command".into(),
2447 arguments: Some(vec![json!("the-argument")]),
2448 }),
2449 ..Default::default()
2450 }),
2451 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2452 title: "two".into(),
2453 ..Default::default()
2454 }),
2455 ]))
2456 })
2457 .next()
2458 .await;
2459
2460 let action = actions.await.unwrap()[0].clone();
2461 let apply = project.update(cx, |project, cx| {
2462 project.apply_code_action(buffer.clone(), action, true, cx)
2463 });
2464
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the action's command instead.
2467 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2468 |action, _| async move { Ok(action) },
2469 );
2470
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2473 fake_server
2474 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2475 let fake = fake_server.clone();
2476 move |params, _| {
2477 assert_eq!(params.command, "_the/command");
2478 let fake = fake.clone();
2479 async move {
2480 fake.server
2481 .request::<lsp::request::ApplyWorkspaceEdit>(
2482 lsp::ApplyWorkspaceEditParams {
2483 label: None,
2484 edit: lsp::WorkspaceEdit {
2485 changes: Some(
2486 [(
2487 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2488 vec![lsp::TextEdit {
2489 range: lsp::Range::new(
2490 lsp::Position::new(0, 0),
2491 lsp::Position::new(0, 0),
2492 ),
2493 new_text: "X".into(),
2494 }],
2495 )]
2496 .into_iter()
2497 .collect(),
2498 ),
2499 ..Default::default()
2500 },
2501 },
2502 )
2503 .await
2504 .unwrap();
2505 Ok(Some(json!(null)))
2506 }
2507 }
2508 })
2509 .next()
2510 .await;
2511
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2514 let transaction = apply.await.unwrap();
2515 assert!(transaction.0.contains_key(&buffer));
2516 buffer.update(cx, |buffer, cx| {
2517 assert_eq!(buffer.text(), "Xa");
2518 buffer.undo(cx);
2519 assert_eq!(buffer.text(), "a");
2520 });
2521}
2522
2523#[gpui::test(iterations = 10)]
2524async fn test_save_file(cx: &mut gpui::TestAppContext) {
2525 init_test(cx);
2526
2527 let fs = FakeFs::new(cx.background());
2528 fs.insert_tree(
2529 "/dir",
2530 json!({
2531 "file1": "the old contents",
2532 }),
2533 )
2534 .await;
2535
2536 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2537 let buffer = project
2538 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2539 .await
2540 .unwrap();
2541 buffer.update(cx, |buffer, cx| {
2542 assert_eq!(buffer.text(), "the old contents");
2543 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2544 });
2545
2546 project
2547 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2548 .await
2549 .unwrap();
2550
2551 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2552 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2553}
2554
2555#[gpui::test]
2556async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2557 init_test(cx);
2558
2559 let fs = FakeFs::new(cx.background());
2560 fs.insert_tree(
2561 "/dir",
2562 json!({
2563 "file1": "the old contents",
2564 }),
2565 )
2566 .await;
2567
2568 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2569 let buffer = project
2570 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2571 .await
2572 .unwrap();
2573 buffer.update(cx, |buffer, cx| {
2574 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2575 });
2576
2577 project
2578 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2579 .await
2580 .unwrap();
2581
2582 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2583 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2584}
2585
2586#[gpui::test]
2587async fn test_save_as(cx: &mut gpui::TestAppContext) {
2588 init_test(cx);
2589
2590 let fs = FakeFs::new(cx.background());
2591 fs.insert_tree("/dir", json!({})).await;
2592
2593 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2594
2595 let languages = project.read_with(cx, |project, _| project.languages().clone());
2596 languages.register(
2597 "/some/path",
2598 LanguageConfig {
2599 name: "Rust".into(),
2600 path_suffixes: vec!["rs".into()],
2601 ..Default::default()
2602 },
2603 tree_sitter_rust::language(),
2604 vec![],
2605 |_| Default::default(),
2606 );
2607
2608 let buffer = project.update(cx, |project, cx| {
2609 project.create_buffer("", None, cx).unwrap()
2610 });
2611 buffer.update(cx, |buffer, cx| {
2612 buffer.edit([(0..0, "abc")], None, cx);
2613 assert!(buffer.is_dirty());
2614 assert!(!buffer.has_conflict());
2615 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2616 });
2617 project
2618 .update(cx, |project, cx| {
2619 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2620 })
2621 .await
2622 .unwrap();
2623 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2624
2625 cx.foreground().run_until_parked();
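    // After the save, the buffer should be associated with the new path, be clean,
    // and pick up the "Rust" language from its ".rs" extension.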
2626 buffer.read_with(cx, |buffer, cx| {
2627 assert_eq!(
2628 buffer.file().unwrap().full_path(cx),
2629 Path::new("dir/file1.rs")
2630 );
2631 assert!(!buffer.is_dirty());
2632 assert!(!buffer.has_conflict());
2633 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2634 });
2635
2636 let opened_buffer = project
2637 .update(cx, |project, cx| {
2638 project.open_local_buffer("/dir/file1.rs", cx)
2639 })
2640 .await
2641 .unwrap();
2642 assert_eq!(opened_buffer, buffer);
2643}
2644
2645#[gpui::test(retries = 5)]
2646async fn test_rescan_and_remote_updates(
2647 deterministic: Arc<Deterministic>,
2648 cx: &mut gpui::TestAppContext,
2649) {
2650 init_test(cx);
2651 cx.foreground().allow_parking();
2652
2653 let dir = temp_tree(json!({
2654 "a": {
2655 "file1": "",
2656 "file2": "",
2657 "file3": "",
2658 },
2659 "b": {
2660 "c": {
2661 "file4": "",
2662 "file5": "",
2663 }
2664 }
2665 }));
2666
2667 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2668 let rpc = project.read_with(cx, |p, _| p.client.clone());
2669
2670 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2671 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2672 async move { buffer.await.unwrap() }
2673 };
2674 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2675 project.read_with(cx, |project, cx| {
2676 let tree = project.worktrees(cx).next().unwrap();
2677 tree.read(cx)
2678 .entry_for_path(path)
2679 .unwrap_or_else(|| panic!("no entry for path {}", path))
2680 .id
2681 })
2682 };
2683
2684 let buffer2 = buffer_for_path("a/file2", cx).await;
2685 let buffer3 = buffer_for_path("a/file3", cx).await;
2686 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2687 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2688
2689 let file2_id = id_for_path("a/file2", cx);
2690 let file3_id = id_for_path("a/file3", cx);
2691 let file4_id = id_for_path("b/c/file4", cx);
2692
2693 // Create a remote copy of this worktree.
2694 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2695
2696 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
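    // Record the local worktree's update messages so they can be replayed into the
    // remote worktree later in the test.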
2697
2698 let updates = Arc::new(Mutex::new(Vec::new()));
2699 tree.update(cx, |tree, cx| {
2700 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2701 let updates = updates.clone();
2702 move |update| {
2703 updates.lock().push(update);
2704 async { true }
2705 }
2706 });
2707 });
2708
2709 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2710 deterministic.run_until_parked();
2711
2712 cx.read(|cx| {
2713 assert!(!buffer2.read(cx).is_dirty());
2714 assert!(!buffer3.read(cx).is_dirty());
2715 assert!(!buffer4.read(cx).is_dirty());
2716 assert!(!buffer5.read(cx).is_dirty());
2717 });
2718
2719 // Rename and delete files and directories.
2720 tree.flush_fs_events(cx).await;
2721 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2722 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2723 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2724 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2725 tree.flush_fs_events(cx).await;
2726
2727 let expected_paths = vec![
2728 "a",
2729 "a/file1",
2730 "a/file2.new",
2731 "b",
2732 "d",
2733 "d/file3",
2734 "d/file4",
2735 ];
2736
2737 cx.read(|app| {
2738 assert_eq!(
2739 tree.read(app)
2740 .paths()
2741 .map(|p| p.to_str().unwrap())
2742 .collect::<Vec<_>>(),
2743 expected_paths
2744 );
2745
2746 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2747 assert_eq!(id_for_path("d/file3", cx), file3_id);
2748 assert_eq!(id_for_path("d/file4", cx), file4_id);
2749
2750 assert_eq!(
2751 buffer2.read(app).file().unwrap().path().as_ref(),
2752 Path::new("a/file2.new")
2753 );
2754 assert_eq!(
2755 buffer3.read(app).file().unwrap().path().as_ref(),
2756 Path::new("d/file3")
2757 );
2758 assert_eq!(
2759 buffer4.read(app).file().unwrap().path().as_ref(),
2760 Path::new("d/file4")
2761 );
2762 assert_eq!(
2763 buffer5.read(app).file().unwrap().path().as_ref(),
2764 Path::new("b/c/file5")
2765 );
2766
2767 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2768 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2769 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2770 assert!(buffer5.read(app).file().unwrap().is_deleted());
2771 });
2772
2773 // Update the remote worktree. Check that it becomes consistent with the
2774 // local worktree.
2775 deterministic.run_until_parked();
2776 remote.update(cx, |remote, _| {
2777 for update in updates.lock().drain(..) {
2778 remote.as_remote_mut().unwrap().update_from_remote(update);
2779 }
2780 });
2781 deterministic.run_until_parked();
2782 remote.read_with(cx, |remote, _| {
2783 assert_eq!(
2784 remote
2785 .paths()
2786 .map(|p| p.to_str().unwrap())
2787 .collect::<Vec<_>>(),
2788 expected_paths
2789 );
2790 });
2791}
2792
2793#[gpui::test(iterations = 10)]
2794async fn test_buffer_identity_across_renames(
2795 deterministic: Arc<Deterministic>,
2796 cx: &mut gpui::TestAppContext,
2797) {
2798 init_test(cx);
2799
2800 let fs = FakeFs::new(cx.background());
2801 fs.insert_tree(
2802 "/dir",
2803 json!({
2804 "a": {
2805 "file1": "",
2806 }
2807 }),
2808 )
2809 .await;
2810
2811 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2812 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2813 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2814
2815 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2816 project.read_with(cx, |project, cx| {
2817 let tree = project.worktrees(cx).next().unwrap();
2818 tree.read(cx)
2819 .entry_for_path(path)
2820 .unwrap_or_else(|| panic!("no entry for path {}", path))
2821 .id
2822 })
2823 };
2824
2825 let dir_id = id_for_path("a", cx);
2826 let file_id = id_for_path("a/file1", cx);
2827 let buffer = project
2828 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2829 .await
2830 .unwrap();
2831 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
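    // Renaming the parent directory should preserve the directory and file entry
    // ids, and the open buffer should remain associated with the renamed file.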
2832
2833 project
2834 .update(cx, |project, cx| {
2835 project.rename_entry(dir_id, Path::new("b"), cx)
2836 })
2837 .unwrap()
2838 .await
2839 .unwrap();
2840 deterministic.run_until_parked();
2841 assert_eq!(id_for_path("b", cx), dir_id);
2842 assert_eq!(id_for_path("b/file1", cx), file_id);
2843 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2844}
2845
2846#[gpui::test]
2847async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2848 init_test(cx);
2849
2850 let fs = FakeFs::new(cx.background());
2851 fs.insert_tree(
2852 "/dir",
2853 json!({
2854 "a.txt": "a-contents",
2855 "b.txt": "b-contents",
2856 }),
2857 )
2858 .await;
2859
2860 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2861
2862 // Spawn multiple tasks to open paths, repeating some paths.
2863 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2864 (
2865 p.open_local_buffer("/dir/a.txt", cx),
2866 p.open_local_buffer("/dir/b.txt", cx),
2867 p.open_local_buffer("/dir/a.txt", cx),
2868 )
2869 });
2870
2871 let buffer_a_1 = buffer_a_1.await.unwrap();
2872 let buffer_a_2 = buffer_a_2.await.unwrap();
2873 let buffer_b = buffer_b.await.unwrap();
2874 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2875 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2876
2877 // There is only one buffer per path.
2878 let buffer_a_id = buffer_a_1.id();
2879 assert_eq!(buffer_a_2.id(), buffer_a_id);
2880
2881 // Open the same path again while it is still open.
2882 drop(buffer_a_1);
2883 let buffer_a_3 = project
2884 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2885 .await
2886 .unwrap();
2887
2888 // There's still only one buffer per path.
2889 assert_eq!(buffer_a_3.id(), buffer_a_id);
2890}
2891
2892#[gpui::test]
2893async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2894 init_test(cx);
2895
2896 let fs = FakeFs::new(cx.background());
2897 fs.insert_tree(
2898 "/dir",
2899 json!({
2900 "file1": "abc",
2901 "file2": "def",
2902 "file3": "ghi",
2903 }),
2904 )
2905 .await;
2906
2907 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2908
2909 let buffer1 = project
2910 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2911 .await
2912 .unwrap();
2913 let events = Rc::new(RefCell::new(Vec::new()));
2914
    // Initially, the buffer isn't dirty.
2916 buffer1.update(cx, |buffer, cx| {
2917 cx.subscribe(&buffer1, {
2918 let events = events.clone();
2919 move |_, _, event, _| match event {
2920 BufferEvent::Operation(_) => {}
2921 _ => events.borrow_mut().push(event.clone()),
2922 }
2923 })
2924 .detach();
2925
2926 assert!(!buffer.is_dirty());
2927 assert!(events.borrow().is_empty());
2928
2929 buffer.edit([(1..2, "")], None, cx);
2930 });
2931
    // After the first edit, the buffer is dirty and emits a DirtyChanged event.
2933 buffer1.update(cx, |buffer, cx| {
2934 assert!(buffer.text() == "ac");
2935 assert!(buffer.is_dirty());
2936 assert_eq!(
2937 *events.borrow(),
2938 &[language::Event::Edited, language::Event::DirtyChanged]
2939 );
2940 events.borrow_mut().clear();
2941 buffer.did_save(
2942 buffer.version(),
2943 buffer.as_rope().fingerprint(),
2944 buffer.file().unwrap().mtime(),
2945 cx,
2946 );
2947 });
2948
    // After saving, the buffer is no longer dirty and emits a Saved event.
2950 buffer1.update(cx, |buffer, cx| {
2951 assert!(!buffer.is_dirty());
2952 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2953 events.borrow_mut().clear();
2954
2955 buffer.edit([(1..1, "B")], None, cx);
2956 buffer.edit([(2..2, "D")], None, cx);
2957 });
2958
    // After editing again, the buffer is dirty and emits another DirtyChanged event.
2960 buffer1.update(cx, |buffer, cx| {
2961 assert!(buffer.text() == "aBDc");
2962 assert!(buffer.is_dirty());
2963 assert_eq!(
2964 *events.borrow(),
2965 &[
2966 language::Event::Edited,
2967 language::Event::DirtyChanged,
2968 language::Event::Edited,
2969 ],
2970 );
2971 events.borrow_mut().clear();
2972
2973 // After restoring the buffer to its previously-saved state,
2974 // the buffer is not considered dirty anymore.
2975 buffer.edit([(1..3, "")], None, cx);
2976 assert!(buffer.text() == "ac");
2977 assert!(!buffer.is_dirty());
2978 });
2979
2980 assert_eq!(
2981 *events.borrow(),
2982 &[language::Event::Edited, language::Event::DirtyChanged]
2983 );
2984
2985 // When a file is deleted, the buffer is considered dirty.
2986 let events = Rc::new(RefCell::new(Vec::new()));
2987 let buffer2 = project
2988 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2989 .await
2990 .unwrap();
2991 buffer2.update(cx, |_, cx| {
2992 cx.subscribe(&buffer2, {
2993 let events = events.clone();
2994 move |_, _, event, _| events.borrow_mut().push(event.clone())
2995 })
2996 .detach();
2997 });
2998
2999 fs.remove_file("/dir/file2".as_ref(), Default::default())
3000 .await
3001 .unwrap();
3002 cx.foreground().run_until_parked();
3003 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
3004 assert_eq!(
3005 *events.borrow(),
3006 &[
3007 language::Event::DirtyChanged,
3008 language::Event::FileHandleChanged
3009 ]
3010 );
3011
    // When a file that is already dirty is deleted, we don't emit another DirtyChanged event.
3013 let events = Rc::new(RefCell::new(Vec::new()));
3014 let buffer3 = project
3015 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3016 .await
3017 .unwrap();
3018 buffer3.update(cx, |_, cx| {
3019 cx.subscribe(&buffer3, {
3020 let events = events.clone();
3021 move |_, _, event, _| events.borrow_mut().push(event.clone())
3022 })
3023 .detach();
3024 });
3025
3026 buffer3.update(cx, |buffer, cx| {
3027 buffer.edit([(0..0, "x")], None, cx);
3028 });
3029 events.borrow_mut().clear();
3030 fs.remove_file("/dir/file3".as_ref(), Default::default())
3031 .await
3032 .unwrap();
3033 cx.foreground().run_until_parked();
3034 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
3035 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
3036}
3037
3038#[gpui::test]
3039async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3040 init_test(cx);
3041
3042 let initial_contents = "aaa\nbbbbb\nc\n";
3043 let fs = FakeFs::new(cx.background());
3044 fs.insert_tree(
3045 "/dir",
3046 json!({
3047 "the-file": initial_contents,
3048 }),
3049 )
3050 .await;
3051 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3052 let buffer = project
3053 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3054 .await
3055 .unwrap();
3056
3057 let anchors = (0..3)
3058 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3059 .collect::<Vec<_>>();
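    // These anchors should survive the diff-based reload below and end up at the
    // corresponding positions in the new text.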
3060
3061 // Change the file on disk, adding two new lines of text, and removing
3062 // one line.
3063 buffer.read_with(cx, |buffer, _| {
3064 assert!(!buffer.is_dirty());
3065 assert!(!buffer.has_conflict());
3066 });
3067 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3068 fs.save(
3069 "/dir/the-file".as_ref(),
3070 &new_contents.into(),
3071 LineEnding::Unix,
3072 )
3073 .await
3074 .unwrap();
3075
3076 // Because the buffer was not modified, it is reloaded from disk. Its
3077 // contents are edited according to the diff between the old and new
3078 // file contents.
3079 cx.foreground().run_until_parked();
3080 buffer.update(cx, |buffer, _| {
3081 assert_eq!(buffer.text(), new_contents);
3082 assert!(!buffer.is_dirty());
3083 assert!(!buffer.has_conflict());
3084
3085 let anchor_positions = anchors
3086 .iter()
3087 .map(|anchor| anchor.to_point(&*buffer))
3088 .collect::<Vec<_>>();
3089 assert_eq!(
3090 anchor_positions,
3091 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3092 );
3093 });
3094
3095 // Modify the buffer
3096 buffer.update(cx, |buffer, cx| {
3097 buffer.edit([(0..0, " ")], None, cx);
3098 assert!(buffer.is_dirty());
3099 assert!(!buffer.has_conflict());
3100 });
3101
3102 // Change the file on disk again, adding blank lines to the beginning.
3103 fs.save(
3104 "/dir/the-file".as_ref(),
3105 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3106 LineEnding::Unix,
3107 )
3108 .await
3109 .unwrap();
3110
3111 // Because the buffer is modified, it doesn't reload from disk, but is
3112 // marked as having a conflict.
3113 cx.foreground().run_until_parked();
3114 buffer.read_with(cx, |buffer, _| {
3115 assert!(buffer.has_conflict());
3116 });
3117}
3118
3119#[gpui::test]
3120async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3121 init_test(cx);
3122
3123 let fs = FakeFs::new(cx.background());
3124 fs.insert_tree(
3125 "/dir",
3126 json!({
3127 "file1": "a\nb\nc\n",
3128 "file2": "one\r\ntwo\r\nthree\r\n",
3129 }),
3130 )
3131 .await;
3132
3133 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3134 let buffer1 = project
3135 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3136 .await
3137 .unwrap();
3138 let buffer2 = project
3139 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3140 .await
3141 .unwrap();
3142
3143 buffer1.read_with(cx, |buffer, _| {
3144 assert_eq!(buffer.text(), "a\nb\nc\n");
3145 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3146 });
3147 buffer2.read_with(cx, |buffer, _| {
3148 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3149 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3150 });
3151
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3154 fs.save(
3155 "/dir/file1".as_ref(),
3156 &"aaa\nb\nc\n".into(),
3157 LineEnding::Windows,
3158 )
3159 .await
3160 .unwrap();
3161 cx.foreground().run_until_parked();
3162 buffer1.read_with(cx, |buffer, _| {
3163 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3164 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3165 });
3166
3167 // Save a file with windows line endings. The file is written correctly.
3168 buffer2.update(cx, |buffer, cx| {
3169 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3170 });
3171 project
3172 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3173 .await
3174 .unwrap();
3175 assert_eq!(
3176 fs.load("/dir/file2".as_ref()).await.unwrap(),
3177 "one\r\ntwo\r\nthree\r\nfour\r\n",
3178 );
3179}
3180
3181#[gpui::test]
3182async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3183 init_test(cx);
3184
3185 let fs = FakeFs::new(cx.background());
3186 fs.insert_tree(
3187 "/the-dir",
3188 json!({
3189 "a.rs": "
3190 fn foo(mut v: Vec<usize>) {
3191 for x in &v {
3192 v.push(1);
3193 }
3194 }
3195 "
3196 .unindent(),
3197 }),
3198 )
3199 .await;
3200
3201 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3202 let buffer = project
3203 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3204 .await
3205 .unwrap();
3206
3207 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3208 let message = lsp::PublishDiagnosticsParams {
3209 uri: buffer_uri.clone(),
3210 diagnostics: vec![
3211 lsp::Diagnostic {
3212 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3213 severity: Some(DiagnosticSeverity::WARNING),
3214 message: "error 1".to_string(),
3215 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3216 location: lsp::Location {
3217 uri: buffer_uri.clone(),
3218 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3219 },
3220 message: "error 1 hint 1".to_string(),
3221 }]),
3222 ..Default::default()
3223 },
3224 lsp::Diagnostic {
3225 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3226 severity: Some(DiagnosticSeverity::HINT),
3227 message: "error 1 hint 1".to_string(),
3228 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3229 location: lsp::Location {
3230 uri: buffer_uri.clone(),
3231 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3232 },
3233 message: "original diagnostic".to_string(),
3234 }]),
3235 ..Default::default()
3236 },
3237 lsp::Diagnostic {
3238 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3239 severity: Some(DiagnosticSeverity::ERROR),
3240 message: "error 2".to_string(),
3241 related_information: Some(vec![
3242 lsp::DiagnosticRelatedInformation {
3243 location: lsp::Location {
3244 uri: buffer_uri.clone(),
3245 range: lsp::Range::new(
3246 lsp::Position::new(1, 13),
3247 lsp::Position::new(1, 15),
3248 ),
3249 },
3250 message: "error 2 hint 1".to_string(),
3251 },
3252 lsp::DiagnosticRelatedInformation {
3253 location: lsp::Location {
3254 uri: buffer_uri.clone(),
3255 range: lsp::Range::new(
3256 lsp::Position::new(1, 13),
3257 lsp::Position::new(1, 15),
3258 ),
3259 },
3260 message: "error 2 hint 2".to_string(),
3261 },
3262 ]),
3263 ..Default::default()
3264 },
3265 lsp::Diagnostic {
3266 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3267 severity: Some(DiagnosticSeverity::HINT),
3268 message: "error 2 hint 1".to_string(),
3269 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3270 location: lsp::Location {
3271 uri: buffer_uri.clone(),
3272 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3273 },
3274 message: "original diagnostic".to_string(),
3275 }]),
3276 ..Default::default()
3277 },
3278 lsp::Diagnostic {
3279 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3280 severity: Some(DiagnosticSeverity::HINT),
3281 message: "error 2 hint 2".to_string(),
3282 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3283 location: lsp::Location {
3284 uri: buffer_uri,
3285 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3286 },
3287 message: "original diagnostic".to_string(),
3288 }]),
3289 ..Default::default()
3290 },
3291 ],
3292 version: None,
3293 };
3294
3295 project
3296 .update(cx, |p, cx| {
3297 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3298 })
3299 .unwrap();
3300 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
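    // The related-information entries should be grouped with their primary
    // diagnostics: group 0 is "error 2" with its two hints, and group 1 is
    // "error 1" with its hint.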
3301
3302 assert_eq!(
3303 buffer
3304 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3305 .collect::<Vec<_>>(),
3306 &[
3307 DiagnosticEntry {
3308 range: Point::new(1, 8)..Point::new(1, 9),
3309 diagnostic: Diagnostic {
3310 severity: DiagnosticSeverity::WARNING,
3311 message: "error 1".to_string(),
3312 group_id: 1,
3313 is_primary: true,
3314 ..Default::default()
3315 }
3316 },
3317 DiagnosticEntry {
3318 range: Point::new(1, 8)..Point::new(1, 9),
3319 diagnostic: Diagnostic {
3320 severity: DiagnosticSeverity::HINT,
3321 message: "error 1 hint 1".to_string(),
3322 group_id: 1,
3323 is_primary: false,
3324 ..Default::default()
3325 }
3326 },
3327 DiagnosticEntry {
3328 range: Point::new(1, 13)..Point::new(1, 15),
3329 diagnostic: Diagnostic {
3330 severity: DiagnosticSeverity::HINT,
3331 message: "error 2 hint 1".to_string(),
3332 group_id: 0,
3333 is_primary: false,
3334 ..Default::default()
3335 }
3336 },
3337 DiagnosticEntry {
3338 range: Point::new(1, 13)..Point::new(1, 15),
3339 diagnostic: Diagnostic {
3340 severity: DiagnosticSeverity::HINT,
3341 message: "error 2 hint 2".to_string(),
3342 group_id: 0,
3343 is_primary: false,
3344 ..Default::default()
3345 }
3346 },
3347 DiagnosticEntry {
3348 range: Point::new(2, 8)..Point::new(2, 17),
3349 diagnostic: Diagnostic {
3350 severity: DiagnosticSeverity::ERROR,
3351 message: "error 2".to_string(),
3352 group_id: 0,
3353 is_primary: true,
3354 ..Default::default()
3355 }
3356 }
3357 ]
3358 );
3359
3360 assert_eq!(
3361 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3362 &[
3363 DiagnosticEntry {
3364 range: Point::new(1, 13)..Point::new(1, 15),
3365 diagnostic: Diagnostic {
3366 severity: DiagnosticSeverity::HINT,
3367 message: "error 2 hint 1".to_string(),
3368 group_id: 0,
3369 is_primary: false,
3370 ..Default::default()
3371 }
3372 },
3373 DiagnosticEntry {
3374 range: Point::new(1, 13)..Point::new(1, 15),
3375 diagnostic: Diagnostic {
3376 severity: DiagnosticSeverity::HINT,
3377 message: "error 2 hint 2".to_string(),
3378 group_id: 0,
3379 is_primary: false,
3380 ..Default::default()
3381 }
3382 },
3383 DiagnosticEntry {
3384 range: Point::new(2, 8)..Point::new(2, 17),
3385 diagnostic: Diagnostic {
3386 severity: DiagnosticSeverity::ERROR,
3387 message: "error 2".to_string(),
3388 group_id: 0,
3389 is_primary: true,
3390 ..Default::default()
3391 }
3392 }
3393 ]
3394 );
3395
3396 assert_eq!(
3397 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3398 &[
3399 DiagnosticEntry {
3400 range: Point::new(1, 8)..Point::new(1, 9),
3401 diagnostic: Diagnostic {
3402 severity: DiagnosticSeverity::WARNING,
3403 message: "error 1".to_string(),
3404 group_id: 1,
3405 is_primary: true,
3406 ..Default::default()
3407 }
3408 },
3409 DiagnosticEntry {
3410 range: Point::new(1, 8)..Point::new(1, 9),
3411 diagnostic: Diagnostic {
3412 severity: DiagnosticSeverity::HINT,
3413 message: "error 1 hint 1".to_string(),
3414 group_id: 1,
3415 is_primary: false,
3416 ..Default::default()
3417 }
3418 },
3419 ]
3420 );
3421}
3422
3423#[gpui::test]
3424async fn test_rename(cx: &mut gpui::TestAppContext) {
3425 init_test(cx);
3426
3427 let mut language = Language::new(
3428 LanguageConfig {
3429 name: "Rust".into(),
3430 path_suffixes: vec!["rs".to_string()],
3431 ..Default::default()
3432 },
3433 Some(tree_sitter_rust::language()),
3434 );
3435 let mut fake_servers = language
3436 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3437 capabilities: lsp::ServerCapabilities {
3438 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3439 prepare_provider: Some(true),
3440 work_done_progress_options: Default::default(),
3441 })),
3442 ..Default::default()
3443 },
3444 ..Default::default()
3445 }))
3446 .await;
3447
3448 let fs = FakeFs::new(cx.background());
3449 fs.insert_tree(
3450 "/dir",
3451 json!({
3452 "one.rs": "const ONE: usize = 1;",
3453 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3454 }),
3455 )
3456 .await;
3457
3458 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3459 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3460 let buffer = project
3461 .update(cx, |project, cx| {
3462 project.open_local_buffer("/dir/one.rs", cx)
3463 })
3464 .await
3465 .unwrap();
3466
3467 let fake_server = fake_servers.next().await.unwrap();
3468
3469 let response = project.update(cx, |project, cx| {
3470 project.prepare_rename(buffer.clone(), 7, cx)
3471 });
3472 fake_server
3473 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3474 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3475 assert_eq!(params.position, lsp::Position::new(0, 7));
3476 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3477 lsp::Position::new(0, 6),
3478 lsp::Position::new(0, 9),
3479 ))))
3480 })
3481 .next()
3482 .await
3483 .unwrap();
3484 let range = response.await.unwrap().unwrap();
3485 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3486 assert_eq!(range, 6..9);
3487
3488 let response = project.update(cx, |project, cx| {
3489 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3490 });
3491 fake_server
3492 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3493 assert_eq!(
3494 params.text_document_position.text_document.uri.as_str(),
3495 "file:///dir/one.rs"
3496 );
3497 assert_eq!(
3498 params.text_document_position.position,
3499 lsp::Position::new(0, 7)
3500 );
3501 assert_eq!(params.new_name, "THREE");
3502 Ok(Some(lsp::WorkspaceEdit {
3503 changes: Some(
3504 [
3505 (
3506 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3507 vec![lsp::TextEdit::new(
3508 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3509 "THREE".to_string(),
3510 )],
3511 ),
3512 (
3513 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3514 vec![
3515 lsp::TextEdit::new(
3516 lsp::Range::new(
3517 lsp::Position::new(0, 24),
3518 lsp::Position::new(0, 27),
3519 ),
3520 "THREE".to_string(),
3521 ),
3522 lsp::TextEdit::new(
3523 lsp::Range::new(
3524 lsp::Position::new(0, 35),
3525 lsp::Position::new(0, 38),
3526 ),
3527 "THREE".to_string(),
3528 ),
3529 ],
3530 ),
3531 ]
3532 .into_iter()
3533 .collect(),
3534 ),
3535 ..Default::default()
3536 }))
3537 })
3538 .next()
3539 .await
3540 .unwrap();
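    // The rename should produce edits in both files: the buffer that was already
    // open for "one.rs", and "two.rs", which is opened in order to apply its edits.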
3541 let mut transaction = response.await.unwrap().0;
3542 assert_eq!(transaction.len(), 2);
3543 assert_eq!(
3544 transaction
3545 .remove_entry(&buffer)
3546 .unwrap()
3547 .0
3548 .read_with(cx, |buffer, _| buffer.text()),
3549 "const THREE: usize = 1;"
3550 );
3551 assert_eq!(
3552 transaction
3553 .into_keys()
3554 .next()
3555 .unwrap()
3556 .read_with(cx, |buffer, _| buffer.text()),
3557 "const TWO: usize = one::THREE + one::THREE;"
3558 );
3559}
3560
3561#[gpui::test]
3562async fn test_search(cx: &mut gpui::TestAppContext) {
3563 init_test(cx);
3564
3565 let fs = FakeFs::new(cx.background());
3566 fs.insert_tree(
3567 "/dir",
3568 json!({
3569 "one.rs": "const ONE: usize = 1;",
3570 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3571 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3572 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3573 }),
3574 )
3575 .await;
3576 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3577 assert_eq!(
3578 search(
3579 &project,
3580 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3581 cx
3582 )
3583 .await
3584 .unwrap(),
3585 HashMap::from_iter([
3586 ("two.rs".to_string(), vec![6..9]),
3587 ("three.rs".to_string(), vec![37..40])
3588 ])
3589 );
3590
3591 let buffer_4 = project
3592 .update(cx, |project, cx| {
3593 project.open_local_buffer("/dir/four.rs", cx)
3594 })
3595 .await
3596 .unwrap();
3597 buffer_4.update(cx, |buffer, cx| {
3598 let text = "two::TWO";
3599 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3600 });
3601
3602 assert_eq!(
3603 search(
3604 &project,
3605 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3606 cx
3607 )
3608 .await
3609 .unwrap(),
3610 HashMap::from_iter([
3611 ("two.rs".to_string(), vec![6..9]),
3612 ("three.rs".to_string(), vec![37..40]),
3613 ("four.rs".to_string(), vec![25..28, 36..39])
3614 ])
3615 );
3616}
3617
3618#[gpui::test]
3619async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3620 init_test(cx);
3621
3622 let search_query = "file";
3623
3624 let fs = FakeFs::new(cx.background());
3625 fs.insert_tree(
3626 "/dir",
3627 json!({
3628 "one.rs": r#"// Rust file one"#,
3629 "one.ts": r#"// TypeScript file one"#,
3630 "two.rs": r#"// Rust file two"#,
3631 "two.ts": r#"// TypeScript file two"#,
3632 }),
3633 )
3634 .await;
3635 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3636
3637 assert!(
3638 search(
3639 &project,
3640 SearchQuery::text(
3641 search_query,
3642 false,
3643 true,
3644 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3645 Vec::new()
3646 ),
3647 cx
3648 )
3649 .await
3650 .unwrap()
3651 .is_empty(),
3652 "If no inclusions match, no files should be returned"
3653 );
3654
3655 assert_eq!(
3656 search(
3657 &project,
3658 SearchQuery::text(
3659 search_query,
3660 false,
3661 true,
3662 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3663 Vec::new()
3664 ),
3665 cx
3666 )
3667 .await
3668 .unwrap(),
3669 HashMap::from_iter([
3670 ("one.rs".to_string(), vec![8..12]),
3671 ("two.rs".to_string(), vec![8..12]),
3672 ]),
3673 "Rust only search should give only Rust files"
3674 );
3675
3676 assert_eq!(
3677 search(
3678 &project,
3679 SearchQuery::text(
3680 search_query,
3681 false,
3682 true,
3683 vec![
3684 Glob::new("*.ts").unwrap().compile_matcher(),
3685 Glob::new("*.odd").unwrap().compile_matcher(),
3686 ],
3687 Vec::new()
3688 ),
3689 cx
3690 )
3691 .await
3692 .unwrap(),
3693 HashMap::from_iter([
3694 ("one.ts".to_string(), vec![14..18]),
3695 ("two.ts".to_string(), vec![14..18]),
3696 ]),
3697 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3698 );
3699
3700 assert_eq!(
3701 search(
3702 &project,
3703 SearchQuery::text(
3704 search_query,
3705 false,
3706 true,
3707 vec![
3708 Glob::new("*.rs").unwrap().compile_matcher(),
3709 Glob::new("*.ts").unwrap().compile_matcher(),
3710 Glob::new("*.odd").unwrap().compile_matcher(),
3711 ],
3712 Vec::new()
3713 ),
3714 cx
3715 )
3716 .await
3717 .unwrap(),
3718 HashMap::from_iter([
3719 ("one.rs".to_string(), vec![8..12]),
3720 ("one.ts".to_string(), vec![14..18]),
3721 ("two.rs".to_string(), vec![8..12]),
3722 ("two.ts".to_string(), vec![14..18]),
3723 ]),
3724 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3725 );
3726}
3727
3728#[gpui::test]
3729async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3730 init_test(cx);
3731
3732 let search_query = "file";
3733
3734 let fs = FakeFs::new(cx.background());
3735 fs.insert_tree(
3736 "/dir",
3737 json!({
3738 "one.rs": r#"// Rust file one"#,
3739 "one.ts": r#"// TypeScript file one"#,
3740 "two.rs": r#"// Rust file two"#,
3741 "two.ts": r#"// TypeScript file two"#,
3742 }),
3743 )
3744 .await;
3745 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3746
3747 assert_eq!(
3748 search(
3749 &project,
3750 SearchQuery::text(
3751 search_query,
3752 false,
3753 true,
3754 Vec::new(),
3755 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3756 ),
3757 cx
3758 )
3759 .await
3760 .unwrap(),
3761 HashMap::from_iter([
3762 ("one.rs".to_string(), vec![8..12]),
3763 ("one.ts".to_string(), vec![14..18]),
3764 ("two.rs".to_string(), vec![8..12]),
3765 ("two.ts".to_string(), vec![14..18]),
3766 ]),
3767 "If no exclusions match, all files should be returned"
3768 );
3769
3770 assert_eq!(
3771 search(
3772 &project,
3773 SearchQuery::text(
3774 search_query,
3775 false,
3776 true,
3777 Vec::new(),
3778 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3779 ),
3780 cx
3781 )
3782 .await
3783 .unwrap(),
3784 HashMap::from_iter([
3785 ("one.ts".to_string(), vec![14..18]),
3786 ("two.ts".to_string(), vec![14..18]),
3787 ]),
3788 "Rust exclusion search should give only TypeScript files"
3789 );
3790
3791 assert_eq!(
3792 search(
3793 &project,
3794 SearchQuery::text(
3795 search_query,
3796 false,
3797 true,
3798 Vec::new(),
3799 vec![
3800 Glob::new("*.ts").unwrap().compile_matcher(),
3801 Glob::new("*.odd").unwrap().compile_matcher(),
3802 ],
3803 ),
3804 cx
3805 )
3806 .await
3807 .unwrap(),
3808 HashMap::from_iter([
3809 ("one.rs".to_string(), vec![8..12]),
3810 ("two.rs".to_string(), vec![8..12]),
3811 ]),
3812 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3813 );
3814
3815 assert!(
3816 search(
3817 &project,
3818 SearchQuery::text(
3819 search_query,
3820 false,
3821 true,
3822 Vec::new(),
3823 vec![
3824 Glob::new("*.rs").unwrap().compile_matcher(),
3825 Glob::new("*.ts").unwrap().compile_matcher(),
3826 Glob::new("*.odd").unwrap().compile_matcher(),
3827 ],
3828 ),
3829 cx
3830 )
3831 .await
3832 .unwrap().is_empty(),
3833 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
3834 );
3835}
3836
3837#[gpui::test]
3838async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3839 init_test(cx);
3840
3841 let search_query = "file";
3842
3843 let fs = FakeFs::new(cx.background());
3844 fs.insert_tree(
3845 "/dir",
3846 json!({
3847 "one.rs": r#"// Rust file one"#,
3848 "one.ts": r#"// TypeScript file one"#,
3849 "two.rs": r#"// Rust file two"#,
3850 "two.ts": r#"// TypeScript file two"#,
3851 }),
3852 )
3853 .await;
3854 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3855
3856 assert!(
3857 search(
3858 &project,
3859 SearchQuery::text(
3860 search_query,
3861 false,
3862 true,
3863 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3864 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3865 ),
3866 cx
3867 )
3868 .await
3869 .unwrap()
3870 .is_empty(),
3871 "If both no exclusions and inclusions match, exclusions should win and return nothing"
3872 );
3873
3874 assert!(
3875 search(
3876 &project,
3877 SearchQuery::text(
3878 search_query,
3879 false,
3880 true,
3881 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3882 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3883 ),
3884 cx
3885 )
3886 .await
3887 .unwrap()
3888 .is_empty(),
3889 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
3890 );
3891
3892 assert!(
3893 search(
3894 &project,
3895 SearchQuery::text(
3896 search_query,
3897 false,
3898 true,
3899 vec![
3900 Glob::new("*.ts").unwrap().compile_matcher(),
3901 Glob::new("*.odd").unwrap().compile_matcher()
3902 ],
3903 vec![
3904 Glob::new("*.ts").unwrap().compile_matcher(),
3905 Glob::new("*.odd").unwrap().compile_matcher()
3906 ],
3907 ),
3908 cx
3909 )
3910 .await
3911 .unwrap()
3912 .is_empty(),
3913 "Non-matching inclusions and exclusions should not change that."
3914 );
3915
3916 assert_eq!(
3917 search(
3918 &project,
3919 SearchQuery::text(
3920 search_query,
3921 false,
3922 true,
3923 vec![
3924 Glob::new("*.ts").unwrap().compile_matcher(),
3925 Glob::new("*.odd").unwrap().compile_matcher()
3926 ],
3927 vec![
3928 Glob::new("*.rs").unwrap().compile_matcher(),
3929 Glob::new("*.odd").unwrap().compile_matcher()
3930 ],
3931 ),
3932 cx
3933 )
3934 .await
3935 .unwrap(),
3936 HashMap::from_iter([
3937 ("one.ts".to_string(), vec![14..18]),
3938 ("two.ts".to_string(), vec![14..18]),
3939 ]),
3940 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3941 );
3942}
3943
3944#[test]
3945fn test_glob_literal_prefix() {
3946 assert_eq!(glob_literal_prefix("**/*.js"), "");
3947 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
3948 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
3949 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
3950}
3951
3952async fn search(
3953 project: &ModelHandle<Project>,
3954 query: SearchQuery,
3955 cx: &mut gpui::TestAppContext,
3956) -> Result<HashMap<String, Vec<Range<usize>>>> {
3957 let results = project
3958 .update(cx, |project, cx| project.search(query, cx))
3959 .await?;
3960
3961 Ok(results
3962 .into_iter()
3963 .map(|(buffer, ranges)| {
3964 buffer.read_with(cx, |buffer, _| {
3965 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3966 let ranges = ranges
3967 .into_iter()
3968 .map(|range| range.to_offset(buffer))
3969 .collect::<Vec<_>>();
3970 (path, ranges)
3971 })
3972 })
3973 .collect())
3974}
3975
3976fn init_test(cx: &mut gpui::TestAppContext) {
3977 cx.foreground().forbid_parking();
3978
3979 cx.update(|cx| {
3980 cx.set_global(SettingsStore::test(cx));
3981 language::init(cx);
3982 Project::init_settings(cx);
3983 });
3984}