use crate::{worktree::WorktreeHandle, Event, *};
use fs::{FakeFs, LineEnding, RealFs};
use futures::{future, StreamExt};
use globset::Glob;
use gpui::{executor::Deterministic, test::subscribe, AppContext};
use language::{
    language_settings::{AllLanguageSettings, LanguageSettingsContent},
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
    OffsetRangeExt, Point, ToPoint,
};
use lsp::Url;
use parking_lot::Mutex;
use pretty_assertions::assert_eq;
use serde_json::json;
use std::{cell::RefCell, os::unix, rc::Rc, task::Poll};
use unindent::Unindent as _;
use util::{assert_set_eq, test::temp_tree};

#[cfg(test)]
#[ctor::ctor]
fn init_logger() {
    if std::env::var("RUST_LOG").is_ok() {
        env_logger::init();
    }
}

#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.foreground().allow_parking();

    let dir = temp_tree(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    let root_link_path = dir.path().join("root_link");
    unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
    unix::fs::symlink(
        &dir.path().join("root/fennel"),
        &dir.path().join("root/finnochio"),
    )
    .unwrap();

    let project = Project::test(Arc::new(RealFs), [root_link_path.as_ref()], cx).await;
    project.read_with(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        assert_eq!(tree.file_count(), 5);
        assert_eq!(
            tree.inode_for_path("fennel/grape"),
            tree.inode_for_path("finnochio/grape")
        );
    });
}

#[gpui::test]
async fn test_managing_language_servers(
    deterministic: Arc<Deterministic>,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let mut rust_language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut json_language = Language::new(
        LanguageConfig {
            name: "JSON".into(),
            path_suffixes: vec!["json".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;
    let mut fake_json_servers = json_language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(json_language));
        project.languages.add(Arc::new(rust_language));
    });
    deterministic.run_until_parked();
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.read_with(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A JSON language server is started, and it is notified only about the JSON buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.read_with(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing the language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure the Rust document is reopened in the new Rust language server.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.read_with(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure the JSON documents are reopened in the new JSON language server.
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.read_with(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}

#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/the-root",
        json!({
            "a.rs": "",
            "b.rs": "",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(language));
    });
    cx.foreground().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/a.rs", cx)
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![lsp::FileSystemWatcher {
                            glob_pattern: "/the-root/*.{rs,c}".to_string(),
                            kind: None,
                        }],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.foreground().run_until_parked();
    assert_eq!(file_changes.lock().len(), 0);

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/b.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.foreground().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}

#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    buffer_a.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}

#[gpui::test]
async fn test_hidden_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/root",
        json!({
            "dir": {
                "a.rs": "let a = 1;",
            },
            "other.rs": "let b = c;"
        }),
    )
    .await;

    let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;

    let (worktree, _) = project
        .update(cx, |project, cx| {
            project.find_or_create_local_worktree("/root/other.rs", false, cx)
        })
        .await
        .unwrap();
    let worktree_id = worktree.read_with(cx, |tree, _| tree.id());

    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/root/other.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "unknown variable 'c'".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    let buffer = project
        .update(cx, |project, cx| project.open_buffer((worktree_id, ""), cx))
        .await
        .unwrap();
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let b = ", None),
                ("c", Some(DiagnosticSeverity::ERROR)),
                (";", None),
            ]
        );
    });

    project.read_with(cx, |project, cx| {
        assert_eq!(project.diagnostic_summaries(cx).next(), None);
        assert_eq!(project.diagnostic_summary(cx).error_count, 0);
    });
}

#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let worktree_id = project.read_with(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = subscribe(&project, cx);

    let fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.read_with(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.foreground().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}

#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";
    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = subscribe(&project, cx);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.read_with(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(0); 0]
        );
    });
}

#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.read_with(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.read_with(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}

#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "the-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.foreground().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}

#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut rust = Language::new(
        LanguageConfig {
            name: Arc::from("Rust"),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_rust_servers = rust
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        }))
        .await;
    let mut js = Language::new(
        LanguageConfig {
            name: Arc::from("JavaScript"),
            path_suffixes: vec!["js".to_string()],
            ..Default::default()
        },
        None,
    );
    let mut fake_js_servers = js
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        }))
        .await;

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| {
        project.languages.add(Arc::new(rust));
        project.languages.add(Arc::new(js));
    });

    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}

#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language
        .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        }))
        .await;

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    buffer.next_notification(cx).await;
    cx.foreground().run_until_parked();
    buffer.read_with(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}

#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.read_with(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}

#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            project.diagnostic_summary(cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let mut language = Language::new(
        LanguageConfig {
            name: "Rust".into(),
            path_suffixes: vec!["rs".to_string()],
            ..Default::default()
        },
        Some(tree_sitter_rust::language()),
    );
    let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    project.update(cx, |project, _| project.languages.add(Arc::new(language)));
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_edits_from_lsp_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

#[gpui::test]
async fn test_invalid_edits_from_lsp(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.background());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}

/// Collects the text of `buffer` within `range`, split into runs that share the
/// same diagnostic severity (`None` for text without a diagnostic). Adjacent
/// chunks with equal severity are merged into a single entry.
fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
    buffer: &Buffer,
    range: Range<T>,
) -> Vec<(String, Option<DiagnosticSeverity>)> {
    let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
    for chunk in buffer.snapshot().chunks(range, true) {
        if chunks.last().map_or(false, |prev_chunk| {
            prev_chunk.1 == chunk.diagnostic_severity
        }) {
            chunks.last_mut().unwrap().0.push_str(chunk.text);
        } else {
            chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
        }
    }
    chunks
}

1997#[gpui::test(iterations = 10)]
1998async fn test_definition(cx: &mut gpui::TestAppContext) {
1999 init_test(cx);
2000
2001 let mut language = Language::new(
2002 LanguageConfig {
2003 name: "Rust".into(),
2004 path_suffixes: vec!["rs".to_string()],
2005 ..Default::default()
2006 },
2007 Some(tree_sitter_rust::language()),
2008 );
2009 let mut fake_servers = language.set_fake_lsp_adapter(Default::default()).await;
2010
2011 let fs = FakeFs::new(cx.background());
2012 fs.insert_tree(
2013 "/dir",
2014 json!({
2015 "a.rs": "const fn a() { A }",
2016 "b.rs": "const y: i32 = crate::a()",
2017 }),
2018 )
2019 .await;
2020
2021 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2022 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2023
2024 let buffer = project
2025 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2026 .await
2027 .unwrap();
2028
2029 let fake_server = fake_servers.next().await.unwrap();
2030 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2031 let params = params.text_document_position_params;
2032 assert_eq!(
2033 params.text_document.uri.to_file_path().unwrap(),
2034 Path::new("/dir/b.rs"),
2035 );
2036 assert_eq!(params.position, lsp::Position::new(0, 22));
2037
2038 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2039 lsp::Location::new(
2040 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2041 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2042 ),
2043 )))
2044 });
2045
2046 let mut definitions = project
2047 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2048 .await
2049 .unwrap();
2050
    // Assert that no new language server was started.
2052 cx.foreground().run_until_parked();
2053 assert!(fake_servers.try_next().is_err());
2054
2055 assert_eq!(definitions.len(), 1);
2056 let definition = definitions.pop().unwrap();
2057 cx.update(|cx| {
2058 let target_buffer = definition.target.buffer.read(cx);
2059 assert_eq!(
2060 target_buffer
2061 .file()
2062 .unwrap()
2063 .as_local()
2064 .unwrap()
2065 .abs_path(cx),
2066 Path::new("/dir/a.rs"),
2067 );
2068 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2069 assert_eq!(
2070 list_worktrees(&project, cx),
2071 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2072 );
2073
2074 drop(definition);
2075 });
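    // Dropping the definition should release the non-visible worktree that was
    // created for the target file, leaving only the original worktree.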
2076 cx.read(|cx| {
2077 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2078 });
2079
2080 fn list_worktrees<'a>(
2081 project: &'a ModelHandle<Project>,
2082 cx: &'a AppContext,
2083 ) -> Vec<(&'a Path, bool)> {
2084 project
2085 .read(cx)
2086 .worktrees(cx)
2087 .map(|worktree| {
2088 let worktree = worktree.read(cx);
2089 (
2090 worktree.as_local().unwrap().abs_path().as_ref(),
2091 worktree.is_visible(),
2092 )
2093 })
2094 .collect::<Vec<_>>()
2095 }
2096}
2097
2098#[gpui::test]
2099async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2100 init_test(cx);
2101
2102 let mut language = Language::new(
2103 LanguageConfig {
2104 name: "TypeScript".into(),
2105 path_suffixes: vec!["ts".to_string()],
2106 ..Default::default()
2107 },
2108 Some(tree_sitter_typescript::language_typescript()),
2109 );
2110 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2111
2112 let fs = FakeFs::new(cx.background());
2113 fs.insert_tree(
2114 "/dir",
2115 json!({
2116 "a.ts": "",
2117 }),
2118 )
2119 .await;
2120
2121 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2122 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2123 let buffer = project
2124 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2125 .await
2126 .unwrap();
2127
2128 let fake_server = fake_language_servers.next().await.unwrap();
2129
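    // The server's completion item has no edit range, so the range of the word
    // being completed ("fqn") should be used as the range to replace.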
2130 let text = "let a = b.fqn";
2131 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2132 let completions = project.update(cx, |project, cx| {
2133 project.completions(&buffer, text.len(), cx)
2134 });
2135
2136 fake_server
2137 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2138 Ok(Some(lsp::CompletionResponse::Array(vec![
2139 lsp::CompletionItem {
2140 label: "fullyQualifiedName?".into(),
2141 insert_text: Some("fullyQualifiedName".into()),
2142 ..Default::default()
2143 },
2144 ])))
2145 })
2146 .next()
2147 .await;
2148 let completions = completions.await.unwrap();
2149 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2150 assert_eq!(completions.len(), 1);
2151 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2152 assert_eq!(
2153 completions[0].old_range.to_offset(&snapshot),
2154 text.len() - 3..text.len()
2155 );
2156
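    // When completing inside a string, only the partial word before the cursor
    // ("cmp") should be replaced, not the rest of the string.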
2157 let text = "let a = \"atoms/cmp\"";
2158 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2159 let completions = project.update(cx, |project, cx| {
2160 project.completions(&buffer, text.len() - 1, cx)
2161 });
2162
2163 fake_server
2164 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2165 Ok(Some(lsp::CompletionResponse::Array(vec![
2166 lsp::CompletionItem {
2167 label: "component".into(),
2168 ..Default::default()
2169 },
2170 ])))
2171 })
2172 .next()
2173 .await;
2174 let completions = completions.await.unwrap();
2175 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
2176 assert_eq!(completions.len(), 1);
2177 assert_eq!(completions[0].new_text, "component");
2178 assert_eq!(
2179 completions[0].old_range.to_offset(&snapshot),
2180 text.len() - 4..text.len() - 1
2181 );
2182}
2183
2184#[gpui::test]
2185async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2186 init_test(cx);
2187
2188 let mut language = Language::new(
2189 LanguageConfig {
2190 name: "TypeScript".into(),
2191 path_suffixes: vec!["ts".to_string()],
2192 ..Default::default()
2193 },
2194 Some(tree_sitter_typescript::language_typescript()),
2195 );
2196 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2197
2198 let fs = FakeFs::new(cx.background());
2199 fs.insert_tree(
2200 "/dir",
2201 json!({
2202 "a.ts": "",
2203 }),
2204 )
2205 .await;
2206
2207 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2208 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2209 let buffer = project
2210 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2211 .await
2212 .unwrap();
2213
2214 let fake_server = fake_language_servers.next().await.unwrap();
2215
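    // The carriage returns in the server's insert text should be normalized to
    // plain newlines in the resulting completion text.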
2216 let text = "let a = b.fqn";
2217 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2218 let completions = project.update(cx, |project, cx| {
2219 project.completions(&buffer, text.len(), cx)
2220 });
2221
2222 fake_server
2223 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2224 Ok(Some(lsp::CompletionResponse::Array(vec![
2225 lsp::CompletionItem {
2226 label: "fullyQualifiedName?".into(),
2227 insert_text: Some("fully\rQualified\r\nName".into()),
2228 ..Default::default()
2229 },
2230 ])))
2231 })
2232 .next()
2233 .await;
2234 let completions = completions.await.unwrap();
2235 assert_eq!(completions.len(), 1);
2236 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2237}
2238
2239#[gpui::test(iterations = 10)]
2240async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2241 init_test(cx);
2242
2243 let mut language = Language::new(
2244 LanguageConfig {
2245 name: "TypeScript".into(),
2246 path_suffixes: vec!["ts".to_string()],
2247 ..Default::default()
2248 },
2249 None,
2250 );
2251 let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
2252
2253 let fs = FakeFs::new(cx.background());
2254 fs.insert_tree(
2255 "/dir",
2256 json!({
2257 "a.ts": "a",
2258 }),
2259 )
2260 .await;
2261
2262 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2263 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
2264 let buffer = project
2265 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2266 .await
2267 .unwrap();
2268
2269 let fake_server = fake_language_servers.next().await.unwrap();
2270
    // The language server returns code actions that contain commands, not edits.
2272 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2273 fake_server
2274 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2275 Ok(Some(vec![
2276 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2277 title: "The code action".into(),
2278 command: Some(lsp::Command {
2279 title: "The command".into(),
2280 command: "_the/command".into(),
2281 arguments: Some(vec![json!("the-argument")]),
2282 }),
2283 ..Default::default()
2284 }),
2285 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2286 title: "two".into(),
2287 ..Default::default()
2288 }),
2289 ]))
2290 })
2291 .next()
2292 .await;
2293
2294 let action = actions.await.unwrap()[0].clone();
2295 let apply = project.update(cx, |project, cx| {
2296 project.apply_code_action(buffer.clone(), action, true, cx)
2297 });
2298
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the action's command instead.
2301 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2302 |action, _| async move { Ok(action) },
2303 );
2304
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2307 fake_server
2308 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2309 let fake = fake_server.clone();
2310 move |params, _| {
2311 assert_eq!(params.command, "_the/command");
2312 let fake = fake.clone();
2313 async move {
2314 fake.server
2315 .request::<lsp::request::ApplyWorkspaceEdit>(
2316 lsp::ApplyWorkspaceEditParams {
2317 label: None,
2318 edit: lsp::WorkspaceEdit {
2319 changes: Some(
2320 [(
2321 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2322 vec![lsp::TextEdit {
2323 range: lsp::Range::new(
2324 lsp::Position::new(0, 0),
2325 lsp::Position::new(0, 0),
2326 ),
2327 new_text: "X".into(),
2328 }],
2329 )]
2330 .into_iter()
2331 .collect(),
2332 ),
2333 ..Default::default()
2334 },
2335 },
2336 )
2337 .await
2338 .unwrap();
2339 Ok(Some(json!(null)))
2340 }
2341 }
2342 })
2343 .next()
2344 .await;
2345
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2348 let transaction = apply.await.unwrap();
2349 assert!(transaction.0.contains_key(&buffer));
2350 buffer.update(cx, |buffer, cx| {
2351 assert_eq!(buffer.text(), "Xa");
2352 buffer.undo(cx);
2353 assert_eq!(buffer.text(), "a");
2354 });
2355}
2356
2357#[gpui::test(iterations = 10)]
2358async fn test_save_file(cx: &mut gpui::TestAppContext) {
2359 init_test(cx);
2360
2361 let fs = FakeFs::new(cx.background());
2362 fs.insert_tree(
2363 "/dir",
2364 json!({
2365 "file1": "the old contents",
2366 }),
2367 )
2368 .await;
2369
2370 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2371 let buffer = project
2372 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2373 .await
2374 .unwrap();
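    // Insert a large amount of text (about 160 KB), then save the buffer and
    // verify that the file on disk matches the buffer's contents.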
2375 buffer.update(cx, |buffer, cx| {
2376 assert_eq!(buffer.text(), "the old contents");
2377 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2378 });
2379
2380 project
2381 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2382 .await
2383 .unwrap();
2384
2385 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2386 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2387}
2388
2389#[gpui::test]
2390async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2391 init_test(cx);
2392
2393 let fs = FakeFs::new(cx.background());
2394 fs.insert_tree(
2395 "/dir",
2396 json!({
2397 "file1": "the old contents",
2398 }),
2399 )
2400 .await;
2401
2402 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2403 let buffer = project
2404 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2405 .await
2406 .unwrap();
2407 buffer.update(cx, |buffer, cx| {
2408 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2409 });
2410
2411 project
2412 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2413 .await
2414 .unwrap();
2415
2416 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2417 assert_eq!(new_text, buffer.read_with(cx, |buffer, _| buffer.text()));
2418}
2419
2420#[gpui::test]
2421async fn test_save_as(cx: &mut gpui::TestAppContext) {
2422 init_test(cx);
2423
2424 let fs = FakeFs::new(cx.background());
2425 fs.insert_tree("/dir", json!({})).await;
2426
2427 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2428
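    // Register a Rust language so that the buffer can pick it up once it is
    // saved with an `.rs` extension.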
2429 let languages = project.read_with(cx, |project, _| project.languages().clone());
2430 languages.register(
2431 "/some/path",
2432 LanguageConfig {
2433 name: "Rust".into(),
2434 path_suffixes: vec!["rs".into()],
2435 ..Default::default()
2436 },
2437 tree_sitter_rust::language(),
2438 vec![],
2439 |_| Default::default(),
2440 );
2441
2442 let buffer = project.update(cx, |project, cx| {
2443 project.create_buffer("", None, cx).unwrap()
2444 });
2445 buffer.update(cx, |buffer, cx| {
2446 buffer.edit([(0..0, "abc")], None, cx);
2447 assert!(buffer.is_dirty());
2448 assert!(!buffer.has_conflict());
2449 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2450 });
2451 project
2452 .update(cx, |project, cx| {
2453 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2454 })
2455 .await
2456 .unwrap();
2457 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2458
2459 cx.foreground().run_until_parked();
2460 buffer.read_with(cx, |buffer, cx| {
2461 assert_eq!(
2462 buffer.file().unwrap().full_path(cx),
2463 Path::new("dir/file1.rs")
2464 );
2465 assert!(!buffer.is_dirty());
2466 assert!(!buffer.has_conflict());
2467 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2468 });
2469
2470 let opened_buffer = project
2471 .update(cx, |project, cx| {
2472 project.open_local_buffer("/dir/file1.rs", cx)
2473 })
2474 .await
2475 .unwrap();
2476 assert_eq!(opened_buffer, buffer);
2477}
2478
2479#[gpui::test(retries = 5)]
2480async fn test_rescan_and_remote_updates(
2481 deterministic: Arc<Deterministic>,
2482 cx: &mut gpui::TestAppContext,
2483) {
2484 init_test(cx);
2485 cx.foreground().allow_parking();
2486
2487 let dir = temp_tree(json!({
2488 "a": {
2489 "file1": "",
2490 "file2": "",
2491 "file3": "",
2492 },
2493 "b": {
2494 "c": {
2495 "file4": "",
2496 "file5": "",
2497 }
2498 }
2499 }));
2500
2501 let project = Project::test(Arc::new(RealFs), [dir.path()], cx).await;
2502 let rpc = project.read_with(cx, |p, _| p.client.clone());
2503
2504 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2505 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2506 async move { buffer.await.unwrap() }
2507 };
2508 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2509 project.read_with(cx, |project, cx| {
2510 let tree = project.worktrees(cx).next().unwrap();
2511 tree.read(cx)
2512 .entry_for_path(path)
2513 .unwrap_or_else(|| panic!("no entry for path {}", path))
2514 .id
2515 })
2516 };
2517
2518 let buffer2 = buffer_for_path("a/file2", cx).await;
2519 let buffer3 = buffer_for_path("a/file3", cx).await;
2520 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2521 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2522
2523 let file2_id = id_for_path("a/file2", cx);
2524 let file3_id = id_for_path("a/file3", cx);
2525 let file4_id = id_for_path("b/c/file4", cx);
2526
2527 // Create a remote copy of this worktree.
2528 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2529
2530 let metadata = tree.read_with(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2531
2532 let updates = Arc::new(Mutex::new(Vec::new()));
2533 tree.update(cx, |tree, cx| {
2534 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2535 let updates = updates.clone();
2536 move |update| {
2537 updates.lock().push(update);
2538 async { true }
2539 }
2540 });
2541 });
2542
2543 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2544 deterministic.run_until_parked();
2545
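    // None of the buffers should be dirty, since they haven't been edited.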
2546 cx.read(|cx| {
2547 assert!(!buffer2.read(cx).is_dirty());
2548 assert!(!buffer3.read(cx).is_dirty());
2549 assert!(!buffer4.read(cx).is_dirty());
2550 assert!(!buffer5.read(cx).is_dirty());
2551 });
2552
2553 // Rename and delete files and directories.
2554 tree.flush_fs_events(cx).await;
2555 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2556 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2557 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2558 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2559 tree.flush_fs_events(cx).await;
2560
2561 let expected_paths = vec![
2562 "a",
2563 "a/file1",
2564 "a/file2.new",
2565 "b",
2566 "d",
2567 "d/file3",
2568 "d/file4",
2569 ];
2570
2571 cx.read(|app| {
2572 assert_eq!(
2573 tree.read(app)
2574 .paths()
2575 .map(|p| p.to_str().unwrap())
2576 .collect::<Vec<_>>(),
2577 expected_paths
2578 );
2579
2580 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2581 assert_eq!(id_for_path("d/file3", cx), file3_id);
2582 assert_eq!(id_for_path("d/file4", cx), file4_id);
2583
2584 assert_eq!(
2585 buffer2.read(app).file().unwrap().path().as_ref(),
2586 Path::new("a/file2.new")
2587 );
2588 assert_eq!(
2589 buffer3.read(app).file().unwrap().path().as_ref(),
2590 Path::new("d/file3")
2591 );
2592 assert_eq!(
2593 buffer4.read(app).file().unwrap().path().as_ref(),
2594 Path::new("d/file4")
2595 );
2596 assert_eq!(
2597 buffer5.read(app).file().unwrap().path().as_ref(),
2598 Path::new("b/c/file5")
2599 );
2600
2601 assert!(!buffer2.read(app).file().unwrap().is_deleted());
2602 assert!(!buffer3.read(app).file().unwrap().is_deleted());
2603 assert!(!buffer4.read(app).file().unwrap().is_deleted());
2604 assert!(buffer5.read(app).file().unwrap().is_deleted());
2605 });
2606
2607 // Update the remote worktree. Check that it becomes consistent with the
2608 // local worktree.
2609 deterministic.run_until_parked();
2610 remote.update(cx, |remote, _| {
2611 for update in updates.lock().drain(..) {
2612 remote.as_remote_mut().unwrap().update_from_remote(update);
2613 }
2614 });
2615 deterministic.run_until_parked();
2616 remote.read_with(cx, |remote, _| {
2617 assert_eq!(
2618 remote
2619 .paths()
2620 .map(|p| p.to_str().unwrap())
2621 .collect::<Vec<_>>(),
2622 expected_paths
2623 );
2624 });
2625}
2626
2627#[gpui::test(iterations = 10)]
2628async fn test_buffer_identity_across_renames(
2629 deterministic: Arc<Deterministic>,
2630 cx: &mut gpui::TestAppContext,
2631) {
2632 init_test(cx);
2633
2634 let fs = FakeFs::new(cx.background());
2635 fs.insert_tree(
2636 "/dir",
2637 json!({
2638 "a": {
2639 "file1": "",
2640 }
2641 }),
2642 )
2643 .await;
2644
2645 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2646 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2647 let tree_id = tree.read_with(cx, |tree, _| tree.id());
2648
2649 let id_for_path = |path: &'static str, cx: &gpui::TestAppContext| {
2650 project.read_with(cx, |project, cx| {
2651 let tree = project.worktrees(cx).next().unwrap();
2652 tree.read(cx)
2653 .entry_for_path(path)
2654 .unwrap_or_else(|| panic!("no entry for path {}", path))
2655 .id
2656 })
2657 };
2658
2659 let dir_id = id_for_path("a", cx);
2660 let file_id = id_for_path("a/file1", cx);
2661 let buffer = project
2662 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
2663 .await
2664 .unwrap();
2665 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2666
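    // Rename the directory containing the buffer's file. The directory and file
    // entry ids should be preserved, and the buffer should remain clean.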
2667 project
2668 .update(cx, |project, cx| {
2669 project.rename_entry(dir_id, Path::new("b"), cx)
2670 })
2671 .unwrap()
2672 .await
2673 .unwrap();
2674 deterministic.run_until_parked();
2675 assert_eq!(id_for_path("b", cx), dir_id);
2676 assert_eq!(id_for_path("b/file1", cx), file_id);
2677 buffer.read_with(cx, |buffer, _| assert!(!buffer.is_dirty()));
2678}
2679
2680#[gpui::test]
2681async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
2682 init_test(cx);
2683
2684 let fs = FakeFs::new(cx.background());
2685 fs.insert_tree(
2686 "/dir",
2687 json!({
2688 "a.txt": "a-contents",
2689 "b.txt": "b-contents",
2690 }),
2691 )
2692 .await;
2693
2694 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2695
2696 // Spawn multiple tasks to open paths, repeating some paths.
2697 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
2698 (
2699 p.open_local_buffer("/dir/a.txt", cx),
2700 p.open_local_buffer("/dir/b.txt", cx),
2701 p.open_local_buffer("/dir/a.txt", cx),
2702 )
2703 });
2704
2705 let buffer_a_1 = buffer_a_1.await.unwrap();
2706 let buffer_a_2 = buffer_a_2.await.unwrap();
2707 let buffer_b = buffer_b.await.unwrap();
2708 assert_eq!(buffer_a_1.read_with(cx, |b, _| b.text()), "a-contents");
2709 assert_eq!(buffer_b.read_with(cx, |b, _| b.text()), "b-contents");
2710
2711 // There is only one buffer per path.
2712 let buffer_a_id = buffer_a_1.id();
2713 assert_eq!(buffer_a_2.id(), buffer_a_id);
2714
2715 // Open the same path again while it is still open.
2716 drop(buffer_a_1);
2717 let buffer_a_3 = project
2718 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
2719 .await
2720 .unwrap();
2721
2722 // There's still only one buffer per path.
2723 assert_eq!(buffer_a_3.id(), buffer_a_id);
2724}
2725
2726#[gpui::test]
2727async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
2728 init_test(cx);
2729
2730 let fs = FakeFs::new(cx.background());
2731 fs.insert_tree(
2732 "/dir",
2733 json!({
2734 "file1": "abc",
2735 "file2": "def",
2736 "file3": "ghi",
2737 }),
2738 )
2739 .await;
2740
2741 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2742
2743 let buffer1 = project
2744 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2745 .await
2746 .unwrap();
2747 let events = Rc::new(RefCell::new(Vec::new()));
2748
2749 // initially, the buffer isn't dirty.
2750 buffer1.update(cx, |buffer, cx| {
2751 cx.subscribe(&buffer1, {
2752 let events = events.clone();
2753 move |_, _, event, _| match event {
2754 BufferEvent::Operation(_) => {}
2755 _ => events.borrow_mut().push(event.clone()),
2756 }
2757 })
2758 .detach();
2759
2760 assert!(!buffer.is_dirty());
2761 assert!(events.borrow().is_empty());
2762
2763 buffer.edit([(1..2, "")], None, cx);
2764 });
2765
    // after the first edit, the buffer is dirty, and emits a `DirtyChanged` event.
2767 buffer1.update(cx, |buffer, cx| {
2768 assert!(buffer.text() == "ac");
2769 assert!(buffer.is_dirty());
2770 assert_eq!(
2771 *events.borrow(),
2772 &[language::Event::Edited, language::Event::DirtyChanged]
2773 );
2774 events.borrow_mut().clear();
2775 buffer.did_save(
2776 buffer.version(),
2777 buffer.as_rope().fingerprint(),
2778 buffer.file().unwrap().mtime(),
2779 cx,
2780 );
2781 });
2782
2783 // after saving, the buffer is not dirty, and emits a saved event.
2784 buffer1.update(cx, |buffer, cx| {
2785 assert!(!buffer.is_dirty());
2786 assert_eq!(*events.borrow(), &[language::Event::Saved]);
2787 events.borrow_mut().clear();
2788
2789 buffer.edit([(1..1, "B")], None, cx);
2790 buffer.edit([(2..2, "D")], None, cx);
2791 });
2792
    // after editing again, the buffer is dirty, and emits another `DirtyChanged` event.
2794 buffer1.update(cx, |buffer, cx| {
2795 assert!(buffer.text() == "aBDc");
2796 assert!(buffer.is_dirty());
2797 assert_eq!(
2798 *events.borrow(),
2799 &[
2800 language::Event::Edited,
2801 language::Event::DirtyChanged,
2802 language::Event::Edited,
2803 ],
2804 );
2805 events.borrow_mut().clear();
2806
2807 // After restoring the buffer to its previously-saved state,
2808 // the buffer is not considered dirty anymore.
2809 buffer.edit([(1..3, "")], None, cx);
2810 assert!(buffer.text() == "ac");
2811 assert!(!buffer.is_dirty());
2812 });
2813
2814 assert_eq!(
2815 *events.borrow(),
2816 &[language::Event::Edited, language::Event::DirtyChanged]
2817 );
2818
2819 // When a file is deleted, the buffer is considered dirty.
2820 let events = Rc::new(RefCell::new(Vec::new()));
2821 let buffer2 = project
2822 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2823 .await
2824 .unwrap();
2825 buffer2.update(cx, |_, cx| {
2826 cx.subscribe(&buffer2, {
2827 let events = events.clone();
2828 move |_, _, event, _| events.borrow_mut().push(event.clone())
2829 })
2830 .detach();
2831 });
2832
2833 fs.remove_file("/dir/file2".as_ref(), Default::default())
2834 .await
2835 .unwrap();
2836 cx.foreground().run_until_parked();
2837 buffer2.read_with(cx, |buffer, _| assert!(buffer.is_dirty()));
2838 assert_eq!(
2839 *events.borrow(),
2840 &[
2841 language::Event::DirtyChanged,
2842 language::Event::FileHandleChanged
2843 ]
2844 );
2845
    // When a file is already dirty when it's deleted, we don't emit a `DirtyChanged` event.
2847 let events = Rc::new(RefCell::new(Vec::new()));
2848 let buffer3 = project
2849 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
2850 .await
2851 .unwrap();
2852 buffer3.update(cx, |_, cx| {
2853 cx.subscribe(&buffer3, {
2854 let events = events.clone();
2855 move |_, _, event, _| events.borrow_mut().push(event.clone())
2856 })
2857 .detach();
2858 });
2859
2860 buffer3.update(cx, |buffer, cx| {
2861 buffer.edit([(0..0, "x")], None, cx);
2862 });
2863 events.borrow_mut().clear();
2864 fs.remove_file("/dir/file3".as_ref(), Default::default())
2865 .await
2866 .unwrap();
2867 cx.foreground().run_until_parked();
2868 assert_eq!(*events.borrow(), &[language::Event::FileHandleChanged]);
2869 cx.read(|cx| assert!(buffer3.read(cx).is_dirty()));
2870}
2871
2872#[gpui::test]
2873async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
2874 init_test(cx);
2875
2876 let initial_contents = "aaa\nbbbbb\nc\n";
2877 let fs = FakeFs::new(cx.background());
2878 fs.insert_tree(
2879 "/dir",
2880 json!({
2881 "the-file": initial_contents,
2882 }),
2883 )
2884 .await;
2885 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2886 let buffer = project
2887 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
2888 .await
2889 .unwrap();
2890
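    // Create anchors within each of the first three lines so that their
    // positions can be checked after the file is reloaded from disk.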
2891 let anchors = (0..3)
2892 .map(|row| buffer.read_with(cx, |b, _| b.anchor_before(Point::new(row, 1))))
2893 .collect::<Vec<_>>();
2894
2895 // Change the file on disk, adding two new lines of text, and removing
2896 // one line.
2897 buffer.read_with(cx, |buffer, _| {
2898 assert!(!buffer.is_dirty());
2899 assert!(!buffer.has_conflict());
2900 });
2901 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
2902 fs.save(
2903 "/dir/the-file".as_ref(),
2904 &new_contents.into(),
2905 LineEnding::Unix,
2906 )
2907 .await
2908 .unwrap();
2909
2910 // Because the buffer was not modified, it is reloaded from disk. Its
2911 // contents are edited according to the diff between the old and new
2912 // file contents.
2913 cx.foreground().run_until_parked();
2914 buffer.update(cx, |buffer, _| {
2915 assert_eq!(buffer.text(), new_contents);
2916 assert!(!buffer.is_dirty());
2917 assert!(!buffer.has_conflict());
2918
2919 let anchor_positions = anchors
2920 .iter()
2921 .map(|anchor| anchor.to_point(&*buffer))
2922 .collect::<Vec<_>>();
2923 assert_eq!(
2924 anchor_positions,
2925 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
2926 );
2927 });
2928
2929 // Modify the buffer
2930 buffer.update(cx, |buffer, cx| {
2931 buffer.edit([(0..0, " ")], None, cx);
2932 assert!(buffer.is_dirty());
2933 assert!(!buffer.has_conflict());
2934 });
2935
2936 // Change the file on disk again, adding blank lines to the beginning.
2937 fs.save(
2938 "/dir/the-file".as_ref(),
2939 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
2940 LineEnding::Unix,
2941 )
2942 .await
2943 .unwrap();
2944
2945 // Because the buffer is modified, it doesn't reload from disk, but is
2946 // marked as having a conflict.
2947 cx.foreground().run_until_parked();
2948 buffer.read_with(cx, |buffer, _| {
2949 assert!(buffer.has_conflict());
2950 });
2951}
2952
2953#[gpui::test]
2954async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
2955 init_test(cx);
2956
2957 let fs = FakeFs::new(cx.background());
2958 fs.insert_tree(
2959 "/dir",
2960 json!({
2961 "file1": "a\nb\nc\n",
2962 "file2": "one\r\ntwo\r\nthree\r\n",
2963 }),
2964 )
2965 .await;
2966
2967 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2968 let buffer1 = project
2969 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2970 .await
2971 .unwrap();
2972 let buffer2 = project
2973 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
2974 .await
2975 .unwrap();
2976
2977 buffer1.read_with(cx, |buffer, _| {
2978 assert_eq!(buffer.text(), "a\nb\nc\n");
2979 assert_eq!(buffer.line_ending(), LineEnding::Unix);
2980 });
2981 buffer2.read_with(cx, |buffer, _| {
2982 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
2983 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2984 });
2985
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
2988 fs.save(
2989 "/dir/file1".as_ref(),
2990 &"aaa\nb\nc\n".into(),
2991 LineEnding::Windows,
2992 )
2993 .await
2994 .unwrap();
2995 cx.foreground().run_until_parked();
2996 buffer1.read_with(cx, |buffer, _| {
2997 assert_eq!(buffer.text(), "aaa\nb\nc\n");
2998 assert_eq!(buffer.line_ending(), LineEnding::Windows);
2999 });
3000
    // Save a file with Windows line endings. The file is written correctly.
3002 buffer2.update(cx, |buffer, cx| {
3003 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3004 });
3005 project
3006 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3007 .await
3008 .unwrap();
3009 assert_eq!(
3010 fs.load("/dir/file2".as_ref()).await.unwrap(),
3011 "one\r\ntwo\r\nthree\r\nfour\r\n",
3012 );
3013}
3014
3015#[gpui::test]
3016async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3017 init_test(cx);
3018
3019 let fs = FakeFs::new(cx.background());
3020 fs.insert_tree(
3021 "/the-dir",
3022 json!({
3023 "a.rs": "
3024 fn foo(mut v: Vec<usize>) {
3025 for x in &v {
3026 v.push(1);
3027 }
3028 }
3029 "
3030 .unindent(),
3031 }),
3032 )
3033 .await;
3034
3035 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3036 let buffer = project
3037 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3038 .await
3039 .unwrap();
3040
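    // Publish diagnostics in which hint-severity entries reference their primary
    // diagnostics via related information, so that they are grouped together.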
3041 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3042 let message = lsp::PublishDiagnosticsParams {
3043 uri: buffer_uri.clone(),
3044 diagnostics: vec![
3045 lsp::Diagnostic {
3046 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3047 severity: Some(DiagnosticSeverity::WARNING),
3048 message: "error 1".to_string(),
3049 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3050 location: lsp::Location {
3051 uri: buffer_uri.clone(),
3052 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3053 },
3054 message: "error 1 hint 1".to_string(),
3055 }]),
3056 ..Default::default()
3057 },
3058 lsp::Diagnostic {
3059 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3060 severity: Some(DiagnosticSeverity::HINT),
3061 message: "error 1 hint 1".to_string(),
3062 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3063 location: lsp::Location {
3064 uri: buffer_uri.clone(),
3065 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3066 },
3067 message: "original diagnostic".to_string(),
3068 }]),
3069 ..Default::default()
3070 },
3071 lsp::Diagnostic {
3072 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3073 severity: Some(DiagnosticSeverity::ERROR),
3074 message: "error 2".to_string(),
3075 related_information: Some(vec![
3076 lsp::DiagnosticRelatedInformation {
3077 location: lsp::Location {
3078 uri: buffer_uri.clone(),
3079 range: lsp::Range::new(
3080 lsp::Position::new(1, 13),
3081 lsp::Position::new(1, 15),
3082 ),
3083 },
3084 message: "error 2 hint 1".to_string(),
3085 },
3086 lsp::DiagnosticRelatedInformation {
3087 location: lsp::Location {
3088 uri: buffer_uri.clone(),
3089 range: lsp::Range::new(
3090 lsp::Position::new(1, 13),
3091 lsp::Position::new(1, 15),
3092 ),
3093 },
3094 message: "error 2 hint 2".to_string(),
3095 },
3096 ]),
3097 ..Default::default()
3098 },
3099 lsp::Diagnostic {
3100 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3101 severity: Some(DiagnosticSeverity::HINT),
3102 message: "error 2 hint 1".to_string(),
3103 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3104 location: lsp::Location {
3105 uri: buffer_uri.clone(),
3106 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3107 },
3108 message: "original diagnostic".to_string(),
3109 }]),
3110 ..Default::default()
3111 },
3112 lsp::Diagnostic {
3113 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3114 severity: Some(DiagnosticSeverity::HINT),
3115 message: "error 2 hint 2".to_string(),
3116 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3117 location: lsp::Location {
3118 uri: buffer_uri,
3119 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3120 },
3121 message: "original diagnostic".to_string(),
3122 }]),
3123 ..Default::default()
3124 },
3125 ],
3126 version: None,
3127 };
3128
3129 project
3130 .update(cx, |p, cx| {
3131 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3132 })
3133 .unwrap();
3134 let buffer = buffer.read_with(cx, |buffer, _| buffer.snapshot());
3135
3136 assert_eq!(
3137 buffer
3138 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3139 .collect::<Vec<_>>(),
3140 &[
3141 DiagnosticEntry {
3142 range: Point::new(1, 8)..Point::new(1, 9),
3143 diagnostic: Diagnostic {
3144 severity: DiagnosticSeverity::WARNING,
3145 message: "error 1".to_string(),
3146 group_id: 1,
3147 is_primary: true,
3148 ..Default::default()
3149 }
3150 },
3151 DiagnosticEntry {
3152 range: Point::new(1, 8)..Point::new(1, 9),
3153 diagnostic: Diagnostic {
3154 severity: DiagnosticSeverity::HINT,
3155 message: "error 1 hint 1".to_string(),
3156 group_id: 1,
3157 is_primary: false,
3158 ..Default::default()
3159 }
3160 },
3161 DiagnosticEntry {
3162 range: Point::new(1, 13)..Point::new(1, 15),
3163 diagnostic: Diagnostic {
3164 severity: DiagnosticSeverity::HINT,
3165 message: "error 2 hint 1".to_string(),
3166 group_id: 0,
3167 is_primary: false,
3168 ..Default::default()
3169 }
3170 },
3171 DiagnosticEntry {
3172 range: Point::new(1, 13)..Point::new(1, 15),
3173 diagnostic: Diagnostic {
3174 severity: DiagnosticSeverity::HINT,
3175 message: "error 2 hint 2".to_string(),
3176 group_id: 0,
3177 is_primary: false,
3178 ..Default::default()
3179 }
3180 },
3181 DiagnosticEntry {
3182 range: Point::new(2, 8)..Point::new(2, 17),
3183 diagnostic: Diagnostic {
3184 severity: DiagnosticSeverity::ERROR,
3185 message: "error 2".to_string(),
3186 group_id: 0,
3187 is_primary: true,
3188 ..Default::default()
3189 }
3190 }
3191 ]
3192 );
3193
3194 assert_eq!(
3195 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3196 &[
3197 DiagnosticEntry {
3198 range: Point::new(1, 13)..Point::new(1, 15),
3199 diagnostic: Diagnostic {
3200 severity: DiagnosticSeverity::HINT,
3201 message: "error 2 hint 1".to_string(),
3202 group_id: 0,
3203 is_primary: false,
3204 ..Default::default()
3205 }
3206 },
3207 DiagnosticEntry {
3208 range: Point::new(1, 13)..Point::new(1, 15),
3209 diagnostic: Diagnostic {
3210 severity: DiagnosticSeverity::HINT,
3211 message: "error 2 hint 2".to_string(),
3212 group_id: 0,
3213 is_primary: false,
3214 ..Default::default()
3215 }
3216 },
3217 DiagnosticEntry {
3218 range: Point::new(2, 8)..Point::new(2, 17),
3219 diagnostic: Diagnostic {
3220 severity: DiagnosticSeverity::ERROR,
3221 message: "error 2".to_string(),
3222 group_id: 0,
3223 is_primary: true,
3224 ..Default::default()
3225 }
3226 }
3227 ]
3228 );
3229
3230 assert_eq!(
3231 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3232 &[
3233 DiagnosticEntry {
3234 range: Point::new(1, 8)..Point::new(1, 9),
3235 diagnostic: Diagnostic {
3236 severity: DiagnosticSeverity::WARNING,
3237 message: "error 1".to_string(),
3238 group_id: 1,
3239 is_primary: true,
3240 ..Default::default()
3241 }
3242 },
3243 DiagnosticEntry {
3244 range: Point::new(1, 8)..Point::new(1, 9),
3245 diagnostic: Diagnostic {
3246 severity: DiagnosticSeverity::HINT,
3247 message: "error 1 hint 1".to_string(),
3248 group_id: 1,
3249 is_primary: false,
3250 ..Default::default()
3251 }
3252 },
3253 ]
3254 );
3255}
3256
3257#[gpui::test]
3258async fn test_rename(cx: &mut gpui::TestAppContext) {
3259 init_test(cx);
3260
3261 let mut language = Language::new(
3262 LanguageConfig {
3263 name: "Rust".into(),
3264 path_suffixes: vec!["rs".to_string()],
3265 ..Default::default()
3266 },
3267 Some(tree_sitter_rust::language()),
3268 );
3269 let mut fake_servers = language
3270 .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
3271 capabilities: lsp::ServerCapabilities {
3272 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3273 prepare_provider: Some(true),
3274 work_done_progress_options: Default::default(),
3275 })),
3276 ..Default::default()
3277 },
3278 ..Default::default()
3279 }))
3280 .await;
3281
3282 let fs = FakeFs::new(cx.background());
3283 fs.insert_tree(
3284 "/dir",
3285 json!({
3286 "one.rs": "const ONE: usize = 1;",
3287 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3288 }),
3289 )
3290 .await;
3291
3292 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3293 project.update(cx, |project, _| project.languages.add(Arc::new(language)));
3294 let buffer = project
3295 .update(cx, |project, cx| {
3296 project.open_local_buffer("/dir/one.rs", cx)
3297 })
3298 .await
3299 .unwrap();
3300
3301 let fake_server = fake_servers.next().await.unwrap();
3302
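    // Prepare a rename at a position inside `ONE`. The fake server responds with
    // the full range of the identifier.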
3303 let response = project.update(cx, |project, cx| {
3304 project.prepare_rename(buffer.clone(), 7, cx)
3305 });
3306 fake_server
3307 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3308 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3309 assert_eq!(params.position, lsp::Position::new(0, 7));
3310 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3311 lsp::Position::new(0, 6),
3312 lsp::Position::new(0, 9),
3313 ))))
3314 })
3315 .next()
3316 .await
3317 .unwrap();
3318 let range = response.await.unwrap().unwrap();
3319 let range = buffer.read_with(cx, |buffer, _| range.to_offset(buffer));
3320 assert_eq!(range, 6..9);
3321
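    // Perform the rename. The server returns edits for both files, which are
    // surfaced as a project transaction containing both buffers.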
3322 let response = project.update(cx, |project, cx| {
3323 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3324 });
3325 fake_server
3326 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3327 assert_eq!(
3328 params.text_document_position.text_document.uri.as_str(),
3329 "file:///dir/one.rs"
3330 );
3331 assert_eq!(
3332 params.text_document_position.position,
3333 lsp::Position::new(0, 7)
3334 );
3335 assert_eq!(params.new_name, "THREE");
3336 Ok(Some(lsp::WorkspaceEdit {
3337 changes: Some(
3338 [
3339 (
3340 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3341 vec![lsp::TextEdit::new(
3342 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3343 "THREE".to_string(),
3344 )],
3345 ),
3346 (
3347 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3348 vec![
3349 lsp::TextEdit::new(
3350 lsp::Range::new(
3351 lsp::Position::new(0, 24),
3352 lsp::Position::new(0, 27),
3353 ),
3354 "THREE".to_string(),
3355 ),
3356 lsp::TextEdit::new(
3357 lsp::Range::new(
3358 lsp::Position::new(0, 35),
3359 lsp::Position::new(0, 38),
3360 ),
3361 "THREE".to_string(),
3362 ),
3363 ],
3364 ),
3365 ]
3366 .into_iter()
3367 .collect(),
3368 ),
3369 ..Default::default()
3370 }))
3371 })
3372 .next()
3373 .await
3374 .unwrap();
3375 let mut transaction = response.await.unwrap().0;
3376 assert_eq!(transaction.len(), 2);
3377 assert_eq!(
3378 transaction
3379 .remove_entry(&buffer)
3380 .unwrap()
3381 .0
3382 .read_with(cx, |buffer, _| buffer.text()),
3383 "const THREE: usize = 1;"
3384 );
3385 assert_eq!(
3386 transaction
3387 .into_keys()
3388 .next()
3389 .unwrap()
3390 .read_with(cx, |buffer, _| buffer.text()),
3391 "const TWO: usize = one::THREE + one::THREE;"
3392 );
3393}
3394
3395#[gpui::test]
3396async fn test_search(cx: &mut gpui::TestAppContext) {
3397 init_test(cx);
3398
3399 let fs = FakeFs::new(cx.background());
3400 fs.insert_tree(
3401 "/dir",
3402 json!({
3403 "one.rs": "const ONE: usize = 1;",
3404 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3405 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3406 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3407 }),
3408 )
3409 .await;
3410 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3411 assert_eq!(
3412 search(
3413 &project,
3414 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3415 cx
3416 )
3417 .await
3418 .unwrap(),
3419 HashMap::from_iter([
3420 ("two.rs".to_string(), vec![6..9]),
3421 ("three.rs".to_string(), vec![37..40])
3422 ])
3423 );
3424
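    // Edit an open buffer without saving. Subsequent searches should reflect the
    // buffer's in-memory contents rather than the file on disk.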
3425 let buffer_4 = project
3426 .update(cx, |project, cx| {
3427 project.open_local_buffer("/dir/four.rs", cx)
3428 })
3429 .await
3430 .unwrap();
3431 buffer_4.update(cx, |buffer, cx| {
3432 let text = "two::TWO";
3433 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3434 });
3435
3436 assert_eq!(
3437 search(
3438 &project,
3439 SearchQuery::text("TWO", false, true, Vec::new(), Vec::new()),
3440 cx
3441 )
3442 .await
3443 .unwrap(),
3444 HashMap::from_iter([
3445 ("two.rs".to_string(), vec![6..9]),
3446 ("three.rs".to_string(), vec![37..40]),
3447 ("four.rs".to_string(), vec![25..28, 36..39])
3448 ])
3449 );
3450}
3451
3452#[gpui::test]
3453async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3454 init_test(cx);
3455
3456 let search_query = "file";
3457
3458 let fs = FakeFs::new(cx.background());
3459 fs.insert_tree(
3460 "/dir",
3461 json!({
3462 "one.rs": r#"// Rust file one"#,
3463 "one.ts": r#"// TypeScript file one"#,
3464 "two.rs": r#"// Rust file two"#,
3465 "two.ts": r#"// TypeScript file two"#,
3466 }),
3467 )
3468 .await;
3469 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3470
3471 assert!(
3472 search(
3473 &project,
3474 SearchQuery::text(
3475 search_query,
3476 false,
3477 true,
3478 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3479 Vec::new()
3480 ),
3481 cx
3482 )
3483 .await
3484 .unwrap()
3485 .is_empty(),
3486 "If no inclusions match, no files should be returned"
3487 );
3488
3489 assert_eq!(
3490 search(
3491 &project,
3492 SearchQuery::text(
3493 search_query,
3494 false,
3495 true,
3496 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3497 Vec::new()
3498 ),
3499 cx
3500 )
3501 .await
3502 .unwrap(),
3503 HashMap::from_iter([
3504 ("one.rs".to_string(), vec![8..12]),
3505 ("two.rs".to_string(), vec![8..12]),
3506 ]),
3507 "Rust only search should give only Rust files"
3508 );
3509
3510 assert_eq!(
3511 search(
3512 &project,
3513 SearchQuery::text(
3514 search_query,
3515 false,
3516 true,
3517 vec![
3518 Glob::new("*.ts").unwrap().compile_matcher(),
3519 Glob::new("*.odd").unwrap().compile_matcher(),
3520 ],
3521 Vec::new()
3522 ),
3523 cx
3524 )
3525 .await
3526 .unwrap(),
3527 HashMap::from_iter([
3528 ("one.ts".to_string(), vec![14..18]),
3529 ("two.ts".to_string(), vec![14..18]),
3530 ]),
3531 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3532 );
3533
3534 assert_eq!(
3535 search(
3536 &project,
3537 SearchQuery::text(
3538 search_query,
3539 false,
3540 true,
3541 vec![
3542 Glob::new("*.rs").unwrap().compile_matcher(),
3543 Glob::new("*.ts").unwrap().compile_matcher(),
3544 Glob::new("*.odd").unwrap().compile_matcher(),
3545 ],
3546 Vec::new()
3547 ),
3548 cx
3549 )
3550 .await
3551 .unwrap(),
3552 HashMap::from_iter([
3553 ("one.rs".to_string(), vec![8..12]),
3554 ("one.ts".to_string(), vec![14..18]),
3555 ("two.rs".to_string(), vec![8..12]),
3556 ("two.ts".to_string(), vec![14..18]),
3557 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3559 );
3560}
3561
3562#[gpui::test]
3563async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3564 init_test(cx);
3565
3566 let search_query = "file";
3567
3568 let fs = FakeFs::new(cx.background());
3569 fs.insert_tree(
3570 "/dir",
3571 json!({
3572 "one.rs": r#"// Rust file one"#,
3573 "one.ts": r#"// TypeScript file one"#,
3574 "two.rs": r#"// Rust file two"#,
3575 "two.ts": r#"// TypeScript file two"#,
3576 }),
3577 )
3578 .await;
3579 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3580
3581 assert_eq!(
3582 search(
3583 &project,
3584 SearchQuery::text(
3585 search_query,
3586 false,
3587 true,
3588 Vec::new(),
3589 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3590 ),
3591 cx
3592 )
3593 .await
3594 .unwrap(),
3595 HashMap::from_iter([
3596 ("one.rs".to_string(), vec![8..12]),
3597 ("one.ts".to_string(), vec![14..18]),
3598 ("two.rs".to_string(), vec![8..12]),
3599 ("two.ts".to_string(), vec![14..18]),
3600 ]),
3601 "If no exclusions match, all files should be returned"
3602 );
3603
3604 assert_eq!(
3605 search(
3606 &project,
3607 SearchQuery::text(
3608 search_query,
3609 false,
3610 true,
3611 Vec::new(),
3612 vec![Glob::new("*.rs").unwrap().compile_matcher()],
3613 ),
3614 cx
3615 )
3616 .await
3617 .unwrap(),
3618 HashMap::from_iter([
3619 ("one.ts".to_string(), vec![14..18]),
3620 ("two.ts".to_string(), vec![14..18]),
3621 ]),
3622 "Rust exclusion search should give only TypeScript files"
3623 );
3624
3625 assert_eq!(
3626 search(
3627 &project,
3628 SearchQuery::text(
3629 search_query,
3630 false,
3631 true,
3632 Vec::new(),
3633 vec![
3634 Glob::new("*.ts").unwrap().compile_matcher(),
3635 Glob::new("*.odd").unwrap().compile_matcher(),
3636 ],
3637 ),
3638 cx
3639 )
3640 .await
3641 .unwrap(),
3642 HashMap::from_iter([
3643 ("one.rs".to_string(), vec![8..12]),
3644 ("two.rs".to_string(), vec![8..12]),
3645 ]),
3646 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
3647 );
3648
3649 assert!(
3650 search(
3651 &project,
3652 SearchQuery::text(
3653 search_query,
3654 false,
3655 true,
3656 Vec::new(),
3657 vec![
3658 Glob::new("*.rs").unwrap().compile_matcher(),
3659 Glob::new("*.ts").unwrap().compile_matcher(),
3660 Glob::new("*.odd").unwrap().compile_matcher(),
3661 ],
3662 ),
3663 cx
3664 )
3665 .await
3666 .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
3668 );
3669}
3670
3671#[gpui::test]
3672async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
3673 init_test(cx);
3674
3675 let search_query = "file";
3676
3677 let fs = FakeFs::new(cx.background());
3678 fs.insert_tree(
3679 "/dir",
3680 json!({
3681 "one.rs": r#"// Rust file one"#,
3682 "one.ts": r#"// TypeScript file one"#,
3683 "two.rs": r#"// Rust file two"#,
3684 "two.ts": r#"// TypeScript file two"#,
3685 }),
3686 )
3687 .await;
3688 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3689
3690 assert!(
3691 search(
3692 &project,
3693 SearchQuery::text(
3694 search_query,
3695 false,
3696 true,
3697 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3698 vec![Glob::new("*.odd").unwrap().compile_matcher()],
3699 ),
3700 cx
3701 )
3702 .await
3703 .unwrap()
3704 .is_empty(),
        "If neither the inclusions nor the exclusions match anything, no files should be returned"
3706 );
3707
3708 assert!(
3709 search(
3710 &project,
3711 SearchQuery::text(
3712 search_query,
3713 false,
3714 true,
3715 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3716 vec![Glob::new("*.ts").unwrap().compile_matcher()],
3717 ),
3718 cx
3719 )
3720 .await
3721 .unwrap()
3722 .is_empty(),
        "If both TypeScript inclusions and exclusions match, exclusions should win and no files should be returned."
3724 );
3725
3726 assert!(
3727 search(
3728 &project,
3729 SearchQuery::text(
3730 search_query,
3731 false,
3732 true,
3733 vec![
3734 Glob::new("*.ts").unwrap().compile_matcher(),
3735 Glob::new("*.odd").unwrap().compile_matcher()
3736 ],
3737 vec![
3738 Glob::new("*.ts").unwrap().compile_matcher(),
3739 Glob::new("*.odd").unwrap().compile_matcher()
3740 ],
3741 ),
3742 cx
3743 )
3744 .await
3745 .unwrap()
3746 .is_empty(),
        "Adding non-matching inclusions and exclusions should not change the result."
3748 );
3749
3750 assert_eq!(
3751 search(
3752 &project,
3753 SearchQuery::text(
3754 search_query,
3755 false,
3756 true,
3757 vec![
3758 Glob::new("*.ts").unwrap().compile_matcher(),
3759 Glob::new("*.odd").unwrap().compile_matcher()
3760 ],
3761 vec![
3762 Glob::new("*.rs").unwrap().compile_matcher(),
3763 Glob::new("*.odd").unwrap().compile_matcher()
3764 ],
3765 ),
3766 cx
3767 )
3768 .await
3769 .unwrap(),
3770 HashMap::from_iter([
3771 ("one.ts".to_string(), vec![14..18]),
3772 ("two.ts".to_string(), vec![14..18]),
3773 ]),
3774 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
3775 );
3776}
3777
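// Runs the given search query on the project and returns the resulting match
// ranges, keyed by file path.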
3778async fn search(
3779 project: &ModelHandle<Project>,
3780 query: SearchQuery,
3781 cx: &mut gpui::TestAppContext,
3782) -> Result<HashMap<String, Vec<Range<usize>>>> {
3783 let results = project
3784 .update(cx, |project, cx| project.search(query, cx))
3785 .await?;
3786
3787 Ok(results
3788 .into_iter()
3789 .map(|(buffer, ranges)| {
3790 buffer.read_with(cx, |buffer, _| {
3791 let path = buffer.file().unwrap().path().to_string_lossy().to_string();
3792 let ranges = ranges
3793 .into_iter()
3794 .map(|range| range.to_offset(buffer))
3795 .collect::<Vec<_>>();
3796 (path, ranges)
3797 })
3798 })
3799 .collect())
3800}
3801
3802fn init_test(cx: &mut gpui::TestAppContext) {
3803 cx.foreground().forbid_parking();
3804
3805 cx.update(|cx| {
3806 cx.set_global(SettingsStore::test(cx));
3807 language::init(cx);
3808 Project::init_settings(cx);
3809 });
3810}